diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 185cf6702e69..646aadc9b5ad 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,7 +3,7 @@ "appPort": 8080, "features": { "ghcr.io/devcontainers/features/go:1": { - "version": "1.23" + "version": "1.24" }, "ghcr.io/devcontainers/features/node:1": { "version": "20" diff --git a/.devcontainer/pre-build.sh b/.devcontainer/pre-build.sh index a74aaae14402..92127cfcb78b 100755 --- a/.devcontainer/pre-build.sh +++ b/.devcontainer/pre-build.sh @@ -1,19 +1,20 @@ -#!/usr/bin/env sh +#!/usr/bin/env bash set -eux -# install kubernetes +# install kubernetes using the minimum tested version +. hack/k8s-versions.sh wget -q -O - https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash -k3d cluster get k3s-default || k3d cluster create --image rancher/k3s:v1.27.3-k3s1 --wait +k3d cluster get k3s-default || k3d cluster create --image "rancher/k3s:${K8S_VERSIONS[min]}-k3s1" --wait k3d kubeconfig merge --kubeconfig-merge-default # install kubectl -curl -LO https://dl.k8s.io/release/v1.27.3/bin/linux/$(go env GOARCH)/kubectl +curl -LO "https://dl.k8s.io/release/${K8S_VERSIONS[min]}/bin/linux/$(go env GOARCH)/kubectl" chmod +x ./kubectl sudo mv ./kubectl /usr/local/bin/kubectl kubectl cluster-info # install kit -make kit +curl -q https://raw.githubusercontent.com/kitproj/kit/main/install.sh | sh # install protocol buffer compiler (protoc) sudo apt update diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml index 15df3e3d8b9a..f72236888829 100644 --- a/.github/workflows/ci-build.yaml +++ b/.github/workflows/ci-build.yaml @@ -129,7 +129,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" cache: true - run: make test STATIC_FILES=false GOTEST='go test -p 20 -covermode=atomic 
-coverprofile=coverage.out' - name: Upload coverage report @@ -151,7 +151,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" cache: true # windows run does not use makefile target because it does a lot more than just testing and is not cross-platform compatible - run: go test -p 20 -covermode=atomic -coverprofile='coverage.out' $(go list ./... | select-string -Pattern 'github.com/argoproj/argo-workflows/v3/workflow/controller' , 'github.com/argoproj/argo-workflows/v3/server' -NotMatch) @@ -242,15 +242,15 @@ jobs: profile: minimal use-api: true - test: test-executor - install_k3s_version: v1.28.13+k3s1 + k8s_version: min profile: minimal use-api: false - test: test-corefunctional - install_k3s_version: v1.28.13+k3s1 + k8s_version: min profile: minimal use-api: false - test: test-functional - install_k3s_version: v1.28.13+k3s1 + k8s_version: min profile: minimal use-api: false steps: @@ -267,7 +267,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" cache: true - name: Install Java for the SDK if: ${{matrix.test == 'test-java-sdk'}} @@ -283,12 +283,11 @@ jobs: python-version: '3.x' cache: pip - name: Install and start K3S + env: + K8S_VERSION: ${{ matrix.k8s_version || 'max' }} run: | - if ! echo "${{ matrix.install_k3s_version }}" | egrep '^v[0-9]+\.[0-9]+\.[0-9]+\+k3s1$'; then - export INSTALL_K3S_VERSION=v1.31.0+k3s1 - else - export INSTALL_K3S_VERSION=${{ matrix.install_k3s_version }} - fi + . 
hack/k8s-versions.sh + export INSTALL_K3S_VERSION="${K8S_VERSIONS[$K8S_VERSION]}+k3s1" curl -sfL https://get.k3s.io | INSTALL_K3S_CHANNEL=stable \ INSTALL_K3S_EXEC="--docker --kubelet-arg=config=${GITHUB_WORKSPACE}/test/e2e/manifests/kubelet-configuration.yaml" \ @@ -407,7 +406,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" cache: true - name: Install protoc run: | @@ -444,7 +443,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" cache: true - run: make lint STATIC_FILES=false # if lint makes changes that are not in the PR, fail the build diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index b233813e2452..3c53939f1f94 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -37,7 +37,7 @@ jobs: python-version: 3.9 - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: '1.23' + go-version: "1.24" - uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 with: node-version: "19" diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 2d7ab6c532b1..6a3ce2b4517b 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -245,7 +245,7 @@ jobs: node-version: "20" # change in all GH Workflows - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: - go-version: "1.23" + go-version: "1.24" - name: Restore node packages cache uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0 with: diff --git a/.golangci.yml b/.golangci.yml index b8b0227bd4f6..33599445690e 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,6 +1,5 @@ -# https://golangci-lint.run/usage/quick-start/ 
+version: "2" run: - timeout: 12m build-tags: - api - cli @@ -17,13 +16,7 @@ linters: - bodyclose - copyloopvar - errcheck - - goimports - # only minor issues - # - errorlint - # seems to have bugs in recent version, also slow - # - gci - gosec - - gosimple - govet - ineffassign - misspell @@ -34,36 +27,70 @@ linters: - sqlclosecheck - staticcheck - testifylint - - typecheck - unparam - unused -linters-settings: - goimports: - local-prefixes: github.com/argoproj/argo-workflows/ - gosec: - includes: - - G304 - - G307 - excludes: - # G106: Use of ssh InsecureIgnoreHostKey should be audited - - G106 - # G402: TLS InsecureSkipVerify set true - - G402 - # G601: Implicit memory aliasing in for loop. - - G601 -issues: - exclude-rules: - - path: server/artifacts/artifact_server_test.go - text: "response body must be closed" - exclude-dirs: - - dist - - docs - - examples - - hack - - manifests - - pkg/client - - sdks - - ui - - vendor - exclude-files: - - server/static/files.go + settings: + gosec: + includes: + - G304 + - G307 + excludes: + # G106: Use of ssh InsecureIgnoreHostKey should be audited + - G106 + # G402: TLS InsecureSkipVerify set true + - G402 + staticcheck: + checks: + - all + # Capitalised variable names + - "-ST1003" + # Capitalised error strings + - "-ST1005" + # Receiver names + - "-ST1016" + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + rules: + - path: server/artifacts/artifact_server_test.go + text: response body must be closed + paths: + - dist + - docs + - examples + - hack + - manifests + - pkg/client + - sdks + - ui + - vendor + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + - goimports + settings: + goimports: + local-prefixes: + - github.com/argoproj/argo-workflows/ + exclusions: + generated: lax + paths: + - dist + - docs + - examples + - hack + - manifests + - pkg/client + - sdks + - ui + - vendor + - third_party$ + - builtin$ + - examples$ diff 
--git a/.spelling b/.spelling index 370eec2cfbee..f835d79b1c8a 100644 --- a/.spelling +++ b/.spelling @@ -183,6 +183,7 @@ liveness localhost maxFailures maxSuccess +md memoization memoized memoizing @@ -200,6 +201,7 @@ parameterizing params pprof pre-commit +qps rc2 repo roadmap diff --git a/Dockerfile b/Dockerfile index 0a801172de9f..0e3218b94402 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ ARG GIT_COMMIT=unknown ARG GIT_TAG=unknown ARG GIT_TREE_STATE=unknown -FROM golang:1.23-alpine3.19 as builder +FROM golang:1.24-alpine3.21 as builder # libc-dev to build openapi-gen RUN apk update && apk add --no-cache \ @@ -109,6 +109,8 @@ USER 8737 WORKDIR /home/argo +# Temporary workaround for https://github.com/grpc/grpc-go/issues/434 +ENV GRPC_ENFORCE_ALPN_ENABLED=false COPY hack/ssh_known_hosts /etc/ssh/ COPY hack/nsswitch.conf /etc/ COPY --from=argocli-build /go/src/github.com/argoproj/argo-workflows/dist/argo /bin/ diff --git a/Dockerfile.windows b/Dockerfile.windows index 2b2bac69e218..293aaa39f08f 100644 --- a/Dockerfile.windows +++ b/Dockerfile.windows @@ -11,7 +11,7 @@ ARG GIT_TREE_STATE=unknown # had issues with official golange image for windows so I'm using plain servercore FROM mcr.microsoft.com/windows/servercore:${IMAGE_OS_VERSION} as builder -ENV GOLANG_VERSION=1.23 +ENV GOLANG_VERSION=1.24 SHELL ["powershell", "-Command"] # install chocolatey package manager diff --git a/Makefile b/Makefile index b0a6923806c2..919511b583fa 100644 --- a/Makefile +++ b/Makefile @@ -298,12 +298,12 @@ swagger: \ $(GOPATH)/bin/mockery: Makefile # update this in Nix when upgrading it here ifneq ($(USE_NIX), true) - go install github.com/vektra/mockery/v2@v2.42.2 + go install github.com/vektra/mockery/v2@v2.53.3 endif $(GOPATH)/bin/controller-gen: Makefile # update this in Nix when upgrading it here ifneq ($(USE_NIX), true) - go install sigs.k8s.io/controller-tools/cmd/controller-gen@v0.15.0 + go install sigs.k8s.io/controller-tools/cmd/controller-gen@v0.17.2 endif 
$(GOPATH)/bin/go-to-protobuf: Makefile # update this in Nix when upgrading it here @@ -451,7 +451,7 @@ dist/manifests/%: manifests/% # lint/test/etc $(GOPATH)/bin/golangci-lint: Makefile - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b `go env GOPATH`/bin v1.61.0 + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b `go env GOPATH`/bin v2.1.1 .PHONY: lint lint: server/static/files.go $(GOPATH)/bin/golangci-lint @@ -658,7 +658,7 @@ dist/kubernetes.swagger.json: Makefile @mkdir -p dist # recurl will only fetch if the file doesn't exist, so delete it rm -f $@ - ./hack/recurl.sh $@ https://raw.githubusercontent.com/kubernetes/kubernetes/v1.30.3/api/openapi-spec/swagger.json + ./hack/recurl.sh $@ https://raw.githubusercontent.com/kubernetes/kubernetes/v1.32.2/api/openapi-spec/swagger.json pkg/apiclient/_.secondary.swagger.json: hack/api/swagger/secondaryswaggergen.go pkg/apis/workflow/v1alpha1/openapi_generated.go dist/kubernetes.swagger.json rm -Rf v3 vendor @@ -712,7 +712,7 @@ endif .PHONY: docs-spellcheck docs-spellcheck: /usr/local/bin/mdspell # check docs for spelling mistakes - mdspell --ignore-numbers --ignore-acronyms --en-us --no-suggestions --report $(shell find docs -name '*.md' -not -name upgrading.md -not -name README.md -not -name fields.md -not -name upgrading.md -not -name executor_swagger.md -not -path '*/cli/*') + mdspell --ignore-numbers --ignore-acronyms --en-us --no-suggestions --report $(shell find docs -name '*.md' -not -name upgrading.md -not -name README.md -not -name fields.md -not -name upgrading.md -not -name executor_swagger.md -not -path '*/cli/*' -not -name tested-kubernetes-versions.md) # alphabetize spelling file -- ignore first line (comment), then sort the rest case-sensitive and remove duplicates $(shell cat .spelling | awk 'NR<2{ print $0; next } { print $0 | "LC_COLLATE=C sort" }' | uniq | tee .spelling > /dev/null) @@ -737,7 +737,7 
@@ endif .PHONY: docs-lint docs-lint: /usr/local/bin/markdownlint # lint docs - markdownlint docs --fix --ignore docs/fields.md --ignore docs/executor_swagger.md --ignore docs/cli --ignore docs/walk-through/the-structure-of-workflow-specs.md + markdownlint docs --fix --ignore docs/fields.md --ignore docs/executor_swagger.md --ignore docs/cli --ignore docs/walk-through/the-structure-of-workflow-specs.md --ignore docs/tested-kubernetes-versions.md /usr/local/bin/mkdocs: # update this in Nix when upgrading it here @@ -756,6 +756,9 @@ docs: /usr/local/bin/mkdocs \ # check environment-variables.md contains all variables mentioned in the code ./hack/docs/check-env-doc.sh # build the docs +ifeq ($(shell echo $(GIT_BRANCH) | head -c 8),release-) + ./hack/docs/tested-versions.sh > docs/tested-kubernetes-versions.md +endif TZ=UTC mkdocs build --strict # tell the user the fastest way to edit docs @echo "ℹ️ If you want to preview your docs, open site/index.html. If you want to edit them with hot-reload, run 'make docs-serve' to start mkdocs on port 8000" diff --git a/api/jsonschema/schema.json b/api/jsonschema/schema.json index be9c5a9e03f2..fd15fed99a0c 100644 --- a/api/jsonschema/schema.json +++ b/api/jsonschema/schema.json @@ -1,11 +1,11 @@ { - "$id": "http://workflows.argoproj.io/workflows.json", - "$schema": "http://json-schema.org/schema#", + "$id": "https://raw.githubusercontent.com/argoproj/argo-workflows/HEAD/api/jsonschema/schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", "definitions": { "eventsource.CreateEventSourceRequest": { "properties": { "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "namespace": { "type": "string" @@ -19,7 +19,7 @@ "eventsource.EventSourceWatchEvent": { "properties": { "object": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "type": { "type": "string" @@ -59,7 +59,7 @@ "eventsource.UpdateEventSourceRequest": { "properties": { "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "name": { "type": "string" @@ -70,62 +70,7 @@ }, "type": "object" }, - "google.protobuf.Any": { - "properties": { - "type_url": { - "type": "string" - }, - "value": { - "format": "byte", - "type": "string" - } - }, - "type": "object" - }, - "grpc.gateway.runtime.Error": { - "properties": { - "code": { - "type": "integer" - }, - "details": { - "items": { - "$ref": "#/definitions/google.protobuf.Any" - }, - "type": "array" - }, - "error": { - "type": "string" - }, - "message": { - "type": "string" - } - }, - "type": "object" - }, - "grpc.gateway.runtime.StreamError": { - "properties": { - "details": { - "items": { - "$ref": "#/definitions/google.protobuf.Any" - }, - "type": "array" - }, - "grpc_code": { - "type": "integer" - }, - "http_code": { - "type": "integer" - }, - "http_status": { - "type": "string" - }, - "message": { - "type": "string" - } - }, - "type": "object" - }, - "io.argoproj.events.v1alpha1.AMQPConsumeConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPConsumeConfig": { "properties": { "autoAck": { "title": "AutoAck when true, the server will acknowledge deliveries to this consumer prior to writing\nthe delivery to the network\n+optional", @@ -151,22 +96,22 @@ "title": "AMQPConsumeConfig holds the configuration to immediately starts delivering queued messages\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.AMQPEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPEventSource": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "Auth hosts secret selectors for username and password\n+optional" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff holds parameters applied to connection.\n+optional" }, "consume": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPConsumeConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPConsumeConfig", "title": "Consume holds the configuration to immediately starts delivering queued messages\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.Consume\n+optional" }, "exchangeDeclare": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPExchangeDeclareConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPExchangeDeclareConfig", "title": "ExchangeDeclare holds the configuration for the exchange on the server\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.ExchangeDeclare\n+optional" }, "exchangeName": { @@ -178,7 +123,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -193,11 +138,11 @@ "type": "object" }, "queueBind": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPQueueBindConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueBindConfig", "title": "QueueBind holds the configuration that binds an exchange to a queue so that publishings to the\nexchange will be routed to the queue when the publishing routing key matches the binding routing key\nFor more information, visit 
https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.QueueBind\n+optional" }, "queueDeclare": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPQueueDeclareConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueDeclareConfig", "title": "QueueDeclare holds the configuration of a queue to hold messages and deliver to consumers.\nDeclaring creates a queue if it doesn't already exist, or ensures that an existing queue matches\nthe same parameters\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.QueueDeclare\n+optional" }, "routingKey": { @@ -205,7 +150,7 @@ "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the amqp client.\n+optional" }, "url": { @@ -220,7 +165,7 @@ "title": "AMQPEventSource refers to an event-source for AMQP stream events", "type": "object" }, - "io.argoproj.events.v1alpha1.AMQPExchangeDeclareConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPExchangeDeclareConfig": { "properties": { "autoDelete": { "title": "AutoDelete removes the exchange when no bindings are active\n+optional", @@ -242,7 +187,7 @@ "title": "AMQPExchangeDeclareConfig holds the configuration for the exchange on the server\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.AMQPQueueBindConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueBindConfig": { "properties": { "noWait": { "title": "NowWait false and the queue could not be bound, the channel will be closed with an error\n+optional", @@ -252,7 +197,7 @@ "title": "AMQPQueueBindConfig holds the configuration that binds an exchange to a queue so that publishings to the\nexchange will be routed to the queue when the publishing routing key matches the binding routing key\n+k8s:openapi-gen=true", 
"type": "object" }, - "io.argoproj.events.v1alpha1.AMQPQueueDeclareConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueDeclareConfig": { "properties": { "arguments": { "title": "Arguments of a queue (also known as \"x-arguments\") used for optional features and plugins\n+optional", @@ -282,7 +227,7 @@ "title": "AMQPQueueDeclareConfig holds the configuration of a queue to hold messages and deliver to consumers.\nDeclaring creates a queue if it doesn't already exist, or ensures that an existing queue matches\nthe same parameters\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.AWSLambdaTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AWSLambdaTrigger": { "properties": { "accessKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -298,7 +243,7 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" @@ -306,7 +251,7 @@ "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -326,7 +271,7 @@ "title": "AWSLambdaTrigger refers to specification of the trigger to invoke an AWS Lambda function", "type": "object" }, - "io.argoproj.events.v1alpha1.Amount": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount": { "description": "Amount represent a numeric amount.", "properties": { "value": { @@ -336,7 +281,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.ArgoWorkflowTrigger": { + 
"github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArgoWorkflowTrigger": { "properties": { "args": { "items": { @@ -351,31 +296,31 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of parameters to pass to resolved Argo Workflow object", "type": "array" }, "source": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArtifactLocation", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation", "title": "Source of the K8s resource file(s)" } }, "title": "ArgoWorkflowTrigger is the trigger for the Argo Workflow", "type": "object" }, - "io.argoproj.events.v1alpha1.ArtifactLocation": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation": { "properties": { "configmap": { "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapKeySelector", "title": "Configmap that stores the artifact" }, "file": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.FileArtifact", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileArtifact", "title": "File artifact is artifact stored in a file" }, "git": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitArtifact", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitArtifact", "title": "Git repository hosting the artifact" }, "inline": { @@ -383,22 +328,22 @@ "type": "string" }, "resource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Resource", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResource", "title": "Resource is generic template for K8s resource" }, "s3": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Artifact", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact", "title": "S3 compliant 
artifact" }, "url": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.URLArtifact", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.URLArtifact", "title": "URL to fetch the artifact from" } }, "title": "ArtifactLocation describes the source location for an external artifact", "type": "object" }, - "io.argoproj.events.v1alpha1.AzureEventHubsTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventHubsTrigger": { "properties": { "fqdn": { "title": "FQDN refers to the namespace dns of Azure Event Hubs to be used i.e. \u003cnamespace\u003e.servicebus.windows.net", @@ -410,7 +355,7 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" @@ -418,7 +363,7 @@ "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -434,10 +379,10 @@ "title": "AzureEventHubsTrigger refers to specification of the Azure Event Hubs Trigger", "type": "object" }, - "io.argoproj.events.v1alpha1.AzureEventsHubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventsHubEventSource": { "properties": { "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "fqdn": { @@ -457,17 +402,17 @@ }, "sharedAccessKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - 
"title": "SharedAccessKey is the generated value of the key" + "title": "SharedAccessKey is the generated value of the key. If both this field and SharedAccessKeyName are not provided\nit will try to access via Azure AD with DefaultAzureCredential, FQDN and HubName.\n+optional" }, "sharedAccessKeyName": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "SharedAccessKeyName is the name you chose for your application's SAS keys" + "title": "SharedAccessKeyName is the name you chose for your application's SAS keys. If both this field and SharedAccessKey are not provided\nit will try to access via Azure AD with DefaultAzureCredential, FQDN and HubName.\n+optional" } }, "title": "AzureEventsHubEventSource describes the event source for azure events hub\nMore info at https://docs.microsoft.com/en-us/azure/event-hubs/", "type": "object" }, - "io.argoproj.events.v1alpha1.AzureQueueStorageEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureQueueStorageEventSource": { "properties": { "connectionString": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -482,7 +427,7 @@ "type": "boolean" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -512,14 +457,14 @@ "title": "AzureQueueStorageEventSource describes the event source for azure queue storage\nmore info at https://learn.microsoft.com/en-us/azure/storage/queues/", "type": "object" }, - "io.argoproj.events.v1alpha1.AzureServiceBusEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusEventSource": { "properties": { "connectionString": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", "title": "ConnectionString is the connection string for the Azure Service Bus. 
If this fields is not provided\nit will try to access via Azure AD with DefaultAzureCredential and FullyQualifiedNamespace.\n+optional" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "fullyQualifiedNamespace": { @@ -546,7 +491,7 @@ "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the service bus client\n+optional" }, "topicName": { @@ -557,7 +502,7 @@ "title": "AzureServiceBusEventSource describes the event source for azure service bus\nMore info at https://docs.microsoft.com/en-us/azure/service-bus-messaging/", "type": "object" }, - "io.argoproj.events.v1alpha1.AzureServiceBusTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusTrigger": { "properties": { "connectionString": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -565,7 +510,7 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" @@ -573,7 +518,7 @@ "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -586,7 +531,7 @@ "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the service bus client\n+optional" }, "topicName": { @@ -596,18 +541,18 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.Backoff": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff": { "properties": { "duration": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Int64OrString", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Int64OrString", "title": "The initial duration in nanoseconds or strings like \"1s\", \"3m\"\n+optional" }, "factor": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Amount", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount", "title": "Duration is multiplied by factor each iteration\n+optional" }, "jitter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Amount", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount", "title": "The amount of jitter applied each iteration\n+optional" }, "steps": { @@ -618,7 +563,7 @@ "title": "Backoff for an operation", "type": "object" }, - "io.argoproj.events.v1alpha1.BasicAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth": { "properties": { "password": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -632,10 +577,10 @@ "title": "BasicAuth contains the reference to K8s secrets that holds the username and password", "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketAuth": { "properties": { "basic": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketBasicAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketBasicAuth", "title": "Basic is BasicAuth auth strategy.\n+optional" }, "oauthToken": { @@ -646,7 +591,7 @@ "title": "BitbucketAuth holds the different 
auth strategies for connecting to Bitbucket", "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketBasicAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketBasicAuth": { "properties": { "password": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -657,13 +602,13 @@ "description": "Username refers to the K8s secret that holds the username." } }, - "title": "BasicAuth holds the information required to authenticate user via basic auth mechanism", + "title": "BitbucketBasicAuth holds the information required to authenticate user via basic auth mechanism", "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketEventSource": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketAuth", "description": "Auth information required to connect to Bitbucket." 
}, "deleteHookOnFinish": { @@ -678,7 +623,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -698,7 +643,7 @@ }, "repositories": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketRepository" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketRepository" }, "title": "Repositories holds a list of repositories for which integration needs to set up\n+optional", "type": "array" @@ -708,14 +653,14 @@ "type": "string" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook refers to the configuration required to run an http server" } }, "title": "BitbucketEventSource describes the event source for Bitbucket", "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketRepository": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketRepository": { "properties": { "owner": { "title": "Owner is the owner of the repository", @@ -728,14 +673,18 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketServerEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerEventSource": { "properties": { "accessToken": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "AccessToken is reference to K8s secret which holds the bitbucket api access information" + "title": "AccessToken is reference to K8s secret which holds the bitbucket api access information.\n+optional" }, "bitbucketserverBaseURL": { - "title": "BitbucketServerBaseURL is the base URL for API requests to a custom endpoint", + "description": "BitbucketServerBaseURL is the base URL for API requests to a custom endpoint.", + 
"type": "string" + }, + "checkInterval": { + "title": "CheckInterval is a duration in which to wait before checking that the webhooks exist, e.g. 1s, 30m, 2h... (defaults to 1m)\n+optional", "type": "string" }, "deleteHookOnFinish": { @@ -746,11 +695,11 @@ "items": { "type": "string" }, - "title": "Events are bitbucket event to listen to.\nRefer https://confluence.atlassian.com/bitbucketserver/event-payload-938025882.html", + "title": "Events are bitbucket event to listen to.\nRefer https://confluence.atlassian.com/bitbucketserver/event-payload-938025882.html\n+optional", "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -760,51 +709,66 @@ "title": "Metadata holds the user defined metadata which will passed along the event payload.\n+optional", "type": "object" }, + "oneEventPerChange": { + "title": "OneEventPerChange controls whether to process each change in a repo:refs_changed webhook event as a separate io.argoproj.workflow.v1alpha1. This setting is useful when multiple tags are\npushed simultaneously for the same commit, and each tag needs to independently trigger an action, such as a distinct workflow in Argo Workflows. When enabled, the\nBitbucketServerEventSource publishes an individual BitbucketServerEventData for each change, ensuring independent processing of each tag or reference update in a\nsingle webhook event.\n+optional", + "type": "boolean" + }, "projectKey": { - "title": "DeprecatedProjectKey is the key of project for which integration needs to set up\nDeprecated: use Repositories instead. Will be unsupported in v1.8\n+optional", + "title": "DeprecatedProjectKey is the key of project for which integration needs to set up.\nDeprecated: use Repositories instead. 
Will be unsupported in v1.8.\n+optional", "type": "string" }, + "projects": { + "items": { + "type": "string" + }, + "title": "Projects holds a list of projects for which integration needs to set up, this will add the webhook to all repositories in the project.\n+optional", + "type": "array" + }, "repositories": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketServerRepository" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerRepository" }, - "title": "Repositories holds a list of repositories for which integration needs to set up\n+optional", + "title": "Repositories holds a list of repositories for which integration needs to set up.\n+optional", "type": "array" }, "repositorySlug": { - "title": "DeprecatedRepositorySlug is the slug of the repository for which integration needs to set up\nDeprecated: use Repositories instead. Will be unsupported in v1.8\n+optional", + "title": "DeprecatedRepositorySlug is the slug of the repository for which integration needs to set up.\nDeprecated: use Repositories instead. 
Will be unsupported in v1.8.\n+optional", "type": "string" }, + "skipBranchRefsChangedOnOpenPR": { + "title": "SkipBranchRefsChangedOnOpenPR bypasses the event repo:refs_changed for branches whenever there's an associated open pull request.\nThis helps in optimizing the event handling process by avoiding unnecessary triggers for branch reference changes that are already part of a pull request under review.\n+optional", + "type": "boolean" + }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the bitbucketserver client.\n+optional" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", - "title": "Webhook holds configuration to run a http server" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", + "description": "Webhook holds configuration to run a http server." 
}, "webhookSecret": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "WebhookSecret is reference to K8s secret which holds the bitbucket webhook secret (for HMAC validation)" + "title": "WebhookSecret is reference to K8s secret which holds the bitbucket webhook secret (for HMAC validation).\n+optional" } }, "title": "BitbucketServerEventSource refers to event-source related to Bitbucket Server events", "type": "object" }, - "io.argoproj.events.v1alpha1.BitbucketServerRepository": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerRepository": { "properties": { "projectKey": { - "title": "ProjectKey is the key of project for which integration needs to set up", + "description": "ProjectKey is the key of project for which integration needs to set up.", "type": "string" }, "repositorySlug": { - "title": "RepositorySlug is the slug of the repository for which integration needs to set up", + "description": "RepositorySlug is the slug of the repository for which integration needs to set up.", "type": "string" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.CalendarEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CalendarEventSource": { "properties": { "exclusionDates": { "description": "ExclusionDates defines the list of DATE-TIME exceptions for recurring events.", @@ -814,7 +778,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "interval": { @@ -829,7 +793,7 @@ "type": "object" }, "persistence": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventPersistence", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventPersistence", "title": "Persistence hold the configuration for event persistence" }, "schedule": { @@ -844,7 +808,7 @@ "title": 
"CalendarEventSource describes a time based dependency. One of the fields (schedule, interval, or recurrence) must be passed.\nSchedule takes precedence over interval; interval takes precedence over recurrence", "type": "object" }, - "io.argoproj.events.v1alpha1.CatchupConfiguration": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CatchupConfiguration": { "properties": { "enabled": { "title": "Enabled enables to triggered the missed schedule when eventsource restarts", @@ -857,7 +821,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.Condition": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Condition": { "properties": { "lastTransitionTime": { "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.Time", @@ -883,7 +847,7 @@ "title": "Condition contains details about resource state", "type": "object" }, - "io.argoproj.events.v1alpha1.ConditionsResetByTime": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetByTime": { "properties": { "cron": { "title": "Cron is a cron-like expression. For reference, see: https://en.wikipedia.org/wiki/Cron", @@ -896,16 +860,16 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.ConditionsResetCriteria": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetCriteria": { "properties": { "byTime": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConditionsResetByTime", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetByTime", "title": "Schedule is a cron-like expression. 
For reference, see: https://en.wikipedia.org/wiki/Cron" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.ConfigMapPersistence": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConfigMapPersistence": { "properties": { "createIfNotExist": { "title": "CreateIfNotExist will create configmap if it doesn't exists", @@ -918,7 +882,46 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.CustomTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Container": { + "properties": { + "env": { + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" + }, + "title": "+optional", + "type": "array" + }, + "envFrom": { + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.EnvFromSource" + }, + "title": "+optional", + "type": "array" + }, + "imagePullPolicy": { + "title": "+optional", + "type": "string" + }, + "resources": { + "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements", + "title": "+optional" + }, + "securityContext": { + "$ref": "#/definitions/io.k8s.api.core.v1.SecurityContext", + "title": "+optional" + }, + "volumeMounts": { + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.VolumeMount" + }, + "title": "+optional", + "type": "array" + } + }, + "title": "Container defines customized spec for a container", + "type": "object" + }, + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CustomTrigger": { "description": "CustomTrigger refers to the specification of the custom trigger.", "properties": { "certSecret": { @@ -928,14 +931,14 @@ "parameters": { "description": "Parameters is the list of parameters that is applied to resolved custom trigger trigger object.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", 
"items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -961,7 +964,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.DataFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.DataFilter": { "properties": { "comparator": { "description": "Comparator compares the event data with a user given value.\nCan be \"\u003e=\", \"\u003e\", \"=\", \"!=\", \"\u003c\", or \"\u003c=\".\nIs optional, and if left blank treated as equality \"=\".", @@ -990,7 +993,7 @@ "title": "DataFilter describes constraints and filters for event data\nRegular Expressions are purposefully not a feature as they are overkill for our uses here\nSee Rob Pike's Post: https://commandcenter.blogspot.com/2011/08/regular-expressions-in-lexing-and.html", "type": "object" }, - "io.argoproj.events.v1alpha1.EmailTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmailTrigger": { "description": "EmailTrigger refers to the specification of the email notification trigger.", "properties": { "body": { @@ -1007,7 +1010,7 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" @@ -1038,7 +1041,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.EmitterEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmitterEventSource": { "properties": { "broker": { "description": "Broker URI to connect to.", @@ -1053,11 +1056,11 @@ "type": "string" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff holds parameters applied to connection.\n+optional" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -1076,7 +1079,7 @@ "title": "Password to use to connect to broker\n+optional" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the emitter client.\n+optional" }, "username": { @@ -1087,7 +1090,7 @@ "title": "EmitterEventSource describes the event source for emitter\nMore info at https://emitter.io/develop/getting-started/", "type": "object" }, - "io.argoproj.events.v1alpha1.EventContext": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventContext": { "properties": { "datacontenttype": { "description": "DataContentType - A MIME (RFC2046) string describing the media type of `data`.", @@ -1121,7 +1124,7 @@ "title": "EventContext holds the context of the cloudevent received from an event source.\n+protobuf.options.(gogoproto.goproto_stringer)=false", "type": "object" }, - "io.argoproj.events.v1alpha1.EventDependency": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependency": { "properties": { "eventName": { "title": "EventName is the name of the event", @@ -1132,7 +1135,7 @@ "type": "string" }, "filters": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventDependencyFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyFilter", "title": "Filters and rules governing toleration of success and constraints on the context and data of an event" }, "filtersLogicalOperator": { @@ -1144,23 +1147,23 @@ "type": "string" }, "transform": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.EventDependencyTransformer", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyTransformer", "title": "Transform transforms the event data" } }, "title": "EventDependency describes a dependency", "type": "object" }, - "io.argoproj.events.v1alpha1.EventDependencyFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyFilter": { "description": "EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1.", "properties": { "context": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventContext", "title": "Context filter constraints" }, "data": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.DataFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.DataFilter" }, "title": "Data filter constraints with escalation", "type": "array" @@ -1176,7 +1179,7 @@ "exprs": { "description": "Exprs contains the list of expressions evaluated against the event payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ExprFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ExprFilter" }, "type": "array" }, @@ -1185,13 +1188,13 @@ "type": "string" }, "time": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TimeFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TimeFilter", "title": "Time filter on the event with escalation" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.EventDependencyTransformer": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyTransformer": { "properties": { "jq": { "title": "JQ holds the jq command applied for transformation\n+optional", @@ -1205,36 +1208,36 @@ "title": "EventDependencyTransformer transforms the event", "type": 
"object" }, - "io.argoproj.events.v1alpha1.EventPersistence": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventPersistence": { "properties": { "catchup": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CatchupConfiguration", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CatchupConfiguration", "title": "Catchup enables to triggered the missed schedule when eventsource restarts" }, "configMap": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConfigMapPersistence", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConfigMapPersistence", "title": "ConfigMap holds configmap details for persistence" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.EventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource": { "properties": { "metadata": { "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" }, "spec": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceSpec" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceSpec" }, "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceStatus", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceStatus", "title": "+optional" } }, "title": "EventSource is the definition of a eventsource resource\n+genclient\n+kubebuilder:resource:shortName=es\n+kubebuilder:subresource:status\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.EventSourceFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter": { "properties": { "expression": { "type": "string" @@ -1242,11 +1245,11 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.EventSourceList": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceList": { "properties": { "items": { 
"items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "type": "array" }, @@ -1257,60 +1260,60 @@ "title": "EventSourceList is the list of eventsource resources\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object", "type": "object" }, - "io.argoproj.events.v1alpha1.EventSourceSpec": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceSpec": { "properties": { "amqp": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPEventSource" }, "title": "AMQP event sources", "type": "object" }, "azureEventsHub": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureEventsHubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventsHubEventSource" }, "title": "AzureEventsHub event sources", "type": "object" }, "azureQueueStorage": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureQueueStorageEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureQueueStorageEventSource" }, "title": "AzureQueueStorage event source", "type": "object" }, "azureServiceBus": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureServiceBusEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusEventSource" }, "title": "Azure Service Bus event source", "type": "object" }, "bitbucket": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketEventSource" }, "title": "Bitbucket event sources", "type": "object" }, "bitbucketserver": { 
"additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketServerEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerEventSource" }, "title": "Bitbucket Server event sources", "type": "object" }, "calendar": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CalendarEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CalendarEventSource" }, "title": "Calendar event sources", "type": "object" }, "emitter": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EmitterEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmitterEventSource" }, "title": "Emitter event source", "type": "object" @@ -1321,105 +1324,105 @@ }, "file": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.FileEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileEventSource" }, "title": "File event sources", "type": "object" }, "generic": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GenericEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GenericEventSource" }, "title": "Generic event source", "type": "object" }, "gerrit": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GerritEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GerritEventSource" }, "title": "Gerrit event source", "type": "object" }, "github": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GithubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubEventSource" }, "title": "Github event sources", "type": "object" }, "gitlab": { "additionalProperties": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.GitlabEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitlabEventSource" }, "title": "Gitlab event sources", "type": "object" }, "hdfs": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.HDFSEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HDFSEventSource" }, "title": "HDFS event sources", "type": "object" }, "kafka": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaEventSource" }, "title": "Kafka event sources", "type": "object" }, "minio": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Artifact" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact" }, "title": "Minio event sources", "type": "object" }, "mqtt": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.MQTTEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.MQTTEventSource" }, "title": "MQTT event sources", "type": "object" }, "nats": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSEventsSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSEventsSource" }, "title": "NATS event sources", "type": "object" }, "nsq": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NSQEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NSQEventSource" }, "title": "NSQ event source", "type": "object" }, "pubSub": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PubSubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PubSubEventSource" }, "title": "PubSub 
event sources", "type": "object" }, "pulsar": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PulsarEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarEventSource" }, "title": "Pulsar event source", "type": "object" }, "redis": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RedisEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisEventSource" }, "title": "Redis event source", "type": "object" }, "redisStream": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RedisStreamEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisStreamEventSource" }, "title": "Redis stream source", "type": "object" @@ -1430,64 +1433,64 @@ }, "resource": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ResourceEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceEventSource" }, "title": "Resource event sources", "type": "object" }, "service": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Service", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Service", "title": "Service is the specifications of the service to expose the event source\n+optional" }, "sftp": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SFTPEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SFTPEventSource" }, "title": "SFTP event sources", "type": "object" }, "slack": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackEventSource" }, "title": "Slack event sources", "type": "object" }, "sns": { "additionalProperties": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.SNSEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SNSEventSource" }, "title": "SNS event sources", "type": "object" }, "sqs": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SQSEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SQSEventSource" }, "title": "SQS event sources", "type": "object" }, "storageGrid": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StorageGridEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridEventSource" }, "title": "StorageGrid event sources", "type": "object" }, "stripe": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StripeEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StripeEventSource" }, "title": "Stripe event sources", "type": "object" }, "template": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Template", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template", "title": "Template is the pod specification for the event source\n+optional" }, "webhook": { "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookEventSource" }, "title": "Webhook event sources", "type": "object" @@ -1496,16 +1499,16 @@ "title": "EventSourceSpec refers to specification of event-source resource", "type": "object" }, - "io.argoproj.events.v1alpha1.EventSourceStatus": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceStatus": { "properties": { "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Status" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status" } }, "title": 
"EventSourceStatus holds the status of the event-source resource", "type": "object" }, - "io.argoproj.events.v1alpha1.ExprFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ExprFilter": { "properties": { "expr": { "description": "Expr refers to the expression that determines the outcome of the filter.", @@ -1514,14 +1517,14 @@ "fields": { "description": "Fields refers to set of keys that refer to the paths within event payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PayloadField" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PayloadField" }, "type": "array" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.FileArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileArtifact": { "properties": { "path": { "type": "string" @@ -1530,7 +1533,7 @@ "title": "FileArtifact contains information about an artifact in a filesystem", "type": "object" }, - "io.argoproj.events.v1alpha1.FileEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileEventSource": { "description": "FileEventSource describes an event-source for file related events.", "properties": { "eventType": { @@ -1538,7 +1541,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -1553,13 +1556,13 @@ "type": "boolean" }, "watchPathConfig": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig", "title": "WatchPathConfig contains configuration about the file path to watch" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.GenericEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GenericEventSource": { "description": "GenericEventSource 
refers to a generic event source. It can be used to implement a custom event source.", "properties": { "authSecret": { @@ -1571,7 +1574,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "insecure": { @@ -1596,10 +1599,10 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.GerritEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GerritEventSource": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "Auth hosts secret selectors for username and password\n+optional" }, "deleteHookOnFinish": { @@ -1614,7 +1617,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "gerritBaseURL": { @@ -1644,14 +1647,14 @@ "type": "boolean" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook holds configuration to run a http server" } }, "title": "GerritEventSource refers to event-source related to gerrit events", "type": "object" }, - "io.argoproj.events.v1alpha1.GitArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitArtifact": { "properties": { "branch": { "title": "Branch to use to pull trigger resource\n+optional", @@ -1662,7 +1665,7 @@ "type": "string" }, "creds": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitCreds", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitCreds", "title": "Creds contain reference to git 
username and password\n+optional" }, "filePath": { @@ -1678,7 +1681,7 @@ "type": "string" }, "remote": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitRemoteConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitRemoteConfig", "title": "Remote to manage set of tracked repositories. Defaults to \"origin\".\nRefer https://git-scm.com/docs/git-remote\n+optional" }, "sshKeySecret": { @@ -1697,7 +1700,7 @@ "title": "GitArtifact contains information about an artifact stored in git", "type": "object" }, - "io.argoproj.events.v1alpha1.GitCreds": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitCreds": { "properties": { "password": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" @@ -1709,7 +1712,7 @@ "title": "GitCreds contain reference to git username and password", "type": "object" }, - "io.argoproj.events.v1alpha1.GitRemoteConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitRemoteConfig": { "properties": { "name": { "description": "Name of the remote to fetch from.", @@ -1726,7 +1729,7 @@ "title": "GitRemoteConfig contains the configuration of a Git remote", "type": "object" }, - "io.argoproj.events.v1alpha1.GithubAppCreds": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubAppCreds": { "properties": { "appID": { "title": "AppID refers to the GitHub App ID for the application you created", @@ -1743,7 +1746,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.GithubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubEventSource": { "properties": { "active": { "title": "Active refers to status of the webhook for event deliveries.\nhttps://developer.github.com/webhooks/creating/#active\n+optional", @@ -1769,11 +1772,11 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "githubApp": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GithubAppCreds", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubAppCreds", "title": "GitHubApp holds the GitHub app credentials\n+optional" }, "githubBaseURL": { @@ -1813,7 +1816,7 @@ "repositories": { "description": "Repositories holds the information of repositories, which uses repo owner as the key,\nand list of repo names as the value. Not required if Organizations is set.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.OwnedRepositories" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OwnedRepositories" }, "type": "array" }, @@ -1822,7 +1825,7 @@ "type": "string" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook refers to the configuration required to run a http server" }, "webhookSecret": { @@ -1833,7 +1836,7 @@ "title": "GithubEventSource refers to event-source for github related events", "type": "object" }, - "io.argoproj.events.v1alpha1.GitlabEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitlabEventSource": { "properties": { "accessToken": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -1855,7 +1858,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "gitlabBaseURL": { @@ -1884,7 +1887,7 @@ "items": { "type": "string" }, - "title": "List of project IDs or project namespace paths like \"whynowy/test\". 
Projects and groups cannot be empty at the same time.\n+optional", + "title": "List of project IDs or project namespace paths like \"whynowy/test\".\nIf neither a project nor a group is defined, the EventSource will not manage webhooks.\n+optional", "type": "array" }, "secretToken": { @@ -1892,14 +1895,14 @@ "title": "SecretToken references to k8 secret which holds the Secret Token used by webhook config" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook holds configuration to run a http server" } }, "title": "GitlabEventSource refers to event-source related to Gitlab events", "type": "object" }, - "io.argoproj.events.v1alpha1.HDFSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HDFSEventSource": { "properties": { "addresses": { "items": { @@ -1912,7 +1915,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "hdfsUser": { @@ -1955,16 +1958,16 @@ "type": "string" }, "watchPathConfig": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig" } }, "title": "HDFSEventSource refers to event-source for HDFS related events", "type": "object" }, - "io.argoproj.events.v1alpha1.HTTPTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HTTPTrigger": { "properties": { "basicAuth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "BasicAuth configuration for the http request.\n+optional" }, "headers": { @@ -1981,19 +1984,19 @@ "parameters": { "description": 
"Parameters is the list of key-value extracted from event's payload that are applied to\nthe HTTP trigger resource.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "payload": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "secureHeaders": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SecureHeader" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader" }, "title": "Secure Headers stored in Kubernetes Secrets for the HTTP requests.\n+optional", "type": "array" @@ -2003,7 +2006,7 @@ "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the HTTP client.\n+optional" }, "url": { @@ -2014,7 +2017,7 @@ "title": "HTTPTrigger is the trigger for the HTTP request", "type": "object" }, - "io.argoproj.events.v1alpha1.Int64OrString": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Int64OrString": { "properties": { "int64Val": { "type": "string" @@ -2028,10 +2031,20 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.K8SResourcePolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResource": { + "description": "K8SResource represent arbitrary structured data.", + "properties": { + "value": { + "format": "byte", + "type": "string" + } + }, + "type": "object" + }, + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResourcePolicy": { "properties": { "backoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff before checking resource state" }, "errorOnBackoffTimeout": { @@ -2049,7 +2062,7 @@ "title": "K8SResourcePolicy refers to the policy used to check the state of K8s based triggers using labels", "type": "object" }, - "io.argoproj.events.v1alpha1.KafkaConsumerGroup": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaConsumerGroup": { "properties": { "groupName": { "title": "The name for the consumer group to use", @@ -2066,22 +2079,22 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.KafkaEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaEventSource": { "properties": { "config": { "description": "Yaml format Sarama config for Kafka connection.\nIt follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go\ne.g.\n\nconsumer:\n fetch:\n min: 1\nnet:\n MaxOpenRequests: 5\n\n+optional", "type": "string" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "description": "Backoff holds parameters applied to connection." 
}, "consumerGroup": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaConsumerGroup", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaConsumerGroup", "title": "Consumer group for kafka client\n+optional" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -2104,11 +2117,11 @@ "type": "string" }, "sasl": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SASLConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig", "title": "SASL configuration for the kafka client\n+optional" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the kafka client.\n+optional" }, "topic": { @@ -2127,7 +2140,7 @@ "title": "KafkaEventSource refers to event-source for Kafka related events", "type": "object" }, - "io.argoproj.events.v1alpha1.KafkaTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaTrigger": { "description": "KafkaTrigger refers to the specification of the Kafka trigger.", "properties": { "compress": { @@ -2138,10 +2151,17 @@ "title": "FlushFrequency refers to the frequency in milliseconds to flush batches.\nDefaults to 500 milliseconds.\n+optional", "type": "integer" }, + "headers": { + "additionalProperties": { + "type": "string" + }, + "title": "Headers for the Kafka Messages.\n+optional", + "type": "object" + }, "parameters": { "description": "Parameters is the list of parameters that is applied to resolved Kafka trigger object.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, 
"type": "array" }, @@ -2156,7 +2176,7 @@ "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -2165,15 +2185,22 @@ "type": "integer" }, "sasl": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SASLConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig", "title": "SASL configuration for the kafka client\n+optional" }, "schemaRegistry": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SchemaRegistryConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SchemaRegistryConfig", "title": "Schema Registry configuration to producer message with avro format\n+optional" }, + "secureHeaders": { + "items": { + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader" + }, + "title": "Secure Headers stored in Kubernetes Secrets for the Kafka messages.\n+optional", + "type": "array" + }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the Kafka producer.\n+optional" }, "topic": { @@ -2191,7 +2218,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.LogTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.LogTrigger": { "properties": { "intervalSeconds": { "format": "uint64", @@ -2201,10 +2228,10 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.MQTTEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.MQTTEventSource": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "Auth hosts secret selectors for username and password\n+optional" }, "clientId": { @@ -2212,11 +2239,11 @@ "type": "string" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "description": "ConnectionBackoff holds backoff applied to connection." }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -2231,7 +2258,7 @@ "type": "object" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the mqtt client.\n+optional" }, "topic": { @@ -2246,7 +2273,7 @@ "title": "MQTTEventSource refers to event-source for MQTT related events", "type": "object" }, - "io.argoproj.events.v1alpha1.Metadata": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata": { "properties": { "annotations": { "additionalProperties": { @@ -2264,10 +2291,10 @@ "title": "Metadata holds the annotations and labels of an event source pod", "type": "object" }, - "io.argoproj.events.v1alpha1.NATSAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth": { "properties": { "basic": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "Baisc auth with username and password\n+optional" }, "credential": { @@ -2286,18 +2313,18 @@ "title": "NATSAuth refers to the auth info for NATS EventSource", "type": "object" }, - "io.argoproj.events.v1alpha1.NATSEventsSource": { + 
"github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSEventsSource": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth", "title": "Auth information\n+optional" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "description": "ConnectionBackoff holds backoff applied to connection." }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -2311,12 +2338,16 @@ "title": "Metadata holds the user defined metadata which will passed along the event payload.\n+optional", "type": "object" }, + "queue": { + "title": "Queue is the name of the queue group to subscribe as if specified. Uses QueueSubscribe\nlogic to subscribe as queue group. 
If the queue is empty, uses default Subscribe logic.\n+optional", + "type": "string" + }, "subject": { "title": "Subject holds the name of the subject onto which messages are published", "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the nats client.\n+optional" }, "url": { @@ -2327,18 +2358,22 @@ "title": "NATSEventsSource refers to event-source for NATS related events", "type": "object" }, - "io.argoproj.events.v1alpha1.NATSTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSTrigger": { "description": "NATSTrigger refers to the specification of the NATS trigger.", "properties": { + "auth": { + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth", + "title": "AuthInformation\n+optional" + }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "payload": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -2347,7 +2382,7 @@ "type": "string" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the NATS producer.\n+optional" }, "url": { @@ -2357,18 +2392,18 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.NSQEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NSQEventSource": { "properties": { "channel": { "title": "Channel used for subscription", "type": "string" }, "connectionBackoff": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff holds parameters applied to connection.\n+optional" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "hostAddress": { @@ -2387,7 +2422,7 @@ "type": "object" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the nsq client.\n+optional" }, "topic": { @@ -2398,7 +2433,7 @@ "title": "NSQEventSource describes the event source for NSQ PubSub\nMore info at https://godoc.org/github.com/nsqio/go-nsq", "type": "object" }, - "io.argoproj.events.v1alpha1.OpenWhiskTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OpenWhiskTrigger": { "description": "OpenWhiskTrigger refers to the specification of the OpenWhisk trigger.", "properties": { "actionName": { @@ -2419,7 +2454,7 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" @@ -2427,7 +2462,7 @@ "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -2438,7 +2473,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.OwnedRepositories": { + 
"github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OwnedRepositories": { "properties": { "names": { "items": { @@ -2454,7 +2489,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.PayloadField": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PayloadField": { "description": "PayloadField binds a value at path within the event payload against a name.", "properties": { "name": { @@ -2468,7 +2503,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.PubSubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PubSubEventSource": { "description": "PubSubEventSource refers to event-source for GCP PubSub related events.", "properties": { "credentialSecret": { @@ -2480,7 +2515,7 @@ "type": "boolean" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -2513,7 +2548,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.PulsarEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarEventSource": { "properties": { "authAthenzParams": { "additionalProperties": { @@ -2531,11 +2566,11 @@ "title": "Authentication token for the pulsar client.\nEither token or athenz can be set to use auth.\n+optional" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff holds parameters applied to connection.\n+optional" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -2550,7 +2585,7 @@ "type": "object" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the pulsar client.\n+optional" }, "tlsAllowInsecureConnection": { @@ -2584,7 +2619,7 @@ "title": "PulsarEventSource describes the event source for Apache Pulsar", "type": "object" }, - "io.argoproj.events.v1alpha1.PulsarTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarTrigger": { "description": "PulsarTrigger refers to the specification of the Pulsar trigger.", "properties": { "authAthenzParams": { @@ -2603,25 +2638,25 @@ "title": "Authentication token for the pulsar client.\nEither token or athenz can be set to use auth.\n+optional" }, "connectionBackoff": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Backoff holds parameters applied to connection.\n+optional" }, "parameters": { "description": "Parameters is the list of parameters that is applied to resolved Kafka trigger object.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the pulsar client.\n+optional" }, "tlsAllowInsecureConnection": { @@ -2647,7 +2682,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.RateLimit": { + 
"github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RateLimit": { "properties": { "requestsPerUnit": { "type": "integer" @@ -2659,7 +2694,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.RedisEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisEventSource": { "properties": { "channels": { "items": { @@ -2672,7 +2707,7 @@ "type": "integer" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "hostAddress": { @@ -2699,7 +2734,7 @@ "title": "Password required for authentication if any.\n+optional" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the redis client.\n+optional" }, "username": { @@ -2710,7 +2745,7 @@ "title": "RedisEventSource describes an event source for the Redis PubSub.\nMore info at https://godoc.org/github.com/go-redis/redis#example-PubSub", "type": "object" }, - "io.argoproj.events.v1alpha1.RedisStreamEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisStreamEventSource": { "properties": { "consumerGroup": { "title": "ConsumerGroup refers to the Redis stream consumer group that will be\ncreated on all redis streams. Messages are read through this group. 
Defaults to 'argo-events-cg'\n+optional", @@ -2721,7 +2756,7 @@ "type": "integer" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "hostAddress": { @@ -2751,7 +2786,7 @@ "type": "array" }, "tls": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig", "title": "TLS configuration for the redis client.\n+optional" }, "username": { @@ -2762,17 +2797,7 @@ "title": "RedisStreamEventSource describes an event source for\nRedis streams (https://redis.io/topics/streams-intro)", "type": "object" }, - "io.argoproj.events.v1alpha1.Resource": { - "description": "Resource represent arbitrary structured data.", - "properties": { - "value": { - "format": "byte", - "type": "string" - } - }, - "type": "object" - }, - "io.argoproj.events.v1alpha1.ResourceEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceEventSource": { "description": "ResourceEventSource refers to a event-source for K8s resource related events.", "properties": { "eventTypes": { @@ -2783,7 +2808,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ResourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceFilter", "title": "Filter is applied on the metadata of the resource\nIf you apply filter, then the internal event informer will only monitor objects that pass the filter.\n+optional" }, "groupVersionResource": { @@ -2804,7 +2829,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.ResourceFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceFilter": { "properties": { "afterStart": { "title": "If the resource is created after the start time then the event is treated as valid.\n+optional", @@ 
-2816,14 +2841,14 @@ }, "fields": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Selector" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector" }, "title": "Fields provide field filters similar to K8s field selector\n(see https://kubernetes.io/docs/concepts/overview/working-with-objects/field-selectors/).\nUnlike K8s field selector, it supports arbitrary fileds like \"spec.serviceAccountName\",\nand the value could be a string or a regex.\nSame as K8s field selector, operator \"=\", \"==\" and \"!=\" are supported.\n+optional", "type": "array" }, "labels": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Selector" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector" }, "title": "Labels provide listing options to K8s API to watch resource/s.\nRefer https://kubernetes.io/docs/concepts/overview/working-with-objects/label-selectors/ for more io.argoproj.workflow.v1alpha1.\nUnlike K8s field selector, multiple values are passed as comma separated values instead of list of values.\nEg: value: value1,value2.\nSame as K8s label selector, operator \"=\", \"==\", \"!=\", \"exists\", \"!\", \"notin\", \"in\", \"gt\" and \"lt\"\nare supported\n+optional", "type": "array" @@ -2836,13 +2861,13 @@ "title": "ResourceFilter contains K8s ObjectMeta information to further filter resource event objects", "type": "object" }, - "io.argoproj.events.v1alpha1.S3Artifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact": { "properties": { "accessKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" }, "bucket": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Bucket" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Bucket" }, "caCertificate": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" @@ -2857,7 +2882,7 @@ "type": "array" }, "filter": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.S3Filter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Filter" }, "insecure": { "type": "boolean" @@ -2878,7 +2903,7 @@ "title": "S3Artifact contains information about an S3 connection and bucket", "type": "object" }, - "io.argoproj.events.v1alpha1.S3Bucket": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Bucket": { "properties": { "key": { "type": "string" @@ -2890,7 +2915,7 @@ "title": "S3Bucket contains information to describe an S3 Bucket", "type": "object" }, - "io.argoproj.events.v1alpha1.S3Filter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Filter": { "properties": { "prefix": { "type": "string" @@ -2902,7 +2927,7 @@ "title": "S3Filter represents filters to apply to bucket notifications for specifying constraints on objects", "type": "object" }, - "io.argoproj.events.v1alpha1.SASLConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig": { "properties": { "mechanism": { "title": "SASLMechanism is the name of the enabled SASL mechanism.\nPossible values: OAUTHBEARER, PLAIN (defaults to PLAIN).\n+optional", @@ -2920,7 +2945,7 @@ "title": "SASLConfig refers to SASL configuration for a client", "type": "object" }, - "io.argoproj.events.v1alpha1.SFTPEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SFTPEventSource": { "description": "SFTPEventSource describes an event-source for sftp related events.", "properties": { "address": { @@ -2932,7 +2957,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -2952,20 +2977,20 @@ }, "sshKeySecret": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", - "title": "SSHKeySecret refers to the secret that contains SSH key" + "description": 
"SSHKeySecret refers to the secret that contains SSH key. Key needs to contain private key and public key." }, "username": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", "description": "Username required for authentication if any." }, "watchPathConfig": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig", "title": "WatchPathConfig contains configuration about the file path to watch" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.SNSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SNSEventSource": { "properties": { "accessKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -2976,7 +3001,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -3007,14 +3032,14 @@ "type": "boolean" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook configuration for http server" } }, "title": "SNSEventSource refers to event-source for AWS SNS related events", "type": "object" }, - "io.argoproj.events.v1alpha1.SQSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SQSEventSource": { "properties": { "accessKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -3029,7 +3054,7 @@ "type": "string" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "jsonBody": { @@ -3075,10 +3100,10 @@ "title": "SQSEventSource refers to event-source for AWS 
SQS related events", "type": "object" }, - "io.argoproj.events.v1alpha1.SchemaRegistryConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SchemaRegistryConfig": { "properties": { "auth": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth", "title": "+optional\nSchemaRegistry - basic authentication" }, "schemaId": { @@ -3093,20 +3118,20 @@ "title": "SchemaRegistryConfig refers to configuration for a client", "type": "object" }, - "io.argoproj.events.v1alpha1.SecureHeader": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader": { "properties": { "name": { "type": "string" }, "valueFrom": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ValueFromSource", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ValueFromSource", "title": "Values can be read from either secrets or configmaps" } }, "title": "SecureHeader refers to HTTP Headers with auth tokens as values", "type": "object" }, - "io.argoproj.events.v1alpha1.Selector": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector": { "description": "Selector represents conditional operation to select K8s objects.", "properties": { "key": { @@ -3124,27 +3149,27 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.Sensor": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor": { "properties": { "metadata": { "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" }, "spec": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SensorSpec" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorSpec" }, "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SensorStatus", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorStatus", "title": "+optional" } }, "title": "Sensor is the definition of a sensor 
resource\n+genclient\n+genclient:noStatus\n+kubebuilder:resource:shortName=sn\n+kubebuilder:subresource:status\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.SensorList": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorList": { "properties": { "items": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" }, "type": "array" }, @@ -3155,12 +3180,12 @@ "title": "SensorList is the list of Sensor resources\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object", "type": "object" }, - "io.argoproj.events.v1alpha1.SensorSpec": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorSpec": { "properties": { "dependencies": { "description": "Dependencies is a list of the events that this sensor is dependent on.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventDependency" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependency" }, "type": "array" }, @@ -3188,13 +3213,13 @@ "type": "integer" }, "template": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Template", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template", "title": "Template is the pod specification for the sensor\n+optional" }, "triggers": { "description": "Triggers is a list of the things that this sensor evokes. 
These are the outputs from this sensor.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Trigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger" }, "type": "array" } @@ -3202,21 +3227,25 @@ "title": "SensorSpec represents desired sensor state", "type": "object" }, - "io.argoproj.events.v1alpha1.SensorStatus": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorStatus": { "description": "SensorStatus contains information about the status of a sensor.", "properties": { "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Status" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.Service": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Service": { "properties": { "clusterIP": { "title": "clusterIP is the IP address of the service and is usually assigned\nrandomly by the master. If an address is specified manually and is not in\nuse by others, it will be allocated to the service; otherwise, creation\nof the service will fail. This field can not be changed through updates.\nValid values are \"None\", empty string (\"\"), or a valid IP address. \"None\"\ncan be specified for headless services when proxying is not required.\nMore info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies\n+optional", "type": "string" }, + "metadata": { + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata", + "title": "Metadata sets the pods's metadata, i.e. 
annotations and labels\ndefault={annotations: {}, labels: {}}" + }, "ports": { "items": { "$ref": "#/definitions/io.k8s.api.core.v1.ServicePort" @@ -3228,10 +3257,10 @@ "title": "Service holds the service information eventsource exposes", "type": "object" }, - "io.argoproj.events.v1alpha1.SlackEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackEventSource": { "properties": { "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "metadata": { @@ -3250,14 +3279,14 @@ "title": "Token for URL verification handshake" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook holds configuration for a REST endpoint" } }, "title": "SlackEventSource refers to event-source for Slack related events", "type": "object" }, - "io.argoproj.events.v1alpha1.SlackSender": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackSender": { "properties": { "icon": { "title": "Icon is the Slack application's icon, e.g. 
:robot_face: or https://example.com/image.png\n+optional", @@ -3270,7 +3299,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.SlackThread": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackThread": { "properties": { "broadcastMessageToChannel": { "title": "BroadcastMessageToChannel allows to also broadcast the message from the thread to the channel\n+optional", @@ -3283,7 +3312,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.SlackTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackTrigger": { "description": "SlackTrigger refers to the specification of the slack notification trigger.", "properties": { "attachments": { @@ -3304,13 +3333,13 @@ }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "type": "array" }, "sender": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackSender", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackSender", "title": "Sender refers to additional configuration of the Slack application that sends the message.\n+optional" }, "slackToken": { @@ -3318,13 +3347,13 @@ "description": "SlackToken refers to the Kubernetes secret that holds the slack token required to send messages." 
}, "thread": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackThread", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackThread", "title": "Thread refers to additional options for sending messages to a Slack thread.\n+optional" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.StandardK8STrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StandardK8STrigger": { "properties": { "liveObject": { "title": "LiveObject specifies whether the resource should be directly fetched from K8s instead\nof being marshaled from the resource artifact. If set to true, the resource artifact\nmust contain the information required to uniquely identify the resource in the cluster,\nthat is, you must specify \"apiVersion\", \"kind\" as well as \"name\" and \"namespace\" meta\ndata.\nOnly valid for operation type `update`\n+optional", @@ -3337,7 +3366,7 @@ "parameters": { "description": "Parameters is the list of parameters that is applied to resolved K8s trigger object.", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "type": "array" }, @@ -3346,19 +3375,19 @@ "type": "string" }, "source": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArtifactLocation", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation", "title": "Source of the K8s resource file(s)" } }, "title": "StandardK8STrigger is the standard Kubernetes resource trigger", "type": "object" }, - "io.argoproj.events.v1alpha1.Status": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status": { "description": "Status is a common structure which can be used for Status field.", "properties": { "conditions": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Condition" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Condition" }, "title": "Conditions are the latest available observations of a resource's current state.\n+optional\n+patchMergeKey=type\n+patchStrategy=merge", "type": "array" @@ -3366,7 +3395,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.StatusPolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StatusPolicy": { "properties": { "allow": { "items": { @@ -3379,7 +3408,7 @@ "title": "StatusPolicy refers to the policy used to check the state of the trigger using response status", "type": "object" }, - "io.argoproj.events.v1alpha1.StorageGridEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridEventSource": { "properties": { "apiURL": { "description": "APIURL is the url of the storagegrid api.", @@ -3400,7 +3429,7 @@ "type": "array" }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StorageGridFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridFilter", "description": "Filter on object key which caused the notification." 
}, "metadata": { @@ -3419,14 +3448,14 @@ "type": "string" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook holds configuration for a REST endpoint" } }, "title": "StorageGridEventSource refers to event-source for StorageGrid related events", "type": "object" }, - "io.argoproj.events.v1alpha1.StorageGridFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridFilter": { "properties": { "prefix": { "type": "string" @@ -3438,7 +3467,7 @@ "title": "StorageGridFilter represents filters to apply to bucket notifications for specifying constraints on objects\n+k8s:openapi-gen=true", "type": "object" }, - "io.argoproj.events.v1alpha1.StripeEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StripeEventSource": { "properties": { "apiKey": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -3463,14 +3492,14 @@ "type": "object" }, "webhook": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext", "title": "Webhook holds configuration for a REST endpoint" } }, "title": "StripeEventSource describes the event source for stripe webhook notifications\nMore info at https://stripe.com/docs/webhooks", "type": "object" }, - "io.argoproj.events.v1alpha1.TLSConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig": { "description": "TLSConfig refers to TLS configuration for a client.", "properties": { "caCertSecret": { @@ -3492,14 +3521,14 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.Template": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template": { "properties": { "affinity": { "$ref": "#/definitions/io.k8s.api.core.v1.Affinity", "title": "If specified, the pod's scheduling constraints\n+optional" }, 
"container": { - "$ref": "#/definitions/io.k8s.api.core.v1.Container", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Container", "title": "Container is the main container image to run in the sensor pod\n+optional" }, "imagePullSecrets": { @@ -3510,7 +3539,7 @@ "type": "array" }, "metadata": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Metadata", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata", "title": "Metadata sets the pods's metadata, i.e. annotations and labels" }, "nodeSelector": { @@ -3551,10 +3580,10 @@ "type": "array" } }, - "title": "Template holds the information of a sensor deployment template", + "title": "Template holds the information of a deployment template", "type": "object" }, - "io.argoproj.events.v1alpha1.TimeFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TimeFilter": { "description": "TimeFilter describes a window in time.\nIt filters out events that occur outside the time limits.\nIn other words, only events that occur after Start and before Stop\nwill pass this filter.", "properties": { "start": { @@ -3568,40 +3597,44 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.Trigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger": { "properties": { "atLeastOnce": { "title": "AtLeastOnce determines the trigger execution semantics.\nDefaults to false. Trigger execution will use at-most-once semantics.\nIf set to true, Trigger execution will switch to at-least-once semantics.\n+kubebuilder:default=false\n+optional", "type": "boolean" }, + "dlqTrigger": { + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger", + "title": "If the trigger fails, it will retry up to the configured number of\nretries. If the maximum retries are reached and the trigger is set to\nexecute atLeastOnce, the dead letter queue (DLQ) trigger will be invoked if\nspecified. 
Invoking the dead letter queue trigger helps prevent data\nloss.\n+optional" + }, "parameters": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" }, "title": "Parameters is the list of parameters applied to the trigger template definition", "type": "array" }, "policy": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerPolicy", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerPolicy", "title": "Policy to configure backoff and execution criteria for the trigger\n+optional" }, "rateLimit": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RateLimit", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RateLimit", "title": "Rate limit, default unit is Second\n+optional" }, "retryStrategy": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff", "title": "Retry strategy, defaults to no retry\n+optional" }, "template": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerTemplate", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerTemplate", "description": "Template describes the trigger specification." } }, "title": "Trigger is an action taken, output produced, an event created, a message sent", "type": "object" }, - "io.argoproj.events.v1alpha1.TriggerParameter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter": { "properties": { "dest": { "description": "Dest is the JSONPath of a resource key.\nA path is a series of keys separated by a dot. 
The colon character can be escaped with '.'\nThe -1 key can be used to append a value to an existing array.\nSee https://github.com/tidwall/sjson#path-syntax for more information about how this is used.", @@ -3612,14 +3645,14 @@ "type": "string" }, "src": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameterSource", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameterSource", "title": "Src contains a source reference to the value of the parameter from a dependency" } }, "title": "TriggerParameter indicates a passed parameter to a service template", "type": "object" }, - "io.argoproj.events.v1alpha1.TriggerParameterSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameterSource": { "properties": { "contextKey": { "description": "ContextKey is the JSONPath of the event's (JSON decoded) context key\nContextKey is a series of keys separated by a dot. A key may contain wildcard characters '*' and '?'.\nTo access an array value use the index as the key. 
The dot and wildcard characters can be escaped with '\\\\'.\nSee https://github.com/tidwall/gjson#path-syntax for more information on how to use this.", @@ -3653,37 +3686,37 @@ "title": "TriggerParameterSource defines the source for a parameter from a event event", "type": "object" }, - "io.argoproj.events.v1alpha1.TriggerPolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerPolicy": { "properties": { "k8s": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.K8SResourcePolicy", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResourcePolicy", "title": "K8SResourcePolicy refers to the policy used to check the state of K8s based triggers using using labels" }, "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StatusPolicy", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StatusPolicy", "title": "Status refers to the policy used to check the state of the trigger using response status" } }, "title": "TriggerPolicy dictates the policy for the trigger retries", "type": "object" }, - "io.argoproj.events.v1alpha1.TriggerTemplate": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerTemplate": { "description": "TriggerTemplate is the template that describes trigger specification.", "properties": { "argoWorkflow": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArgoWorkflowTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArgoWorkflowTrigger", "title": "ArgoWorkflow refers to the trigger that can perform various operations on an Argo io.argoproj.workflow.v1alpha1.\n+optional" }, "awsLambda": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AWSLambdaTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AWSLambdaTrigger", "title": "AWSLambda refers to the trigger designed to invoke AWS Lambda function with with on-the-fly constructable payload.\n+optional" 
}, "azureEventHubs": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureEventHubsTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventHubsTrigger", "title": "AzureEventHubs refers to the trigger send an event to an Azure Event Hub.\n+optional" }, "azureServiceBus": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureServiceBusTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusTrigger", "title": "AzureServiceBus refers to the trigger designed to place messages on Azure Service Bus\n+optional" }, "conditions": { @@ -3692,33 +3725,33 @@ }, "conditionsReset": { "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConditionsResetCriteria" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetCriteria" }, "title": "Criteria to reset the conditons\n+optional", "type": "array" }, "custom": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CustomTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CustomTrigger", "title": "CustomTrigger refers to the trigger designed to connect to a gRPC trigger server and execute a custom trigger.\n+optional" }, "email": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EmailTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmailTrigger", "title": "Email refers to the trigger designed to send an email notification\n+optional" }, "http": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.HTTPTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HTTPTrigger", "title": "HTTP refers to the trigger designed to dispatch a HTTP request with on-the-fly constructable payload.\n+optional" }, "k8s": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StandardK8STrigger", + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StandardK8STrigger", "title": "StandardK8STrigger refers to the trigger designed to create or update a generic Kubernetes resource.\n+optional" }, "kafka": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaTrigger", "description": "Kafka refers to the trigger designed to place messages on Kafka topic.\n+optional." }, "log": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.LogTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.LogTrigger", "title": "Log refers to the trigger designed to invoke log the io.argoproj.workflow.v1alpha1.\n+optional" }, "name": { @@ -3726,25 +3759,25 @@ "type": "string" }, "nats": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSTrigger", "description": "NATS refers to the trigger designed to place message on NATS subject.\n+optional." 
}, "openWhisk": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.OpenWhiskTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OpenWhiskTrigger", "title": "OpenWhisk refers to the trigger designed to invoke OpenWhisk action.\n+optional" }, "pulsar": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PulsarTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarTrigger", "title": "Pulsar refers to the trigger designed to place messages on Pulsar topic.\n+optional" }, "slack": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackTrigger", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackTrigger", "title": "Slack refers to the trigger designed to send slack notification message.\n+optional" } }, "type": "object" }, - "io.argoproj.events.v1alpha1.URLArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.URLArtifact": { "description": "URLArtifact contains information about an artifact at an http endpoint.", "properties": { "path": { @@ -3758,7 +3791,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.ValueFromSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ValueFromSource": { "properties": { "configMapKeyRef": { "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapKeySelector" @@ -3770,7 +3803,7 @@ "title": "ValueFromSource allows you to reference keys from either a Configmap or Secret", "type": "object" }, - "io.argoproj.events.v1alpha1.WatchPathConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig": { "properties": { "directory": { "title": "Directory to watch for events", @@ -3787,7 +3820,7 @@ }, "type": "object" }, - "io.argoproj.events.v1alpha1.WebhookContext": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext": { "properties": { "authSecret": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector", @@ -3832,19 +3865,74 
@@ "title": "WebhookContext holds a general purpose REST API context", "type": "object" }, - "io.argoproj.events.v1alpha1.WebhookEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookEventSource": { "properties": { "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter", "title": "Filter\n+optional" }, "webhookContext": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } }, "title": "CalendarEventSource describes an HTTP based EventSource", "type": "object" }, + "google.protobuf.Any": { + "properties": { + "type_url": { + "type": "string" + }, + "value": { + "format": "byte", + "type": "string" + } + }, + "type": "object" + }, + "grpc.gateway.runtime.Error": { + "properties": { + "code": { + "type": "integer" + }, + "details": { + "items": { + "$ref": "#/definitions/google.protobuf.Any" + }, + "type": "array" + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "type": "object" + }, + "grpc.gateway.runtime.StreamError": { + "properties": { + "details": { + "items": { + "$ref": "#/definitions/google.protobuf.Any" + }, + "type": "array" + }, + "grpc_code": { + "type": "integer" + }, + "http_code": { + "type": "integer" + }, + "http_status": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "type": "object" + }, "io.argoproj.workflow.v1alpha1.Amount": { "description": "Amount represent a numeric amount.", "type": "number" @@ -9347,6 +9435,7 @@ "type": "object" }, "io.k8s.api.core.v1.GRPCAction": { + "description": "GRPCAction specifies an action involving a GRPC service.", "properties": { "port": { "description": "Port number of the gRPC service. 
Number must be in the range 1 to 65535.", @@ -9553,6 +9642,20 @@ ], "type": "object" }, + "io.k8s.api.core.v1.ImageVolumeSource": { + "description": "ImageVolumeSource represents a image volume resource.", + "properties": { + "pullPolicy": { + "description": "Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise.", + "type": "string" + }, + "reference": { + "description": "Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "type": "string" + } + }, + "type": "object" + }, "io.k8s.api.core.v1.KeyToPath": { "description": "Maps a string key to a path within a volume.", "properties": { @@ -9594,19 +9697,19 @@ "properties": { "exec": { "$ref": "#/definitions/io.k8s.api.core.v1.ExecAction", - "description": "Exec specifies the action to take." + "description": "Exec specifies a command to execute in the container." }, "httpGet": { "$ref": "#/definitions/io.k8s.api.core.v1.HTTPGetAction", - "description": "HTTPGet specifies the http request to perform." + "description": "HTTPGet specifies an HTTP GET request to perform." 
}, "sleep": { "$ref": "#/definitions/io.k8s.api.core.v1.SleepAction", - "description": "Sleep represents the duration that the container should sleep before being terminated." + "description": "Sleep represents a duration that the container should sleep." }, "tcpSocket": { "$ref": "#/definitions/io.k8s.api.core.v1.TCPSocketAction", - "description": "Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for the backward compatibility. There are no validation of this field and lifecycle hooks will fail in runtime when tcp handler is specified." + "description": "Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for backward compatibility. There is no validation of this field and lifecycle hooks will fail at runtime when it is specified." } }, "type": "object" @@ -9852,9 +9955,11 @@ "type": "string" }, "status": { + "description": "Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required", "type": "string" }, "type": { + "description": "Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about", "type": "string" } }, @@ -9896,7 +10001,7 @@ "type": "string" }, "volumeAttributesClassName": { - "description": "volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. 
An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.", + "description": "volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).", "type": "string" }, "volumeMode": { @@ -9957,12 +10062,12 @@ "x-kubernetes-patch-strategy": "merge" }, "currentVolumeAttributesClassName": { - "description": "currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. 
When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature.", + "description": "currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default).", "type": "string" }, "modifyVolumeStatus": { "$ref": "#/definitions/io.k8s.api.core.v1.ModifyVolumeStatus", - "description": "ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. This is an alpha field and requires enabling VolumeAttributesClass feature." + "description": "ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. This is a beta field and requires enabling VolumeAttributesClass feature (off by default)." }, "phase": { "description": "phase represents the current phase of PersistentVolumeClaim.", @@ -10052,7 +10157,7 @@ "description": "A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods." }, "matchLabelKeys": { - "description": "MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. 
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.", + "description": "MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).", "items": { "type": "string" }, @@ -10060,7 +10165,7 @@ "x-kubernetes-list-type": "atomic" }, "mismatchLabelKeys": { - "description": "MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.", + "description": "MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).", "items": { "type": "string" }, @@ -10145,10 +10250,11 @@ "description": "PodDNSConfigOption defines DNS resolver options of a pod.", "properties": { "name": { - "description": "Required.", + "description": "Name is this DNS resolver option's name. Required.", "type": "string" }, "value": { + "description": "Value is this DNS resolver option's value.", "type": "string" } }, @@ -10181,6 +10287,10 @@ "description": "The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.", "type": "integer" }, + "seLinuxChangePolicy": { + "description": "seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. It has no effect on nodes that do not support SELinux or to volumes does not support SELinux. Valid values are \"MountOption\" and \"Recursive\".\n\n\"Recursive\" means relabeling of all files on all Pod volumes by the container runtime. 
This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node.\n\n\"MountOption\" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. \"MountOption\" value is allowed only when SELinuxMount feature gate is enabled.\n\nIf not specified and SELinuxMount feature gate is enabled, \"MountOption\" is used. If not specified and SELinuxMount feature gate is disabled, \"MountOption\" is used for ReadWriteOncePod volumes and \"Recursive\" for all other volumes.\n\nThis field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers.\n\nAll Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows.", + "type": "string" + }, "seLinuxOptions": { "$ref": "#/definitions/io.k8s.api.core.v1.SELinuxOptions", "description": "The SELinux context to be applied to all containers. If unspecified, the container runtime will allocate a random SELinux context for each container. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows." @@ -10190,7 +10300,7 @@ "description": "The seccomp options to use by the containers in this pod. Note that this field cannot be set when spec.os.name is windows." 
}, "supplementalGroups": { - "description": "A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows.", + "description": "A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows.", "items": { "format": "int64", "type": "integer" @@ -10198,6 +10308,10 @@ "type": "array", "x-kubernetes-list-type": "atomic" }, + "supplementalGroupsPolicy": { + "description": "Defines how supplemental groups of the first container processes are calculated. Valid values are \"Merge\" and \"Strict\". If not specified, \"Merge\" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. Note that this field cannot be set when spec.os.name is windows.", + "type": "string" + }, "sysctls": { "description": "Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. 
Note that this field cannot be set when spec.os.name is windows.", "items": { @@ -10257,7 +10371,7 @@ "properties": { "exec": { "$ref": "#/definitions/io.k8s.api.core.v1.ExecAction", - "description": "Exec specifies the action to take." + "description": "Exec specifies a command to execute in the container." }, "failureThreshold": { "description": "Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.", @@ -10265,11 +10379,11 @@ }, "grpc": { "$ref": "#/definitions/io.k8s.api.core.v1.GRPCAction", - "description": "GRPC specifies an action involving a GRPC port." + "description": "GRPC specifies a GRPC HealthCheckRequest." }, "httpGet": { "$ref": "#/definitions/io.k8s.api.core.v1.HTTPGetAction", - "description": "HTTPGet specifies the http request to perform." + "description": "HTTPGet specifies an HTTP GET request to perform." }, "initialDelaySeconds": { "description": "Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes", @@ -10285,7 +10399,7 @@ }, "tcpSocket": { "$ref": "#/definitions/io.k8s.api.core.v1.TCPSocketAction", - "description": "TCPSocket specifies an action involving a TCP port." + "description": "TCPSocket specifies a connection to a TCP port." }, "terminationGracePeriodSeconds": { "description": "Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. Value must be non-negative integer. 
The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset.", @@ -10306,7 +10420,7 @@ "type": "integer" }, "sources": { - "description": "sources is the list of volume projections", + "description": "sources is the list of volume projections. Each entry in this list handles one source.", "items": { "$ref": "#/definitions/io.k8s.api.core.v1.VolumeProjection" }, @@ -10402,6 +10516,10 @@ "name": { "description": "Name must match the name of one entry in pod.spec.resourceClaims of the Pod where this field is used. It makes that resource available inside a container.", "type": "string" + }, + "request": { + "description": "Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request.", + "type": "string" } }, "required": [ @@ -10664,7 +10782,7 @@ "type": "boolean" }, "procMount": { - "description": "procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.", + "description": "procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. 
Note that this field cannot be set when spec.os.name is windows.", "type": "string" }, "readOnlyRootFilesystem": { @@ -10876,6 +10994,7 @@ "x-kubernetes-map-type": "atomic" }, "io.k8s.api.core.v1.TypedObjectReference": { + "description": "TypedObjectReference contains enough information to let you locate the typed referenced object", "properties": { "apiGroup": { "description": "APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required.", @@ -10905,23 +11024,23 @@ "properties": { "awsElasticBlockStore": { "$ref": "#/definitions/io.k8s.api.core.v1.AWSElasticBlockStoreVolumeSource", - "description": "awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore" + "description": "awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: AWSElasticBlockStore is deprecated. All operations for the in-tree awsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore" }, "azureDisk": { "$ref": "#/definitions/io.k8s.api.core.v1.AzureDiskVolumeSource", - "description": "azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod." + "description": "azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. Deprecated: AzureDisk is deprecated. All operations for the in-tree azureDisk type are redirected to the disk.csi.azure.com CSI driver." }, "azureFile": { "$ref": "#/definitions/io.k8s.api.core.v1.AzureFileVolumeSource", - "description": "azureFile represents an Azure File Service mount on the host and bind mount to the pod." 
+ "description": "azureFile represents an Azure File Service mount on the host and bind mount to the pod. Deprecated: AzureFile is deprecated. All operations for the in-tree azureFile type are redirected to the file.csi.azure.com CSI driver." }, "cephfs": { "$ref": "#/definitions/io.k8s.api.core.v1.CephFSVolumeSource", - "description": "cephFS represents a Ceph FS mount on the host that shares a pod's lifetime" + "description": "cephFS represents a Ceph FS mount on the host that shares a pod's lifetime. Deprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported." }, "cinder": { "$ref": "#/definitions/io.k8s.api.core.v1.CinderVolumeSource", - "description": "cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md" + "description": "cinder represents a cinder volume attached and mounted on kubelets host machine. Deprecated: Cinder is deprecated. All operations for the in-tree cinder type are redirected to the cinder.csi.openstack.org CSI driver. More info: https://examples.k8s.io/mysql-cinder-pd/README.md" }, "configMap": { "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapVolumeSource", @@ -10929,7 +11048,7 @@ }, "csi": { "$ref": "#/definitions/io.k8s.api.core.v1.CSIVolumeSource", - "description": "csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature)." + "description": "csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers." }, "downwardAPI": { "$ref": "#/definitions/io.k8s.api.core.v1.DownwardAPIVolumeSource", @@ -10949,28 +11068,32 @@ }, "flexVolume": { "$ref": "#/definitions/io.k8s.api.core.v1.FlexVolumeSource", - "description": "flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin." 
+ "description": "flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. Deprecated: FlexVolume is deprecated. Consider using a CSIDriver instead." }, "flocker": { "$ref": "#/definitions/io.k8s.api.core.v1.FlockerVolumeSource", - "description": "flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running" + "description": "flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running. Deprecated: Flocker is deprecated and the in-tree flocker type is no longer supported." }, "gcePersistentDisk": { "$ref": "#/definitions/io.k8s.api.core.v1.GCEPersistentDiskVolumeSource", - "description": "gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk" + "description": "gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: GCEPersistentDisk is deprecated. All operations for the in-tree gcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk" }, "gitRepo": { "$ref": "#/definitions/io.k8s.api.core.v1.GitRepoVolumeSource", - "description": "gitRepo represents a git repository at a particular revision. DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container." + "description": "gitRepo represents a git repository at a particular revision. Deprecated: GitRepo is deprecated. 
To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container." }, "glusterfs": { "$ref": "#/definitions/io.k8s.api.core.v1.GlusterfsVolumeSource", - "description": "glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/glusterfs/README.md" + "description": "glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. Deprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported. More info: https://examples.k8s.io/volumes/glusterfs/README.md" }, "hostPath": { "$ref": "#/definitions/io.k8s.api.core.v1.HostPathVolumeSource", "description": "hostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath" }, + "image": { + "$ref": "#/definitions/io.k8s.api.core.v1.ImageVolumeSource", + "description": "image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine. The volume is resolved at pod startup depending on which PullPolicy value is provided:\n\n- Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. - Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. - IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails.\n\nThe volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation. 
A failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message. The types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field. The OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images. The volume will be mounted read-only (ro) and non-executable files (noexec). Sub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath). The field spec.securityContext.fsGroupChangePolicy has no effect on this volume type." + }, "iscsi": { "$ref": "#/definitions/io.k8s.api.core.v1.ISCSIVolumeSource", "description": "iscsi represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://examples.k8s.io/volumes/iscsi/README.md" @@ -10989,11 +11112,11 @@ }, "photonPersistentDisk": { "$ref": "#/definitions/io.k8s.api.core.v1.PhotonPersistentDiskVolumeSource", - "description": "photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine" + "description": "photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine. Deprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported." }, "portworxVolume": { "$ref": "#/definitions/io.k8s.api.core.v1.PortworxVolumeSource", - "description": "portworxVolume represents a portworx volume attached and mounted on kubelets host machine" + "description": "portworxVolume represents a portworx volume attached and mounted on kubelets host machine. Deprecated: PortworxVolume is deprecated. 
All operations for the in-tree portworxVolume type are redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate is on." }, "projected": { "$ref": "#/definitions/io.k8s.api.core.v1.ProjectedVolumeSource", @@ -11001,15 +11124,15 @@ }, "quobyte": { "$ref": "#/definitions/io.k8s.api.core.v1.QuobyteVolumeSource", - "description": "quobyte represents a Quobyte mount on the host that shares a pod's lifetime" + "description": "quobyte represents a Quobyte mount on the host that shares a pod's lifetime. Deprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported." }, "rbd": { "$ref": "#/definitions/io.k8s.api.core.v1.RBDVolumeSource", - "description": "rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/rbd/README.md" + "description": "rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. Deprecated: RBD is deprecated and the in-tree rbd type is no longer supported. More info: https://examples.k8s.io/volumes/rbd/README.md" }, "scaleIO": { "$ref": "#/definitions/io.k8s.api.core.v1.ScaleIOVolumeSource", - "description": "scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes." + "description": "scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. Deprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported." }, "secret": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretVolumeSource", @@ -11017,11 +11140,11 @@ }, "storageos": { "$ref": "#/definitions/io.k8s.api.core.v1.StorageOSVolumeSource", - "description": "storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes." + "description": "storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. Deprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported." 
}, "vsphereVolume": { "$ref": "#/definitions/io.k8s.api.core.v1.VsphereVirtualDiskVolumeSource", - "description": "vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine" + "description": "vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine. Deprecated: VsphereVolume is deprecated. All operations for the in-tree vsphereVolume type are redirected to the csi.vsphere.vmware.com CSI driver." } }, "required": [ @@ -11086,7 +11209,7 @@ "type": "object" }, "io.k8s.api.core.v1.VolumeProjection": { - "description": "Projection that may be projected along with other supported volume types", + "description": "Projection that may be projected along with other supported volume types. Exactly one of these fields must be set.", "properties": { "clusterTrustBundle": { "$ref": "#/definitions/io.k8s.api.core.v1.ClusterTrustBundleProjection", @@ -11517,7 +11640,7 @@ "type": "string" }, "sensor": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "type": "object" @@ -11561,7 +11684,7 @@ "sensor.SensorWatchEvent": { "properties": { "object": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" }, "type": { "type": "string" @@ -11578,7 +11701,7 @@ "type": "string" }, "sensor": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "type": "object" diff --git a/api/openapi-spec/swagger.json b/api/openapi-spec/swagger.json index 632b1abd7284..7560ea62b8c5 100644 --- a/api/openapi-spec/swagger.json +++ b/api/openapi-spec/swagger.json @@ -654,6 +654,12 @@ "description": "When present, indicates that modifications should not be\npersisted. 
An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. Valid values are:\n- All: all dry run stages will be processed\n+optional\n+listType=atomic.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" } ], "responses": { @@ -980,6 +986,12 @@ "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. Valid values are:\n- All: all dry run stages will be processed\n+optional\n+listType=atomic.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. 
decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" } ], "responses": { @@ -1166,7 +1178,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceList" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceList" } }, "default": { @@ -1202,7 +1214,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" } }, "default": { @@ -1238,7 +1250,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" } }, "default": { @@ -1280,7 +1292,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" } }, "default": { @@ -1349,6 +1361,12 @@ "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. 
Valid values are:\n- All: all dry run stages will be processed\n+optional\n+listType=atomic.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" } ], "responses": { @@ -1516,7 +1534,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SensorList" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorList" } }, "default": { @@ -1552,7 +1570,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "default": { @@ -1594,7 +1612,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "default": { @@ -1636,7 +1654,7 @@ "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "default": { @@ -1705,6 +1723,12 @@ "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. Valid values are:\n- All: all dry run stages will be processed\n+optional\n+listType=atomic.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" } ], "responses": { @@ -1909,7 +1933,7 @@ { "type": "string", "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", + "description": "If set, the number of lines from the end of the logs to show. 
If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+optional.", "name": "podLogOptions.tailLines", "in": "query" }, @@ -1925,6 +1949,12 @@ "description": "insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the\nserving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver\nand the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real\nkubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the\nconnection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept\nthe actual log data coming from the real kubelet).\n+optional.", "name": "podLogOptions.insecureSkipTLSVerifyBackend", "in": "query" + }, + { + "type": "string", + "description": "Specify which container log stream to return to the client.\nAcceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr\nare returned interleaved.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+featureGate=PodLogsQuerySplitStreams\n+optional.", + "name": "podLogOptions.stream", + "in": "query" } ], "responses": { @@ -2233,7 +2263,7 @@ { "type": "string", "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", + "description": "If set, the number of lines from the end of the logs to show. 
If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+optional.", "name": "podLogOptions.tailLines", "in": "query" }, @@ -2249,6 +2279,12 @@ "description": "insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the\nserving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver\nand the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real\nkubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the\nconnection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept\nthe actual log data coming from the real kubelet).\n+optional.", "name": "podLogOptions.insecureSkipTLSVerifyBackend", "in": "query" + }, + { + "type": "string", + "description": "Specify which container log stream to return to the client.\nAcceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr\nare returned interleaved.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+featureGate=PodLogsQuerySplitStreams\n+optional.", + "name": "podLogOptions.stream", + "in": "query" } ], "responses": { @@ -2863,6 +2899,12 @@ "description": "When present, indicates that modifications should not be\npersisted. An invalid or unrecognized dryRun directive will\nresult in an error response and no further processing of the\nrequest. 
Valid values are:\n- All: all dry run stages will be processed\n+optional\n+listType=atomic.", "name": "deleteOptions.dryRun", "in": "query" + }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" } ], "responses": { @@ -3203,6 +3245,12 @@ "name": "deleteOptions.dryRun", "in": "query" }, + { + "type": "boolean", + "description": "if set to true, it will trigger an unsafe deletion of the resource in\ncase the normal deletion flow fails with a corrupt object error.\nA resource is considered corrupt if it can not be retrieved from\nthe underlying storage successfully because of a) its data can\nnot be transformed e.g. decryption failure, or b) it fails\nto decode into an object.\nNOTE: unsafe deletion ignores finalizer constraints, skips\nprecondition checks, and removes the object from the storage.\nWARNING: This may potentially break the cluster if the workload\nassociated with the resource being unsafe-deleted relies on normal\ndeletion flow. 
Use only if you REALLY know what you are doing.\nThe default value is false, and the user must opt in to enable it\n+optional.", + "name": "deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential", + "in": "query" + }, { "type": "boolean", "name": "force", @@ -3297,7 +3345,7 @@ { "type": "string", "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", + "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+optional.", "name": "logOptions.tailLines", "in": "query" }, @@ -3314,6 +3362,12 @@ "name": "logOptions.insecureSkipTLSVerifyBackend", "in": "query" }, + { + "type": "string", + "description": "Specify which container log stream to return to the client.\nAcceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr\nare returned interleaved.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+featureGate=PodLogsQuerySplitStreams\n+optional.", + "name": "logOptions.stream", + "in": "query" + }, { "type": "string", "name": "grep", @@ -3732,7 +3786,7 @@ { "type": "string", "format": "int64", - "description": "If set, the number of lines from the end of the logs to show. If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime\n+optional.", + "description": "If set, the number of lines from the end of the logs to show. 
If not specified,\nlogs are shown from the creation of the container or sinceSeconds or sinceTime.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+optional.", "name": "logOptions.tailLines", "in": "query" }, @@ -3749,6 +3803,12 @@ "name": "logOptions.insecureSkipTLSVerifyBackend", "in": "query" }, + { + "type": "string", + "description": "Specify which container log stream to return to the client.\nAcceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr\nare returned interleaved.\nNote that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\".\n+featureGate=PodLogsQuerySplitStreams\n+optional.", + "name": "logOptions.stream", + "in": "query" + }, { "type": "string", "name": "grep", @@ -4132,7 +4192,7 @@ "type": "object", "properties": { "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "namespace": { "type": "string" @@ -4146,7 +4206,7 @@ "type": "object", "properties": { "object": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "type": { "type": "string" @@ -4186,7 +4246,7 @@ "type": "object", "properties": { "eventSource": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" }, "name": { "type": "string" @@ -4196,62 +4256,7 @@ } } }, - "google.protobuf.Any": { - "type": "object", - "properties": { - "type_url": { - "type": "string" - }, - "value": { - "type": "string", - "format": "byte" - } - } - }, - "grpc.gateway.runtime.Error": { - "type": "object", - "properties": { - "code": { - "type": "integer" - }, - "details": { - "type": "array", - "items": { - "$ref": 
"#/definitions/google.protobuf.Any" - } - }, - "error": { - "type": "string" - }, - "message": { - "type": "string" - } - } - }, - "grpc.gateway.runtime.StreamError": { - "type": "object", - "properties": { - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/google.protobuf.Any" - } - }, - "grpc_code": { - "type": "integer" - }, - "http_code": { - "type": "integer" - }, - "http_status": { - "type": "string" - }, - "message": { - "type": "string" - } - } - }, - "io.argoproj.events.v1alpha1.AMQPConsumeConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPConsumeConfig": { "type": "object", "title": "AMQPConsumeConfig holds the configuration to immediately starts delivering queued messages\n+k8s:openapi-gen=true", "properties": { @@ -4277,25 +4282,25 @@ } } }, - "io.argoproj.events.v1alpha1.AMQPEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPEventSource": { "type": "object", "title": "AMQPEventSource refers to an event-source for AMQP stream events", "properties": { "auth": { "title": "Auth hosts secret selectors for username and password\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "connectionBackoff": { "title": "Backoff holds parameters applied to connection.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "consume": { "title": "Consume holds the configuration to immediately starts delivering queued messages\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.Consume\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPConsumeConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPConsumeConfig" }, "exchangeDeclare": { "title": "ExchangeDeclare holds the 
configuration for the exchange on the server\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.ExchangeDeclare\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPExchangeDeclareConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPExchangeDeclareConfig" }, "exchangeName": { "type": "string", @@ -4307,7 +4312,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -4322,11 +4327,11 @@ }, "queueBind": { "title": "QueueBind holds the configuration that binds an exchange to a queue so that publishings to the\nexchange will be routed to the queue when the publishing routing key matches the binding routing key\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.QueueBind\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPQueueBindConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueBindConfig" }, "queueDeclare": { "title": "QueueDeclare holds the configuration of a queue to hold messages and deliver to consumers.\nDeclaring creates a queue if it doesn't already exist, or ensures that an existing queue matches\nthe same parameters\nFor more information, visit https://pkg.go.dev/github.com/rabbitmq/amqp091-go#Channel.QueueDeclare\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPQueueDeclareConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueDeclareConfig" }, "routingKey": { "type": "string", @@ -4334,7 +4339,7 @@ }, "tls": { "title": "TLS configuration for the amqp client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "url": { "type": "string", @@ -4346,7 +4351,7 @@ } } }, - "io.argoproj.events.v1alpha1.AMQPExchangeDeclareConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPExchangeDeclareConfig": { "type": "object", "title": "AMQPExchangeDeclareConfig holds the configuration for the exchange on the server\n+k8s:openapi-gen=true", "properties": { @@ -4368,7 +4373,7 @@ } } }, - "io.argoproj.events.v1alpha1.AMQPQueueBindConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueBindConfig": { "type": "object", "title": "AMQPQueueBindConfig holds the configuration that binds an exchange to a queue so that publishings to the\nexchange will be routed to the queue when the publishing routing key matches the binding routing key\n+k8s:openapi-gen=true", "properties": { @@ -4378,7 +4383,7 @@ } } }, - "io.argoproj.events.v1alpha1.AMQPQueueDeclareConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPQueueDeclareConfig": { "type": "object", "title": "AMQPQueueDeclareConfig holds the configuration of a queue to hold messages and deliver to consumers.\nDeclaring creates a queue if it doesn't already exist, or ensures that an existing queue matches\nthe same parameters\n+k8s:openapi-gen=true", "properties": { @@ -4408,7 +4413,7 @@ } } }, - "io.argoproj.events.v1alpha1.AWSLambdaTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AWSLambdaTrigger": { "type": "object", "title": "AWSLambdaTrigger refers to specification of the trigger to invoke an AWS Lambda function", "properties": { @@ -4428,14 +4433,14 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "region": { @@ -4452,7 +4457,7 @@ } } }, - "io.argoproj.events.v1alpha1.Amount": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount": { "description": "Amount represent a numeric amount.", "type": "object", "properties": { @@ -4462,7 +4467,7 @@ } } }, - "io.argoproj.events.v1alpha1.ArgoWorkflowTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArgoWorkflowTrigger": { "type": "object", "title": "ArgoWorkflowTrigger is the trigger for the Argo Workflow", "properties": { @@ -4481,16 +4486,16 @@ "type": "array", "title": "Parameters is the list of parameters to pass to resolved Argo Workflow object", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "source": { "title": "Source of the K8s resource file(s)", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArtifactLocation" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation" } } }, - "io.argoproj.events.v1alpha1.ArtifactLocation": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation": { "type": "object", "title": "ArtifactLocation describes the source location for an external artifact", "properties": { @@ -4500,11 +4505,11 @@ }, "file": { "title": "File artifact is artifact stored in a file", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.FileArtifact" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileArtifact" }, "git": { "title": "Git repository hosting the artifact", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitArtifact" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitArtifact" }, "inline": { "type": "string", @@ -4512,19 +4517,19 @@ }, "resource": { "title": "Resource is generic template for K8s resource", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Resource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResource" }, "s3": { "title": "S3 compliant artifact", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Artifact" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact" }, "url": { "title": "URL to fetch the artifact from", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.URLArtifact" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.URLArtifact" } } }, - "io.argoproj.events.v1alpha1.AzureEventHubsTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventHubsTrigger": { "type": "object", "title": "AzureEventHubsTrigger refers to specification of the Azure Event Hubs Trigger", "properties": { @@ -4540,14 +4545,14 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, 
"sharedAccessKey": { @@ -4560,13 +4565,13 @@ } } }, - "io.argoproj.events.v1alpha1.AzureEventsHubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventsHubEventSource": { "type": "object", "title": "AzureEventsHubEventSource describes the event source for azure events hub\nMore info at https://docs.microsoft.com/en-us/azure/event-hubs/", "properties": { "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "fqdn": { "type": "string", @@ -4584,16 +4589,16 @@ } }, "sharedAccessKey": { - "title": "SharedAccessKey is the generated value of the key", + "title": "SharedAccessKey is the generated value of the key. If both this field and SharedAccessKeyName are not provided\nit will try to access via Azure AD with DefaultAzureCredential, FQDN and HubName.\n+optional", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" }, "sharedAccessKeyName": { - "title": "SharedAccessKeyName is the name you chose for your application's SAS keys", + "title": "SharedAccessKeyName is the name you chose for your application's SAS keys. 
If both this field and SharedAccessKey are not provided\nit will try to access via Azure AD with DefaultAzureCredential, FQDN and HubName.\n+optional", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" } } }, - "io.argoproj.events.v1alpha1.AzureQueueStorageEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureQueueStorageEventSource": { "type": "object", "title": "AzureQueueStorageEventSource describes the event source for azure queue storage\nmore info at https://learn.microsoft.com/en-us/azure/storage/queues/", "properties": { @@ -4611,7 +4616,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -4638,7 +4643,7 @@ } } }, - "io.argoproj.events.v1alpha1.AzureServiceBusEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusEventSource": { "type": "object", "title": "AzureServiceBusEventSource describes the event source for azure service bus\nMore info at https://docs.microsoft.com/en-us/azure/service-bus-messaging/", "properties": { @@ -4648,7 +4653,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "fullyQualifiedNamespace": { "type": "string", @@ -4675,7 +4680,7 @@ }, "tls": { "title": "TLS configuration for the service bus client\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topicName": { "type": "string", @@ -4683,7 +4688,7 @@ } } }, - "io.argoproj.events.v1alpha1.AzureServiceBusTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusTrigger": { "type": 
"object", "properties": { "connectionString": { @@ -4694,14 +4699,14 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "queueName": { @@ -4714,7 +4719,7 @@ }, "tls": { "title": "TLS configuration for the service bus client\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topicName": { "type": "string", @@ -4722,21 +4727,21 @@ } } }, - "io.argoproj.events.v1alpha1.Backoff": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff": { "type": "object", "title": "Backoff for an operation", "properties": { "duration": { "title": "The initial duration in nanoseconds or strings like \"1s\", \"3m\"\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Int64OrString" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Int64OrString" }, "factor": { "title": "Duration is multiplied by factor each iteration\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Amount" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount" }, "jitter": { "title": "The amount of jitter applied each iteration\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Amount" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Amount" }, "steps": { "type": "integer", @@ -4744,7 +4749,7 @@ } } }, - "io.argoproj.events.v1alpha1.BasicAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth": { "type": "object", "title": "BasicAuth contains the reference to K8s secrets that holds the username and password", "properties": { @@ -4758,13 +4763,13 @@ } } }, - "io.argoproj.events.v1alpha1.BitbucketAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketAuth": { "type": "object", "title": "BitbucketAuth holds the different auth strategies for connecting to Bitbucket", "properties": { "basic": { "title": "Basic is BasicAuth auth strategy.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketBasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketBasicAuth" }, "oauthToken": { "title": "OAuthToken refers to the K8s secret that holds the OAuth Bearer token.\n+optional", @@ -4772,9 +4777,9 @@ } } }, - "io.argoproj.events.v1alpha1.BitbucketBasicAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketBasicAuth": { "type": "object", - "title": "BasicAuth holds the information required to authenticate user via basic auth mechanism", + "title": "BitbucketBasicAuth holds the information required to authenticate user via basic auth mechanism", "properties": { "password": { "description": "Password refers to the K8s secret that holds the password.", @@ -4786,13 +4791,13 @@ } } }, - "io.argoproj.events.v1alpha1.BitbucketEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketEventSource": { "type": "object", "title": "BitbucketEventSource describes the event source for Bitbucket", "properties": { "auth": { "description": "Auth information required to connect to Bitbucket.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketAuth" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketAuth" }, "deleteHookOnFinish": { "type": "boolean", @@ -4807,7 +4812,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -4828,7 +4833,7 @@ "type": "array", "title": "Repositories holds a list of repositories for which integration needs to set up\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketRepository" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketRepository" } }, "repositorySlug": { @@ -4837,11 +4842,11 @@ }, "webhook": { "title": "Webhook refers to the configuration required to run an http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.BitbucketRepository": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketRepository": { "type": "object", "properties": { "owner": { @@ -4854,17 +4859,21 @@ } } }, - "io.argoproj.events.v1alpha1.BitbucketServerEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerEventSource": { "type": "object", "title": "BitbucketServerEventSource refers to event-source related to Bitbucket Server events", "properties": { "accessToken": { - "title": "AccessToken is reference to K8s secret which holds the bitbucket api access information", + "title": "AccessToken is reference to K8s secret which holds the bitbucket api access information.\n+optional", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" }, "bitbucketserverBaseURL": { + "description": "BitbucketServerBaseURL is the base URL for API requests to a custom endpoint.", + "type": "string" + 
}, + "checkInterval": { "type": "string", - "title": "BitbucketServerBaseURL is the base URL for API requests to a custom endpoint" + "title": "CheckInterval is a duration in which to wait before checking that the webhooks exist, e.g. 1s, 30m, 2h... (defaults to 1m)\n+optional" }, "deleteHookOnFinish": { "type": "boolean", @@ -4872,14 +4881,14 @@ }, "events": { "type": "array", - "title": "Events are bitbucket event to listen to.\nRefer https://confluence.atlassian.com/bitbucketserver/event-payload-938025882.html", + "title": "Events are bitbucket event to listen to.\nRefer https://confluence.atlassian.com/bitbucketserver/event-payload-938025882.html\n+optional", "items": { "type": "string" } }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -4888,49 +4897,64 @@ "type": "string" } }, + "oneEventPerChange": { + "type": "boolean", + "title": "OneEventPerChange controls whether to process each change in a repo:refs_changed webhook event as a separate io.argoproj.workflow.v1alpha1. This setting is useful when multiple tags are\npushed simultaneously for the same commit, and each tag needs to independently trigger an action, such as a distinct workflow in Argo Workflows. When enabled, the\nBitbucketServerEventSource publishes an individual BitbucketServerEventData for each change, ensuring independent processing of each tag or reference update in a\nsingle webhook event.\n+optional" + }, "projectKey": { "type": "string", - "title": "DeprecatedProjectKey is the key of project for which integration needs to set up\nDeprecated: use Repositories instead. Will be unsupported in v1.8\n+optional" + "title": "DeprecatedProjectKey is the key of project for which integration needs to set up.\nDeprecated: use Repositories instead. 
Will be unsupported in v1.8.\n+optional" + }, + "projects": { + "type": "array", + "title": "Projects holds a list of projects for which integration needs to set up, this will add the webhook to all repositories in the project.\n+optional", + "items": { + "type": "string" + } }, "repositories": { "type": "array", - "title": "Repositories holds a list of repositories for which integration needs to set up\n+optional", + "title": "Repositories holds a list of repositories for which integration needs to set up.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketServerRepository" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerRepository" } }, "repositorySlug": { "type": "string", - "title": "DeprecatedRepositorySlug is the slug of the repository for which integration needs to set up\nDeprecated: use Repositories instead. Will be unsupported in v1.8\n+optional" + "title": "DeprecatedRepositorySlug is the slug of the repository for which integration needs to set up.\nDeprecated: use Repositories instead. 
Will be unsupported in v1.8.\n+optional" + }, + "skipBranchRefsChangedOnOpenPR": { + "type": "boolean", + "title": "SkipBranchRefsChangedOnOpenPR bypasses the event repo:refs_changed for branches whenever there's an associated open pull request.\nThis helps in optimizing the event handling process by avoiding unnecessary triggers for branch reference changes that are already part of a pull request under review.\n+optional" }, "tls": { "title": "TLS configuration for the bitbucketserver client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "webhook": { - "title": "Webhook holds configuration to run a http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "description": "Webhook holds configuration to run a http server.", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" }, "webhookSecret": { - "title": "WebhookSecret is reference to K8s secret which holds the bitbucket webhook secret (for HMAC validation)", + "title": "WebhookSecret is reference to K8s secret which holds the bitbucket webhook secret (for HMAC validation).\n+optional", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" } } }, - "io.argoproj.events.v1alpha1.BitbucketServerRepository": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerRepository": { "type": "object", "properties": { "projectKey": { - "type": "string", - "title": "ProjectKey is the key of project for which integration needs to set up" + "description": "ProjectKey is the key of project for which integration needs to set up.", + "type": "string" }, "repositorySlug": { - "type": "string", - "title": "RepositorySlug is the slug of the repository for which integration needs to set up" + "description": "RepositorySlug is the slug of the repository for which integration needs to set up.", + "type": 
"string" } } }, - "io.argoproj.events.v1alpha1.CalendarEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CalendarEventSource": { "type": "object", "title": "CalendarEventSource describes a time based dependency. One of the fields (schedule, interval, or recurrence) must be passed.\nSchedule takes precedence over interval; interval takes precedence over recurrence", "properties": { @@ -4943,7 +4967,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "interval": { "type": "string", @@ -4958,7 +4982,7 @@ }, "persistence": { "title": "Persistence hold the configuration for event persistence", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventPersistence" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventPersistence" }, "schedule": { "type": "string", @@ -4970,7 +4994,7 @@ } } }, - "io.argoproj.events.v1alpha1.CatchupConfiguration": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CatchupConfiguration": { "type": "object", "properties": { "enabled": { @@ -4983,7 +5007,7 @@ } } }, - "io.argoproj.events.v1alpha1.Condition": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Condition": { "type": "object", "title": "Condition contains details about resource state", "properties": { @@ -5009,7 +5033,7 @@ } } }, - "io.argoproj.events.v1alpha1.ConditionsResetByTime": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetByTime": { "type": "object", "properties": { "cron": { @@ -5022,16 +5046,16 @@ } } }, - "io.argoproj.events.v1alpha1.ConditionsResetCriteria": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetCriteria": { "type": "object", "properties": { "byTime": { "title": "Schedule is a cron-like expression. 
For reference, see: https://en.wikipedia.org/wiki/Cron", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConditionsResetByTime" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetByTime" } } }, - "io.argoproj.events.v1alpha1.ConfigMapPersistence": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConfigMapPersistence": { "type": "object", "properties": { "createIfNotExist": { @@ -5044,7 +5068,46 @@ } } }, - "io.argoproj.events.v1alpha1.CustomTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Container": { + "type": "object", + "title": "Container defines customized spec for a container", + "properties": { + "env": { + "type": "array", + "title": "+optional", + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.EnvVar" + } + }, + "envFrom": { + "type": "array", + "title": "+optional", + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.EnvFromSource" + } + }, + "imagePullPolicy": { + "type": "string", + "title": "+optional" + }, + "resources": { + "title": "+optional", + "$ref": "#/definitions/io.k8s.api.core.v1.ResourceRequirements" + }, + "securityContext": { + "title": "+optional", + "$ref": "#/definitions/io.k8s.api.core.v1.SecurityContext" + }, + "volumeMounts": { + "type": "array", + "title": "+optional", + "items": { + "$ref": "#/definitions/io.k8s.api.core.v1.VolumeMount" + } + } + } + }, + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CustomTrigger": { "description": "CustomTrigger refers to the specification of the custom trigger.", "type": "object", "properties": { @@ -5056,14 +5119,14 @@ "description": "Parameters is the list of parameters that is applied to resolved custom trigger trigger object.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of 
key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "secure": { @@ -5087,7 +5150,7 @@ } } }, - "io.argoproj.events.v1alpha1.DataFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.DataFilter": { "type": "object", "title": "DataFilter describes constraints and filters for event data\nRegular Expressions are purposefully not a feature as they are overkill for our uses here\nSee Rob Pike's Post: https://commandcenter.blogspot.com/2011/08/regular-expressions-in-lexing-and.html", "properties": { @@ -5116,7 +5179,7 @@ } } }, - "io.argoproj.events.v1alpha1.EmailTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmailTrigger": { "description": "EmailTrigger refers to the specification of the email notification trigger.", "type": "object", "properties": { @@ -5136,7 +5199,7 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "port": { @@ -5164,7 +5227,7 @@ } } }, - "io.argoproj.events.v1alpha1.EmitterEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmitterEventSource": { "type": "object", "title": "EmitterEventSource describes the event source for emitter\nMore info at https://emitter.io/develop/getting-started/", "properties": { @@ -5182,11 +5245,11 @@ }, "connectionBackoff": { "title": "Backoff holds parameters applied to connection.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" 
}, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -5205,7 +5268,7 @@ }, "tls": { "title": "TLS configuration for the emitter client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "username": { "title": "Username to use to connect to broker\n+optional", @@ -5213,7 +5276,7 @@ } } }, - "io.argoproj.events.v1alpha1.EventContext": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventContext": { "type": "object", "title": "EventContext holds the context of the cloudevent received from an event source.\n+protobuf.options.(gogoproto.goproto_stringer)=false", "properties": { @@ -5247,7 +5310,7 @@ } } }, - "io.argoproj.events.v1alpha1.EventDependency": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependency": { "type": "object", "title": "EventDependency describes a dependency", "properties": { @@ -5261,7 +5324,7 @@ }, "filters": { "title": "Filters and rules governing toleration of success and constraints on the context and data of an event", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventDependencyFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyFilter" }, "filtersLogicalOperator": { "description": "FiltersLogicalOperator defines how different filters are evaluated together.\nAvailable values: and (\u0026\u0026), or (||)\nIs optional and if left blank treated as and (\u0026\u0026).", @@ -5273,23 +5336,23 @@ }, "transform": { "title": "Transform transforms the event data", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventDependencyTransformer" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyTransformer" } } }, - "io.argoproj.events.v1alpha1.EventDependencyFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyFilter": { "description": "EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1.", "type": "object", "properties": { "context": { "title": "Context filter constraints", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventContext" }, "data": { "type": "array", "title": "Data filter constraints with escalation", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.DataFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.DataFilter" } }, "dataLogicalOperator": { @@ -5304,7 +5367,7 @@ "description": "Exprs contains the list of expressions evaluated against the event payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ExprFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ExprFilter" } }, "script": { @@ -5313,11 +5376,11 @@ }, "time": { "title": "Time filter on the event with escalation", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TimeFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TimeFilter" } } }, - "io.argoproj.events.v1alpha1.EventDependencyTransformer": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependencyTransformer": { "type": "object", "title": "EventDependencyTransformer transforms the event", "properties": { @@ -5331,20 +5394,20 @@ } } }, - "io.argoproj.events.v1alpha1.EventPersistence": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventPersistence": { "type": "object", "properties": { "catchup": { "title": "Catchup enables to triggered the missed schedule when eventsource 
restarts", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CatchupConfiguration" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CatchupConfiguration" }, "configMap": { "title": "ConfigMap holds configmap details for persistence", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConfigMapPersistence" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConfigMapPersistence" } } }, - "io.argoproj.events.v1alpha1.EventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource": { "type": "object", "title": "EventSource is the definition of a eventsource resource\n+genclient\n+kubebuilder:resource:shortName=es\n+kubebuilder:subresource:status\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object\n+k8s:openapi-gen=true", "properties": { @@ -5352,15 +5415,15 @@ "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" }, "spec": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceSpec" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceSpec" }, "status": { "title": "+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceStatus" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceStatus" } } }, - "io.argoproj.events.v1alpha1.EventSourceFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter": { "type": "object", "properties": { "expression": { @@ -5368,14 +5431,14 @@ } } }, - "io.argoproj.events.v1alpha1.EventSourceList": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceList": { "type": "object", "title": "EventSourceList is the list of eventsource resources\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object", "properties": { "items": { "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSource" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource" } }, "metadata": { @@ -5383,7 +5446,7 @@ } } }, - "io.argoproj.events.v1alpha1.EventSourceSpec": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceSpec": { "type": "object", "title": "EventSourceSpec refers to specification of event-source resource", "properties": { @@ -5391,56 +5454,56 @@ "type": "object", "title": "AMQP event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AMQPEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AMQPEventSource" } }, "azureEventsHub": { "type": "object", "title": "AzureEventsHub event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureEventsHubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventsHubEventSource" } }, "azureQueueStorage": { "type": "object", "title": "AzureQueueStorage event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureQueueStorageEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureQueueStorageEventSource" } }, "azureServiceBus": { "type": "object", "title": "Azure Service Bus event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureServiceBusEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusEventSource" } }, "bitbucket": { "type": "object", "title": "Bitbucket event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BitbucketEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketEventSource" } }, "bitbucketserver": { "type": "object", "title": "Bitbucket Server event sources", "additionalProperties": { - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.BitbucketServerEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BitbucketServerEventSource" } }, "calendar": { "type": "object", "title": "Calendar event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CalendarEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CalendarEventSource" } }, "emitter": { "type": "object", "title": "Emitter event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EmitterEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmitterEventSource" } }, "eventBusName": { @@ -5451,105 +5514,105 @@ "type": "object", "title": "File event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.FileEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileEventSource" } }, "generic": { "type": "object", "title": "Generic event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GenericEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GenericEventSource" } }, "gerrit": { "type": "object", "title": "Gerrit event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GerritEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GerritEventSource" } }, "github": { "type": "object", "title": "Github event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GithubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubEventSource" } }, "gitlab": { "type": "object", "title": "Gitlab event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitlabEventSource" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitlabEventSource" } }, "hdfs": { "type": "object", "title": "HDFS event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.HDFSEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HDFSEventSource" } }, "kafka": { "type": "object", "title": "Kafka event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaEventSource" } }, "minio": { "type": "object", "title": "Minio event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Artifact" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact" } }, "mqtt": { "type": "object", "title": "MQTT event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.MQTTEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.MQTTEventSource" } }, "nats": { "type": "object", "title": "NATS event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSEventsSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSEventsSource" } }, "nsq": { "type": "object", "title": "NSQ event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NSQEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NSQEventSource" } }, "pubSub": { "type": "object", "title": "PubSub event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PubSubEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PubSubEventSource" } }, "pulsar": { "type": "object", "title": "Pulsar event source", "additionalProperties": { - 
"$ref": "#/definitions/io.argoproj.events.v1alpha1.PulsarEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarEventSource" } }, "redis": { "type": "object", "title": "Redis event source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RedisEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisEventSource" } }, "redisStream": { "type": "object", "title": "Redis stream source", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RedisStreamEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisStreamEventSource" } }, "replicas": { @@ -5560,78 +5623,78 @@ "type": "object", "title": "Resource event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ResourceEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceEventSource" } }, "service": { "title": "Service is the specifications of the service to expose the event source\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Service" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Service" }, "sftp": { "type": "object", "title": "SFTP event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SFTPEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SFTPEventSource" } }, "slack": { "type": "object", "title": "Slack event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackEventSource" } }, "sns": { "type": "object", "title": "SNS event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SNSEventSource" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SNSEventSource" } }, "sqs": { "type": "object", "title": "SQS event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SQSEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SQSEventSource" } }, "storageGrid": { "type": "object", "title": "StorageGrid event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StorageGridEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridEventSource" } }, "stripe": { "type": "object", "title": "Stripe event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StripeEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StripeEventSource" } }, "template": { "title": "Template is the pod specification for the event source\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Template" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template" }, "webhook": { "type": "object", "title": "Webhook event sources", "additionalProperties": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookEventSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookEventSource" } } } }, - "io.argoproj.events.v1alpha1.EventSourceStatus": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceStatus": { "type": "object", "title": "EventSourceStatus holds the status of the event-source resource", "properties": { "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Status" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status" } } }, - "io.argoproj.events.v1alpha1.ExprFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ExprFilter": { "type": "object", "properties": { 
"expr": { @@ -5642,12 +5705,12 @@ "description": "Fields refers to set of keys that refer to the paths within event payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PayloadField" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PayloadField" } } } }, - "io.argoproj.events.v1alpha1.FileArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileArtifact": { "type": "object", "title": "FileArtifact contains information about an artifact in a filesystem", "properties": { @@ -5656,7 +5719,7 @@ } } }, - "io.argoproj.events.v1alpha1.FileEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.FileEventSource": { "description": "FileEventSource describes an event-source for file related events.", "type": "object", "properties": { @@ -5666,7 +5729,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -5681,11 +5744,11 @@ }, "watchPathConfig": { "title": "WatchPathConfig contains configuration about the file path to watch", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig" } } }, - "io.argoproj.events.v1alpha1.GenericEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GenericEventSource": { "description": "GenericEventSource refers to a generic event source. 
It can be used to implement a custom event source.", "type": "object", "properties": { @@ -5699,7 +5762,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "insecure": { "description": "Insecure determines the type of connection.", @@ -5722,13 +5785,13 @@ } } }, - "io.argoproj.events.v1alpha1.GerritEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GerritEventSource": { "type": "object", "title": "GerritEventSource refers to event-source related to gerrit events", "properties": { "auth": { "title": "Auth hosts secret selectors for username and password\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "deleteHookOnFinish": { "type": "boolean", @@ -5743,7 +5806,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "gerritBaseURL": { "type": "string", @@ -5773,11 +5836,11 @@ }, "webhook": { "title": "Webhook holds configuration to run a http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.GitArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitArtifact": { "type": "object", "title": "GitArtifact contains information about an artifact stored in git", "properties": { @@ -5791,7 +5854,7 @@ }, "creds": { "title": "Creds contain reference to git username and password\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitCreds" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitCreds" }, "filePath": { "type": "string", @@ -5807,7 +5870,7 @@ }, "remote": { "title": "Remote to manage set of tracked repositories. Defaults to \"origin\".\nRefer https://git-scm.com/docs/git-remote\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GitRemoteConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitRemoteConfig" }, "sshKeySecret": { "title": "SSHKeySecret refers to the secret that contains SSH key", @@ -5823,7 +5886,7 @@ } } }, - "io.argoproj.events.v1alpha1.GitCreds": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitCreds": { "type": "object", "title": "GitCreds contain reference to git username and password", "properties": { @@ -5835,7 +5898,7 @@ } } }, - "io.argoproj.events.v1alpha1.GitRemoteConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitRemoteConfig": { "type": "object", "title": "GitRemoteConfig contains the configuration of a Git remote", "properties": { @@ -5852,7 +5915,7 @@ } } }, - "io.argoproj.events.v1alpha1.GithubAppCreds": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubAppCreds": { "type": "object", "properties": { "appID": { @@ -5869,7 +5932,7 @@ } } }, - "io.argoproj.events.v1alpha1.GithubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubEventSource": { "type": "object", "title": "GithubEventSource refers to event-source for github related events", "properties": { @@ -5898,11 +5961,11 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "githubApp": { "title": "GitHubApp holds the GitHub app credentials\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.GithubAppCreds" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GithubAppCreds" }, "githubBaseURL": { "type": "string", @@ -5942,7 +6005,7 @@ "description": "Repositories holds the information of repositories, which uses repo owner as the key,\nand list of repo names as the value. Not required if Organizations is set.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.OwnedRepositories" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OwnedRepositories" } }, "repository": { @@ -5951,7 +6014,7 @@ }, "webhook": { "title": "Webhook refers to the configuration required to run a http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" }, "webhookSecret": { "title": "WebhookSecret refers to K8s secret containing GitHub webhook secret\nhttps://developer.github.com/webhooks/securing/\n+optional", @@ -5959,7 +6022,7 @@ } } }, - "io.argoproj.events.v1alpha1.GitlabEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.GitlabEventSource": { "type": "object", "title": "GitlabEventSource refers to event-source related to Gitlab events", "properties": { @@ -5984,7 +6047,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "gitlabBaseURL": { "type": "string", @@ -6010,7 +6073,7 @@ }, "projects": { "type": "array", - "title": "List of project IDs or project namespace paths like \"whynowy/test\". 
Projects and groups cannot be empty at the same time.\n+optional", + "title": "List of project IDs or project namespace paths like \"whynowy/test\".\nIf neither a project nor a group is defined, the EventSource will not manage webhooks.\n+optional", "items": { "type": "string" } @@ -6021,11 +6084,11 @@ }, "webhook": { "title": "Webhook holds configuration to run a http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.HDFSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HDFSEventSource": { "type": "object", "title": "HDFSEventSource refers to event-source for HDFS related events", "properties": { @@ -6041,7 +6104,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "hdfsUser": { "description": "HDFSUser is the user to access HDFS file system.\nIt is ignored if either ccache or keytab is used.", @@ -6083,17 +6146,17 @@ "title": "Type of file operations to watch" }, "watchPathConfig": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig" } } }, - "io.argoproj.events.v1alpha1.HTTPTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HTTPTrigger": { "type": "object", "title": "HTTPTrigger is the trigger for the HTTP request", "properties": { "basicAuth": { "title": "BasicAuth configuration for the http request.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "headers": { "type": "object", @@ -6110,20 +6173,20 @@ "description": "Parameters is the list of 
key-value extracted from event's payload that are applied to\nthe HTTP trigger resource.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "secureHeaders": { "type": "array", "title": "Secure Headers stored in Kubernetes Secrets for the HTTP requests.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SecureHeader" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader" } }, "timeout": { @@ -6132,7 +6195,7 @@ }, "tls": { "title": "TLS configuration for the HTTP client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "url": { "description": "URL refers to the URL to send HTTP request to.", @@ -6140,7 +6203,7 @@ } } }, - "io.argoproj.events.v1alpha1.Int64OrString": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Int64OrString": { "type": "object", "properties": { "int64Val": { @@ -6154,13 +6217,23 @@ } } }, - "io.argoproj.events.v1alpha1.K8SResourcePolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResource": { + "description": "K8SResource represent arbitrary structured data.", + "type": "object", + "properties": { + "value": { + "type": "string", + "format": "byte" + } + } + }, + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResourcePolicy": { "type": "object", "title": "K8SResourcePolicy refers to the policy used to check the state of K8s based triggers using labels", "properties": { "backoff": { "title": "Backoff before checking resource state", - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "errorOnBackoffTimeout": { "type": "boolean", @@ -6175,7 +6248,7 @@ } } }, - "io.argoproj.events.v1alpha1.KafkaConsumerGroup": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaConsumerGroup": { "type": "object", "properties": { "groupName": { @@ -6192,7 +6265,7 @@ } } }, - "io.argoproj.events.v1alpha1.KafkaEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaEventSource": { "type": "object", "title": "KafkaEventSource refers to event-source for Kafka related events", "properties": { @@ -6202,15 +6275,15 @@ }, "connectionBackoff": { "description": "Backoff holds parameters applied to connection.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "consumerGroup": { "title": "Consumer group for kafka client\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaConsumerGroup" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaConsumerGroup" }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -6233,11 +6306,11 @@ }, "sasl": { "title": "SASL configuration for the kafka client\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SASLConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig" }, "tls": { "title": "TLS configuration for the kafka client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topic": { "type": "string", @@ -6253,7 +6326,7 @@ 
} } }, - "io.argoproj.events.v1alpha1.KafkaTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaTrigger": { "description": "KafkaTrigger refers to the specification of the Kafka trigger.", "type": "object", "properties": { @@ -6265,11 +6338,18 @@ "type": "integer", "title": "FlushFrequency refers to the frequency in milliseconds to flush batches.\nDefaults to 500 milliseconds.\n+optional" }, + "headers": { + "type": "object", + "title": "Headers for the Kafka Messages.\n+optional", + "additionalProperties": { + "type": "string" + } + }, "parameters": { "description": "Parameters is the list of parameters that is applied to resolved Kafka trigger object.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "partition": { @@ -6284,7 +6364,7 @@ "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "requiredAcks": { @@ -6293,15 +6373,22 @@ }, "sasl": { "title": "SASL configuration for the kafka client\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SASLConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig" }, "schemaRegistry": { "title": "Schema Registry configuration to producer message with avro format\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SchemaRegistryConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SchemaRegistryConfig" + }, + "secureHeaders": { + "type": "array", + "title": "Secure Headers stored in Kubernetes Secrets for the Kafka messages.\n+optional", + "items": { + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader" + } }, "tls": { "title": "TLS configuration for the Kafka producer.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topic": { "type": "string", @@ -6317,7 +6404,7 @@ } } }, - "io.argoproj.events.v1alpha1.LogTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.LogTrigger": { "type": "object", "properties": { "intervalSeconds": { @@ -6327,13 +6414,13 @@ } } }, - "io.argoproj.events.v1alpha1.MQTTEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.MQTTEventSource": { "type": "object", "title": "MQTTEventSource refers to event-source for MQTT related events", "properties": { "auth": { "title": "Auth hosts secret selectors for username and password\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "clientId": { "type": "string", @@ -6341,11 +6428,11 @@ }, "connectionBackoff": { "description": "ConnectionBackoff holds backoff applied to connection.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -6360,7 +6447,7 @@ }, "tls": { "title": "TLS configuration for the mqtt client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topic": { "type": "string", @@ -6372,7 +6459,7 @@ } } }, - "io.argoproj.events.v1alpha1.Metadata": { 
+ "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata": { "type": "object", "title": "Metadata holds the annotations and labels of an event source pod", "properties": { @@ -6390,13 +6477,13 @@ } } }, - "io.argoproj.events.v1alpha1.NATSAuth": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth": { "type": "object", "title": "NATSAuth refers to the auth info for NATS EventSource", "properties": { "basic": { "title": "Baisc auth with username and password\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "credential": { "title": "credential used to connect\n+optional", @@ -6412,21 +6499,21 @@ } } }, - "io.argoproj.events.v1alpha1.NATSEventsSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSEventsSource": { "type": "object", "title": "NATSEventsSource refers to event-source for NATS related events", "properties": { "auth": { "title": "Auth information\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth" }, "connectionBackoff": { "description": "ConnectionBackoff holds backoff applied to connection.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -6439,13 +6526,17 @@ "type": "string" } }, + "queue": { + "type": "string", + "title": "Queue is the name of the queue group to subscribe as if specified. Uses QueueSubscribe\nlogic to subscribe as queue group. 
If the queue is empty, uses default Subscribe logic.\n+optional" + }, "subject": { "type": "string", "title": "Subject holds the name of the subject onto which messages are published" }, "tls": { "title": "TLS configuration for the nats client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "url": { "type": "string", @@ -6453,20 +6544,24 @@ } } }, - "io.argoproj.events.v1alpha1.NATSTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSTrigger": { "description": "NATSTrigger refers to the specification of the NATS trigger.", "type": "object", "properties": { + "auth": { + "title": "AuthInformation\n+optional", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSAuth" + }, "parameters": { "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "subject": { @@ -6475,7 +6570,7 @@ }, "tls": { "title": "TLS configuration for the NATS producer.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "url": { "description": "URL of the NATS cluster.", @@ -6483,7 +6578,7 @@ } } }, - "io.argoproj.events.v1alpha1.NSQEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NSQEventSource": { "type": "object", "title": "NSQEventSource describes the event source for NSQ PubSub\nMore info at https://godoc.org/github.com/nsqio/go-nsq", "properties": { @@ -6493,11 +6588,11 @@ }, "connectionBackoff": { "title": 
"Backoff holds parameters applied to connection.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "hostAddress": { "type": "string", @@ -6516,7 +6611,7 @@ }, "tls": { "title": "TLS configuration for the nsq client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "topic": { "description": "Topic to subscribe to.", @@ -6524,7 +6619,7 @@ } } }, - "io.argoproj.events.v1alpha1.OpenWhiskTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OpenWhiskTrigger": { "description": "OpenWhiskTrigger refers to the specification of the OpenWhisk trigger.", "type": "object", "properties": { @@ -6548,14 +6643,14 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "version": { @@ -6564,7 +6659,7 @@ } } }, - "io.argoproj.events.v1alpha1.OwnedRepositories": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OwnedRepositories": { "type": "object", "properties": { "names": { @@ -6580,7 +6675,7 
@@ } } }, - "io.argoproj.events.v1alpha1.PayloadField": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PayloadField": { "description": "PayloadField binds a value at path within the event payload against a name.", "type": "object", "properties": { @@ -6594,7 +6689,7 @@ } } }, - "io.argoproj.events.v1alpha1.PubSubEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PubSubEventSource": { "description": "PubSubEventSource refers to event-source for GCP PubSub related events.", "type": "object", "properties": { @@ -6608,7 +6703,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -6639,7 +6734,7 @@ } } }, - "io.argoproj.events.v1alpha1.PulsarEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarEventSource": { "type": "object", "title": "PulsarEventSource describes the event source for Apache Pulsar", "properties": { @@ -6660,11 +6755,11 @@ }, "connectionBackoff": { "title": "Backoff holds parameters applied to connection.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -6679,7 +6774,7 @@ }, "tls": { "title": "TLS configuration for the pulsar client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "tlsAllowInsecureConnection": { "type": "boolean", @@ -6710,7 +6805,7 @@ } } }, - 
"io.argoproj.events.v1alpha1.PulsarTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarTrigger": { "description": "PulsarTrigger refers to the specification of the Pulsar trigger.", "type": "object", "properties": { @@ -6731,25 +6826,25 @@ }, "connectionBackoff": { "title": "Backoff holds parameters applied to connection.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "parameters": { "description": "Parameters is the list of parameters that is applied to resolved Kafka trigger object.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "payload": { "description": "Payload is the list of key-value extracted from an event payload to construct the request payload.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "tls": { "title": "TLS configuration for the pulsar client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "tlsAllowInsecureConnection": { "type": "boolean", @@ -6773,7 +6868,7 @@ } } }, - "io.argoproj.events.v1alpha1.RateLimit": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RateLimit": { "type": "object", "properties": { "requestsPerUnit": { @@ -6785,7 +6880,7 @@ } } }, - "io.argoproj.events.v1alpha1.RedisEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisEventSource": { "type": "object", "title": "RedisEventSource describes an event source for the Redis PubSub.\nMore info at https://godoc.org/github.com/go-redis/redis#example-PubSub", 
"properties": { @@ -6801,7 +6896,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "hostAddress": { "type": "string", @@ -6828,7 +6923,7 @@ }, "tls": { "title": "TLS configuration for the redis client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "username": { "type": "string", @@ -6836,7 +6931,7 @@ } } }, - "io.argoproj.events.v1alpha1.RedisStreamEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RedisStreamEventSource": { "type": "object", "title": "RedisStreamEventSource describes an event source for\nRedis streams (https://redis.io/topics/streams-intro)", "properties": { @@ -6850,7 +6945,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "hostAddress": { "type": "string", @@ -6880,7 +6975,7 @@ }, "tls": { "title": "TLS configuration for the redis client.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TLSConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig" }, "username": { "type": "string", @@ -6888,17 +6983,7 @@ } } }, - "io.argoproj.events.v1alpha1.Resource": { - "description": "Resource represent arbitrary structured data.", - "type": "object", - "properties": { - "value": { - "type": "string", - "format": "byte" - } - } - }, - "io.argoproj.events.v1alpha1.ResourceEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceEventSource": { "description": "ResourceEventSource refers to a event-source for K8s resource related events.", "type": "object", "properties": { @@ 
-6911,7 +6996,7 @@ }, "filter": { "title": "Filter is applied on the metadata of the resource\nIf you apply filter, then the internal event informer will only monitor objects that pass the filter.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ResourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceFilter" }, "groupVersionResource": { "title": "Group of the resource", @@ -6930,7 +7015,7 @@ } } }, - "io.argoproj.events.v1alpha1.ResourceFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ResourceFilter": { "type": "object", "title": "ResourceFilter contains K8s ObjectMeta information to further filter resource event objects", "properties": { @@ -6946,14 +7031,14 @@ "type": "array", "title": "Fields provide field filters similar to K8s field selector\n(see https://kubernetes.io/docs/concepts/overview/working-with-objects/field-selectors/).\nUnlike K8s field selector, it supports arbitrary fileds like \"spec.serviceAccountName\",\nand the value could be a string or a regex.\nSame as K8s field selector, operator \"=\", \"==\" and \"!=\" are supported.\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Selector" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector" } }, "labels": { "type": "array", "title": "Labels provide listing options to K8s API to watch resource/s.\nRefer https://kubernetes.io/docs/concepts/overview/working-with-objects/label-selectors/ for more io.argoproj.workflow.v1alpha1.\nUnlike K8s field selector, multiple values are passed as comma separated values instead of list of values.\nEg: value: value1,value2.\nSame as K8s label selector, operator \"=\", \"==\", \"!=\", \"exists\", \"!\", \"notin\", \"in\", \"gt\" and \"lt\"\nare supported\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Selector" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector" } }, "prefix": { @@ -6962,7 +7047,7 @@ } } }, - "io.argoproj.events.v1alpha1.S3Artifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Artifact": { "type": "object", "title": "S3Artifact contains information about an S3 connection and bucket", "properties": { @@ -6970,7 +7055,7 @@ "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" }, "bucket": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Bucket" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Bucket" }, "caCertificate": { "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" @@ -6985,7 +7070,7 @@ } }, "filter": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.S3Filter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Filter" }, "insecure": { "type": "boolean" @@ -7004,7 +7089,7 @@ } } }, - "io.argoproj.events.v1alpha1.S3Bucket": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Bucket": { "type": "object", "title": "S3Bucket contains information to describe an S3 Bucket", "properties": { @@ -7016,7 +7101,7 @@ } } }, - "io.argoproj.events.v1alpha1.S3Filter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.S3Filter": { "type": "object", "title": "S3Filter represents filters to apply to bucket notifications for specifying constraints on objects", "properties": { @@ -7028,7 +7113,7 @@ } } }, - "io.argoproj.events.v1alpha1.SASLConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SASLConfig": { "type": "object", "title": "SASLConfig refers to SASL configuration for a client", "properties": { @@ -7046,7 +7131,7 @@ } } }, - "io.argoproj.events.v1alpha1.SFTPEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SFTPEventSource": { "description": "SFTPEventSource describes an event-source for sftp related events.", "type": "object", "properties": { @@ 
-7060,7 +7145,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -7078,7 +7163,7 @@ "title": "PollIntervalDuration the interval at which to poll the SFTP server\ndefaults to 10 seconds\n+optional" }, "sshKeySecret": { - "title": "SSHKeySecret refers to the secret that contains SSH key", + "description": "SSHKeySecret refers to the secret that contains SSH key. Key needs to contain private key and public key.", "$ref": "#/definitions/io.k8s.api.core.v1.SecretKeySelector" }, "username": { @@ -7087,11 +7172,11 @@ }, "watchPathConfig": { "title": "WatchPathConfig contains configuration about the file path to watch", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WatchPathConfig" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig" } } }, - "io.argoproj.events.v1alpha1.SNSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SNSEventSource": { "type": "object", "title": "SNSEventSource refers to event-source for AWS SNS related events", "properties": { @@ -7105,7 +7190,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -7136,11 +7221,11 @@ }, "webhook": { "title": "Webhook configuration for http server", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.SQSEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SQSEventSource": { "type": "object", "title": "SQSEventSource refers to event-source for AWS SQS related 
events", "properties": { @@ -7158,7 +7243,7 @@ }, "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "jsonBody": { "type": "boolean", @@ -7201,13 +7286,13 @@ } } }, - "io.argoproj.events.v1alpha1.SchemaRegistryConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SchemaRegistryConfig": { "type": "object", "title": "SchemaRegistryConfig refers to configuration for a client", "properties": { "auth": { "title": "+optional\nSchemaRegistry - basic authentication", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.BasicAuth" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.BasicAuth" }, "schemaId": { "type": "integer", @@ -7219,7 +7304,7 @@ } } }, - "io.argoproj.events.v1alpha1.SecureHeader": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SecureHeader": { "type": "object", "title": "SecureHeader refers to HTTP Headers with auth tokens as values", "properties": { @@ -7228,11 +7313,11 @@ }, "valueFrom": { "title": "Values can be read from either secrets or configmaps", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ValueFromSource" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ValueFromSource" } } }, - "io.argoproj.events.v1alpha1.Selector": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Selector": { "description": "Selector represents conditional operation to select K8s objects.", "type": "object", "properties": { @@ -7250,7 +7335,7 @@ } } }, - "io.argoproj.events.v1alpha1.Sensor": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor": { "type": "object", "title": "Sensor is the definition of a sensor 
resource\n+genclient\n+genclient:noStatus\n+kubebuilder:resource:shortName=sn\n+kubebuilder:subresource:status\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object\n+k8s:openapi-gen=true", "properties": { @@ -7258,22 +7343,22 @@ "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta" }, "spec": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SensorSpec" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorSpec" }, "status": { "title": "+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SensorStatus" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorStatus" } } }, - "io.argoproj.events.v1alpha1.SensorList": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorList": { "type": "object", "title": "SensorList is the list of Sensor resources\n+k8s:deepcopy-gen:interfaces=io.k8s.apimachinery/pkg/runtime.Object", "properties": { "items": { "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } }, "metadata": { @@ -7281,7 +7366,7 @@ } } }, - "io.argoproj.events.v1alpha1.SensorSpec": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorSpec": { "type": "object", "title": "SensorSpec represents desired sensor state", "properties": { @@ -7289,7 +7374,7 @@ "description": "Dependencies is a list of the events that this sensor is dependent on.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventDependency" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventDependency" } }, "errorOnFailedRound": { @@ -7317,27 +7402,27 @@ }, "template": { "title": "Template is the pod specification for the sensor\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Template" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template" }, "triggers": { "description": "Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Trigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger" } } } }, - "io.argoproj.events.v1alpha1.SensorStatus": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorStatus": { "description": "SensorStatus contains information about the status of a sensor.", "type": "object", "properties": { "status": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Status" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status" } } }, - "io.argoproj.events.v1alpha1.Service": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Service": { "type": "object", "title": "Service holds the service information eventsource exposes", "properties": { @@ -7345,6 +7430,10 @@ "type": "string", "title": "clusterIP is the IP address of the service and is usually assigned\nrandomly by the master. If an address is specified manually and is not in\nuse by others, it will be allocated to the service; otherwise, creation\nof the service will fail. This field can not be changed through updates.\nValid values are \"None\", empty string (\"\"), or a valid IP address. \"None\"\ncan be specified for headless services when proxying is not required.\nMore info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies\n+optional" }, + "metadata": { + "title": "Metadata sets the pods's metadata, i.e. 
annotations and labels\ndefault={annotations: {}, labels: {}}", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata" + }, "ports": { "type": "array", "title": "The list of ports that are exposed by this ClusterIP service.\n+patchMergeKey=port\n+patchStrategy=merge\n+listType=map\n+listMapKey=port\n+listMapKey=protocol", @@ -7354,13 +7443,13 @@ } } }, - "io.argoproj.events.v1alpha1.SlackEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackEventSource": { "type": "object", "title": "SlackEventSource refers to event-source for Slack related events", "properties": { "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "metadata": { "type": "object", @@ -7379,11 +7468,11 @@ }, "webhook": { "title": "Webhook holds configuration for a REST endpoint", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.SlackSender": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackSender": { "type": "object", "properties": { "icon": { @@ -7396,7 +7485,7 @@ } } }, - "io.argoproj.events.v1alpha1.SlackThread": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackThread": { "type": "object", "properties": { "broadcastMessageToChannel": { @@ -7409,7 +7498,7 @@ } } }, - "io.argoproj.events.v1alpha1.SlackTrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackTrigger": { "description": "SlackTrigger refers to the specification of the slack notification trigger.", "type": "object", "properties": { @@ -7433,12 +7522,12 @@ "type": "array", "title": "Parameters is the list of key-value extracted from event's payload that are applied to\nthe trigger resource.\n+optional", 
"items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "sender": { "title": "Sender refers to additional configuration of the Slack application that sends the message.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackSender" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackSender" }, "slackToken": { "description": "SlackToken refers to the Kubernetes secret that holds the slack token required to send messages.", @@ -7446,11 +7535,11 @@ }, "thread": { "title": "Thread refers to additional options for sending messages to a Slack thread.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackThread" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackThread" } } }, - "io.argoproj.events.v1alpha1.StandardK8STrigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StandardK8STrigger": { "type": "object", "title": "StandardK8STrigger is the standard Kubernetes resource trigger", "properties": { @@ -7466,7 +7555,7 @@ "description": "Parameters is the list of parameters that is applied to resolved K8s trigger object.", "type": "array", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "patchStrategy": { @@ -7475,11 +7564,11 @@ }, "source": { "title": "Source of the K8s resource file(s)", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArtifactLocation" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArtifactLocation" } } }, - "io.argoproj.events.v1alpha1.Status": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Status": { "description": "Status is a common structure which can be used for Status field.", "type": "object", "properties": 
{ @@ -7487,12 +7576,12 @@ "type": "array", "title": "Conditions are the latest available observations of a resource's current state.\n+optional\n+patchMergeKey=type\n+patchStrategy=merge", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Condition" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Condition" } } } }, - "io.argoproj.events.v1alpha1.StatusPolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StatusPolicy": { "type": "object", "title": "StatusPolicy refers to the policy used to check the state of the trigger using response status", "properties": { @@ -7505,7 +7594,7 @@ } } }, - "io.argoproj.events.v1alpha1.StorageGridEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridEventSource": { "type": "object", "title": "StorageGridEventSource refers to event-source for StorageGrid related events", "properties": { @@ -7529,7 +7618,7 @@ }, "filter": { "description": "Filter on object key which caused the notification.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StorageGridFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridFilter" }, "metadata": { "type": "object", @@ -7548,11 +7637,11 @@ }, "webhook": { "title": "Webhook holds configuration for a REST endpoint", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.StorageGridFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StorageGridFilter": { "type": "object", "title": "StorageGridFilter represents filters to apply to bucket notifications for specifying constraints on objects\n+k8s:openapi-gen=true", "properties": { @@ -7564,7 +7653,7 @@ } } }, - "io.argoproj.events.v1alpha1.StripeEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StripeEventSource": { 
"type": "object", "title": "StripeEventSource describes the event source for stripe webhook notifications\nMore info at https://stripe.com/docs/webhooks", "properties": { @@ -7592,11 +7681,11 @@ }, "webhook": { "title": "Webhook holds configuration for a REST endpoint", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" } } }, - "io.argoproj.events.v1alpha1.TLSConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TLSConfig": { "description": "TLSConfig refers to TLS configuration for a client.", "type": "object", "properties": { @@ -7618,9 +7707,9 @@ } } }, - "io.argoproj.events.v1alpha1.Template": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Template": { "type": "object", - "title": "Template holds the information of a sensor deployment template", + "title": "Template holds the information of a deployment template", "properties": { "affinity": { "title": "If specified, the pod's scheduling constraints\n+optional", @@ -7628,7 +7717,7 @@ }, "container": { "title": "Container is the main container image to run in the sensor pod\n+optional", - "$ref": "#/definitions/io.k8s.api.core.v1.Container" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Container" }, "imagePullSecrets": { "type": "array", @@ -7639,7 +7728,7 @@ }, "metadata": { "title": "Metadata sets the pods's metadata, i.e. 
annotations and labels", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Metadata" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Metadata" }, "nodeSelector": { "type": "object", @@ -7680,7 +7769,7 @@ } } }, - "io.argoproj.events.v1alpha1.TimeFilter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TimeFilter": { "description": "TimeFilter describes a window in time.\nIt filters out events that occur outside the time limits.\nIn other words, only events that occur after Start and before Stop\nwill pass this filter.", "type": "object", "properties": { @@ -7694,7 +7783,7 @@ } } }, - "io.argoproj.events.v1alpha1.Trigger": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger": { "type": "object", "title": "Trigger is an action taken, output produced, an event created, a message sent", "properties": { @@ -7702,32 +7791,36 @@ "type": "boolean", "title": "AtLeastOnce determines the trigger execution semantics.\nDefaults to false. Trigger execution will use at-most-once semantics.\nIf set to true, Trigger execution will switch to at-least-once semantics.\n+kubebuilder:default=false\n+optional" }, + "dlqTrigger": { + "title": "If the trigger fails, it will retry up to the configured number of\nretries. If the maximum retries are reached and the trigger is set to\nexecute atLeastOnce, the dead letter queue (DLQ) trigger will be invoked if\nspecified. 
Invoking the dead letter queue trigger helps prevent data\nloss.\n+optional", + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Trigger" + }, "parameters": { "type": "array", "title": "Parameters is the list of parameters applied to the trigger template definition", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter" } }, "policy": { "title": "Policy to configure backoff and execution criteria for the trigger\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerPolicy" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerPolicy" }, "rateLimit": { "title": "Rate limit, default unit is Second\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.RateLimit" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.RateLimit" }, "retryStrategy": { "title": "Retry strategy, defaults to no retry\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Backoff" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Backoff" }, "template": { "description": "Template describes the trigger specification.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerTemplate" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerTemplate" } } }, - "io.argoproj.events.v1alpha1.TriggerParameter": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameter": { "type": "object", "title": "TriggerParameter indicates a passed parameter to a service template", "properties": { @@ -7741,11 +7834,11 @@ }, "src": { "title": "Src contains a source reference to the value of the parameter from a dependency", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.TriggerParameterSource" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameterSource" } } }, - "io.argoproj.events.v1alpha1.TriggerParameterSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerParameterSource": { "type": "object", "title": "TriggerParameterSource defines the source for a parameter from a event event", "properties": { @@ -7779,39 +7872,39 @@ } } }, - "io.argoproj.events.v1alpha1.TriggerPolicy": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerPolicy": { "type": "object", "title": "TriggerPolicy dictates the policy for the trigger retries", "properties": { "k8s": { "title": "K8SResourcePolicy refers to the policy used to check the state of K8s based triggers using using labels", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.K8SResourcePolicy" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.K8SResourcePolicy" }, "status": { "title": "Status refers to the policy used to check the state of the trigger using response status", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StatusPolicy" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StatusPolicy" } } }, - "io.argoproj.events.v1alpha1.TriggerTemplate": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.TriggerTemplate": { "description": "TriggerTemplate is the template that describes trigger specification.", "type": "object", "properties": { "argoWorkflow": { "title": "ArgoWorkflow refers to the trigger that can perform various operations on an Argo io.argoproj.workflow.v1alpha1.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ArgoWorkflowTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ArgoWorkflowTrigger" }, "awsLambda": { "title": "AWSLambda refers to the trigger designed to invoke AWS Lambda function with with on-the-fly constructable payload.\n+optional", - "$ref": 
"#/definitions/io.argoproj.events.v1alpha1.AWSLambdaTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AWSLambdaTrigger" }, "azureEventHubs": { "title": "AzureEventHubs refers to the trigger send an event to an Azure Event Hub.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureEventHubsTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureEventHubsTrigger" }, "azureServiceBus": { "title": "AzureServiceBus refers to the trigger designed to place messages on Azure Service Bus\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.AzureServiceBusTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.AzureServiceBusTrigger" }, "conditions": { "type": "string", @@ -7821,32 +7914,32 @@ "type": "array", "title": "Criteria to reset the conditons\n+optional", "items": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.ConditionsResetCriteria" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ConditionsResetCriteria" } }, "custom": { "title": "CustomTrigger refers to the trigger designed to connect to a gRPC trigger server and execute a custom trigger.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.CustomTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.CustomTrigger" }, "email": { "title": "Email refers to the trigger designed to send an email notification\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EmailTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EmailTrigger" }, "http": { "title": "HTTP refers to the trigger designed to dispatch a HTTP request with on-the-fly constructable payload.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.HTTPTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.HTTPTrigger" }, "k8s": 
{ "title": "StandardK8STrigger refers to the trigger designed to create or update a generic Kubernetes resource.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.StandardK8STrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.StandardK8STrigger" }, "kafka": { "description": "Kafka refers to the trigger designed to place messages on Kafka topic.\n+optional.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.KafkaTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.KafkaTrigger" }, "log": { "title": "Log refers to the trigger designed to invoke log the io.argoproj.workflow.v1alpha1.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.LogTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.LogTrigger" }, "name": { "description": "Name is a unique name of the action to take.", @@ -7854,23 +7947,23 @@ }, "nats": { "description": "NATS refers to the trigger designed to place message on NATS subject.\n+optional.", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.NATSTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.NATSTrigger" }, "openWhisk": { "title": "OpenWhisk refers to the trigger designed to invoke OpenWhisk action.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.OpenWhiskTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.OpenWhiskTrigger" }, "pulsar": { "title": "Pulsar refers to the trigger designed to place messages on Pulsar topic.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.PulsarTrigger" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.PulsarTrigger" }, "slack": { "title": "Slack refers to the trigger designed to send slack notification message.\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.SlackTrigger" + "$ref": 
"#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SlackTrigger" } } }, - "io.argoproj.events.v1alpha1.URLArtifact": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.URLArtifact": { "description": "URLArtifact contains information about an artifact at an http endpoint.", "type": "object", "properties": { @@ -7884,7 +7977,7 @@ } } }, - "io.argoproj.events.v1alpha1.ValueFromSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.ValueFromSource": { "type": "object", "title": "ValueFromSource allows you to reference keys from either a Configmap or Secret", "properties": { @@ -7896,7 +7989,7 @@ } } }, - "io.argoproj.events.v1alpha1.WatchPathConfig": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WatchPathConfig": { "type": "object", "properties": { "directory": { @@ -7913,7 +8006,7 @@ } } }, - "io.argoproj.events.v1alpha1.WebhookContext": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext": { "type": "object", "title": "WebhookContext holds a general purpose REST API context", "properties": { @@ -7958,16 +8051,71 @@ } } }, - "io.argoproj.events.v1alpha1.WebhookEventSource": { + "github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookEventSource": { "type": "object", "title": "CalendarEventSource describes an HTTP based EventSource", "properties": { "filter": { "title": "Filter\n+optional", - "$ref": "#/definitions/io.argoproj.events.v1alpha1.EventSourceFilter" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceFilter" }, "webhookContext": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.WebhookContext" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.WebhookContext" + } + } + }, + "google.protobuf.Any": { + "type": "object", + "properties": { + "type_url": { + "type": "string" + }, + "value": { + "type": "string", + "format": "byte" + } + } + }, + "grpc.gateway.runtime.Error": { + "type": 
"object", + "properties": { + "code": { + "type": "integer" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/google.protobuf.Any" + } + }, + "error": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "grpc.gateway.runtime.StreamError": { + "type": "object", + "properties": { + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/google.protobuf.Any" + } + }, + "grpc_code": { + "type": "integer" + }, + "http_code": { + "type": "integer" + }, + "http_status": { + "type": "string" + }, + "message": { + "type": "string" } } }, @@ -13428,6 +13576,7 @@ } }, "io.k8s.api.core.v1.GRPCAction": { + "description": "GRPCAction specifies an action involving a GRPC service.", "type": "object", "required": [ "port" @@ -13634,6 +13783,20 @@ } } }, + "io.k8s.api.core.v1.ImageVolumeSource": { + "description": "ImageVolumeSource represents a image volume resource.", + "type": "object", + "properties": { + "pullPolicy": { + "description": "Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise.", + "type": "string" + }, + "reference": { + "description": "Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.", + "type": "string" + } + } + }, "io.k8s.api.core.v1.KeyToPath": { "description": "Maps a string key to a path within a volume.", "type": "object", @@ -13675,19 +13838,19 @@ "type": "object", "properties": { "exec": { - "description": "Exec specifies the action to take.", + "description": "Exec specifies a command to execute in the container.", "$ref": "#/definitions/io.k8s.api.core.v1.ExecAction" }, "httpGet": { - "description": "HTTPGet specifies the http request to perform.", + "description": "HTTPGet specifies an HTTP GET request to perform.", "$ref": "#/definitions/io.k8s.api.core.v1.HTTPGetAction" }, "sleep": { - "description": "Sleep represents the duration that the container should sleep before being terminated.", + "description": "Sleep represents a duration that the container should sleep.", "$ref": "#/definitions/io.k8s.api.core.v1.SleepAction" }, "tcpSocket": { - "description": "Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for the backward compatibility. There are no validation of this field and lifecycle hooks will fail in runtime when tcp handler is specified.", + "description": "Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for backward compatibility. There is no validation of this field and lifecycle hooks will fail at runtime when it is specified.", "$ref": "#/definitions/io.k8s.api.core.v1.TCPSocketAction" } } @@ -13938,9 +14101,11 @@ "type": "string" }, "status": { + "description": "Status is the status of the condition. Can be True, False, Unknown. 
More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required", "type": "string" }, "type": { + "description": "Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about", "type": "string" } } @@ -13978,7 +14143,7 @@ "type": "string" }, "volumeAttributesClassName": { - "description": "volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.", + "description": "volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. 
An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).", "type": "string" }, "volumeMode": { @@ -14039,11 +14204,11 @@ "x-kubernetes-patch-strategy": "merge" }, "currentVolumeAttributesClassName": { - "description": "currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature.", + "description": "currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default).", "type": "string" }, "modifyVolumeStatus": { - "description": "ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. This is an alpha field and requires enabling VolumeAttributesClass feature.", + "description": "ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. 
This is a beta field and requires enabling VolumeAttributesClass feature (off by default).", "$ref": "#/definitions/io.k8s.api.core.v1.ModifyVolumeStatus" }, "phase": { @@ -14137,7 +14302,7 @@ "$ref": "#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.LabelSelector" }, "matchLabelKeys": { - "description": "MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.", + "description": "MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).", "type": "array", "items": { "type": "string" @@ -14145,7 +14310,7 @@ "x-kubernetes-list-type": "atomic" }, "mismatchLabelKeys": { - "description": "MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.", + "description": "MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).", "type": "array", "items": { "type": "string" @@ -14227,10 +14392,11 @@ "type": "object", "properties": { "name": { - "description": "Required.", + "description": "Name is this DNS resolver option's name. Required.", "type": "string" }, "value": { + "description": "Value is this DNS resolver option's value.", "type": "string" } } @@ -14263,6 +14429,10 @@ "description": "The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. 
Note that this field cannot be set when spec.os.name is windows.", "type": "integer" }, + "seLinuxChangePolicy": { + "description": "seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. It has no effect on nodes that do not support SELinux or to volumes does not support SELinux. Valid values are \"MountOption\" and \"Recursive\".\n\n\"Recursive\" means relabeling of all files on all Pod volumes by the container runtime. This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node.\n\n\"MountOption\" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. \"MountOption\" value is allowed only when SELinuxMount feature gate is enabled.\n\nIf not specified and SELinuxMount feature gate is enabled, \"MountOption\" is used. If not specified and SELinuxMount feature gate is disabled, \"MountOption\" is used for ReadWriteOncePod volumes and \"Recursive\" for all other volumes.\n\nThis field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers.\n\nAll Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows.", + "type": "string" + }, "seLinuxOptions": { "description": "The SELinux context to be applied to all containers. If unspecified, the container runtime will allocate a random SELinux context for each container. May also be set in SecurityContext. 
If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.", "$ref": "#/definitions/io.k8s.api.core.v1.SELinuxOptions" @@ -14272,7 +14442,7 @@ "$ref": "#/definitions/io.k8s.api.core.v1.SeccompProfile" }, "supplementalGroups": { - "description": "A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows.", + "description": "A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows.", "type": "array", "items": { "type": "integer", @@ -14280,6 +14450,10 @@ }, "x-kubernetes-list-type": "atomic" }, + "supplementalGroupsPolicy": { + "description": "Defines how supplemental groups of the first container processes are calculated. Valid values are \"Merge\" and \"Strict\". If not specified, \"Merge\" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. 
Note that this field cannot be set when spec.os.name is windows.", + "type": "string" + }, "sysctls": { "description": "Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.", "type": "array", @@ -14338,7 +14512,7 @@ "type": "object", "properties": { "exec": { - "description": "Exec specifies the action to take.", + "description": "Exec specifies a command to execute in the container.", "$ref": "#/definitions/io.k8s.api.core.v1.ExecAction" }, "failureThreshold": { @@ -14346,11 +14520,11 @@ "type": "integer" }, "grpc": { - "description": "GRPC specifies an action involving a GRPC port.", + "description": "GRPC specifies a GRPC HealthCheckRequest.", "$ref": "#/definitions/io.k8s.api.core.v1.GRPCAction" }, "httpGet": { - "description": "HTTPGet specifies the http request to perform.", + "description": "HTTPGet specifies an HTTP GET request to perform.", "$ref": "#/definitions/io.k8s.api.core.v1.HTTPGetAction" }, "initialDelaySeconds": { @@ -14366,7 +14540,7 @@ "type": "integer" }, "tcpSocket": { - "description": "TCPSocket specifies an action involving a TCP port.", + "description": "TCPSocket specifies a connection to a TCP port.", "$ref": "#/definitions/io.k8s.api.core.v1.TCPSocketAction" }, "terminationGracePeriodSeconds": { @@ -14388,7 +14562,7 @@ "type": "integer" }, "sources": { - "description": "sources is the list of volume projections", + "description": "sources is the list of volume projections. Each entry in this list handles one source.", "type": "array", "items": { "$ref": "#/definitions/io.k8s.api.core.v1.VolumeProjection" @@ -14487,6 +14661,10 @@ "name": { "description": "Name must match the name of one entry in pod.spec.resourceClaims of the Pod where this field is used. 
It makes that resource available inside a container.", "type": "string" + }, + "request": { + "description": "Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request.", + "type": "string" } } }, @@ -14746,7 +14924,7 @@ "type": "boolean" }, "procMount": { - "description": "procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.", + "description": "procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.", "type": "string" }, "readOnlyRootFilesystem": { @@ -14957,6 +15135,7 @@ "x-kubernetes-map-type": "atomic" }, "io.k8s.api.core.v1.TypedObjectReference": { + "description": "TypedObjectReference contains enough information to let you locate the typed referenced object", "type": "object", "required": [ "kind", @@ -14989,23 +15168,23 @@ ], "properties": { "awsElasticBlockStore": { - "description": "awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + "description": "awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: AWSElasticBlockStore is deprecated. All operations for the in-tree awsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", "$ref": "#/definitions/io.k8s.api.core.v1.AWSElasticBlockStoreVolumeSource" }, "azureDisk": { - "description": "azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.", + "description": "azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. Deprecated: AzureDisk is deprecated. All operations for the in-tree azureDisk type are redirected to the disk.csi.azure.com CSI driver.", "$ref": "#/definitions/io.k8s.api.core.v1.AzureDiskVolumeSource" }, "azureFile": { - "description": "azureFile represents an Azure File Service mount on the host and bind mount to the pod.", + "description": "azureFile represents an Azure File Service mount on the host and bind mount to the pod. Deprecated: AzureFile is deprecated. All operations for the in-tree azureFile type are redirected to the file.csi.azure.com CSI driver.", "$ref": "#/definitions/io.k8s.api.core.v1.AzureFileVolumeSource" }, "cephfs": { - "description": "cephFS represents a Ceph FS mount on the host that shares a pod's lifetime", + "description": "cephFS represents a Ceph FS mount on the host that shares a pod's lifetime. Deprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.CephFSVolumeSource" }, "cinder": { - "description": "cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md", + "description": "cinder represents a cinder volume attached and mounted on kubelets host machine. Deprecated: Cinder is deprecated. All operations for the in-tree cinder type are redirected to the cinder.csi.openstack.org CSI driver. 
More info: https://examples.k8s.io/mysql-cinder-pd/README.md", "$ref": "#/definitions/io.k8s.api.core.v1.CinderVolumeSource" }, "configMap": { @@ -15013,7 +15192,7 @@ "$ref": "#/definitions/io.k8s.api.core.v1.ConfigMapVolumeSource" }, "csi": { - "description": "csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).", + "description": "csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers.", "$ref": "#/definitions/io.k8s.api.core.v1.CSIVolumeSource" }, "downwardAPI": { @@ -15033,29 +15212,33 @@ "$ref": "#/definitions/io.k8s.api.core.v1.FCVolumeSource" }, "flexVolume": { - "description": "flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin.", + "description": "flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. Deprecated: FlexVolume is deprecated. Consider using a CSIDriver instead.", "$ref": "#/definitions/io.k8s.api.core.v1.FlexVolumeSource" }, "flocker": { - "description": "flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running", + "description": "flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running. Deprecated: Flocker is deprecated and the in-tree flocker type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.FlockerVolumeSource" }, "gcePersistentDisk": { - "description": "gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + "description": "gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: GCEPersistentDisk is deprecated. 
All operations for the in-tree gcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", "$ref": "#/definitions/io.k8s.api.core.v1.GCEPersistentDiskVolumeSource" }, "gitRepo": { - "description": "gitRepo represents a git repository at a particular revision. DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container.", + "description": "gitRepo represents a git repository at a particular revision. Deprecated: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container.", "$ref": "#/definitions/io.k8s.api.core.v1.GitRepoVolumeSource" }, "glusterfs": { - "description": "glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/glusterfs/README.md", + "description": "glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. Deprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported. More info: https://examples.k8s.io/volumes/glusterfs/README.md", "$ref": "#/definitions/io.k8s.api.core.v1.GlusterfsVolumeSource" }, "hostPath": { "description": "hostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath", "$ref": "#/definitions/io.k8s.api.core.v1.HostPathVolumeSource" }, + "image": { + "description": "image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine. 
The volume is resolved at pod startup depending on which PullPolicy value is provided:\n\n- Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. - Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. - IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails.\n\nThe volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation. A failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message. The types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field. The OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images. The volume will be mounted read-only (ro) and non-executable files (noexec). Sub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath). The field spec.securityContext.fsGroupChangePolicy has no effect on this volume type.", + "$ref": "#/definitions/io.k8s.api.core.v1.ImageVolumeSource" + }, "iscsi": { "description": "iscsi represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. 
More info: https://examples.k8s.io/volumes/iscsi/README.md", "$ref": "#/definitions/io.k8s.api.core.v1.ISCSIVolumeSource" @@ -15073,11 +15256,11 @@ "$ref": "#/definitions/io.k8s.api.core.v1.PersistentVolumeClaimVolumeSource" }, "photonPersistentDisk": { - "description": "photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", + "description": "photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine. Deprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.PhotonPersistentDiskVolumeSource" }, "portworxVolume": { - "description": "portworxVolume represents a portworx volume attached and mounted on kubelets host machine", + "description": "portworxVolume represents a portworx volume attached and mounted on kubelets host machine. Deprecated: PortworxVolume is deprecated. All operations for the in-tree portworxVolume type are redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate is on.", "$ref": "#/definitions/io.k8s.api.core.v1.PortworxVolumeSource" }, "projected": { @@ -15085,15 +15268,15 @@ "$ref": "#/definitions/io.k8s.api.core.v1.ProjectedVolumeSource" }, "quobyte": { - "description": "quobyte represents a Quobyte mount on the host that shares a pod's lifetime", + "description": "quobyte represents a Quobyte mount on the host that shares a pod's lifetime. Deprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.QuobyteVolumeSource" }, "rbd": { - "description": "rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/rbd/README.md", + "description": "rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. 
Deprecated: RBD is deprecated and the in-tree rbd type is no longer supported. More info: https://examples.k8s.io/volumes/rbd/README.md", "$ref": "#/definitions/io.k8s.api.core.v1.RBDVolumeSource" }, "scaleIO": { - "description": "scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.", + "description": "scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. Deprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.ScaleIOVolumeSource" }, "secret": { @@ -15101,11 +15284,11 @@ "$ref": "#/definitions/io.k8s.api.core.v1.SecretVolumeSource" }, "storageos": { - "description": "storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.", + "description": "storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. Deprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported.", "$ref": "#/definitions/io.k8s.api.core.v1.StorageOSVolumeSource" }, "vsphereVolume": { - "description": "vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine", + "description": "vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine. Deprecated: VsphereVolume is deprecated. All operations for the in-tree vsphereVolume type are redirected to the csi.vsphere.vmware.com CSI driver.", "$ref": "#/definitions/io.k8s.api.core.v1.VsphereVirtualDiskVolumeSource" } } @@ -15167,7 +15350,7 @@ } }, "io.k8s.api.core.v1.VolumeProjection": { - "description": "Projection that may be projected along with other supported volume types", + "description": "Projection that may be projected along with other supported volume types. 
Exactly one of these fields must be set.", "type": "object", "properties": { "clusterTrustBundle": { @@ -15599,7 +15782,7 @@ "type": "string" }, "sensor": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } } }, @@ -15643,7 +15826,7 @@ "type": "object", "properties": { "object": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" }, "type": { "type": "string" @@ -15660,7 +15843,7 @@ "type": "string" }, "sensor": { - "$ref": "#/definitions/io.argoproj.events.v1alpha1.Sensor" + "$ref": "#/definitions/github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor" } } } diff --git a/cmd/argo/commands/archive/get.go b/cmd/argo/commands/archive/get.go index 07dcbd860ef4..5f891e2e5bf6 100644 --- a/cmd/argo/commands/archive/get.go +++ b/cmd/argo/commands/archive/get.go @@ -70,8 +70,8 @@ func printWorkflow(wf *wfv1.Workflow, output string) { fmt.Println(string(output)) default: const fmtStr = "%-20s %v\n" - fmt.Printf(fmtStr, "Name:", wf.ObjectMeta.Name) - fmt.Printf(fmtStr, "Namespace:", wf.ObjectMeta.Namespace) + fmt.Printf(fmtStr, "Name:", wf.Name) + fmt.Printf(fmtStr, "Namespace:", wf.Namespace) serviceAccount := wf.GetExecSpec().ServiceAccountName if serviceAccount == "" { // if serviceAccountName was not specified in a submitted Workflow, we will @@ -85,7 +85,7 @@ func printWorkflow(wf *wfv1.Workflow, output string) { if wf.Status.Message != "" { fmt.Printf(fmtStr, "Message:", wf.Status.Message) } - fmt.Printf(fmtStr, "Created:", humanize.Timestamp(wf.ObjectMeta.CreationTimestamp.Time)) + fmt.Printf(fmtStr, "Created:", humanize.Timestamp(wf.CreationTimestamp.Time)) if !wf.Status.StartedAt.IsZero() { fmt.Printf(fmtStr, "Started:", humanize.Timestamp(wf.Status.StartedAt.Time)) } diff --git a/cmd/argo/commands/clustertemplate/list.go 
b/cmd/argo/commands/clustertemplate/list.go index 9bd2cee48362..b5ade00cc8bf 100644 --- a/cmd/argo/commands/clustertemplate/list.go +++ b/cmd/argo/commands/clustertemplate/list.go @@ -48,7 +48,7 @@ func NewListCommand() *cobra.Command { printTable(cwftmplList.Items) case "name": for _, cwftmp := range cwftmplList.Items { - fmt.Println(cwftmp.ObjectMeta.Name) + fmt.Println(cwftmp.Name) } default: return fmt.Errorf("Unknown output mode: %s", output.String()) @@ -65,7 +65,7 @@ func printTable(wfList []wfv1.ClusterWorkflowTemplate) { _, _ = fmt.Fprint(w, "NAME") _, _ = fmt.Fprint(w, "\n") for _, wf := range wfList { - _, _ = fmt.Fprintf(w, "%s\t", wf.ObjectMeta.Name) + _, _ = fmt.Fprintf(w, "%s\t", wf.Name) _, _ = fmt.Fprintf(w, "\n") } _ = w.Flush() diff --git a/cmd/argo/commands/clustertemplate/util.go b/cmd/argo/commands/clustertemplate/util.go index e2e414fa23ed..5b9e71910f07 100644 --- a/cmd/argo/commands/clustertemplate/util.go +++ b/cmd/argo/commands/clustertemplate/util.go @@ -58,7 +58,7 @@ func unmarshalClusterWorkflowTemplates(wfBytes []byte, strict bool) ([]wfv1.Clus func printClusterWorkflowTemplate(wf *wfv1.ClusterWorkflowTemplate, outFmt string) { switch outFmt { case "name": - fmt.Println(wf.ObjectMeta.Name) + fmt.Println(wf.Name) case "json": outBytes, _ := json.MarshalIndent(wf, "", " ") fmt.Println(string(outBytes)) @@ -74,6 +74,6 @@ func printClusterWorkflowTemplate(wf *wfv1.ClusterWorkflowTemplate, outFmt strin func printClusterWorkflowTemplateHelper(wf *wfv1.ClusterWorkflowTemplate) { const fmtStr = "%-20s %v\n" - fmt.Printf(fmtStr, "Name:", wf.ObjectMeta.Name) - fmt.Printf(fmtStr, "Created:", humanize.Timestamp(wf.ObjectMeta.CreationTimestamp.Time)) + fmt.Printf(fmtStr, "Name:", wf.Name) + fmt.Printf(fmtStr, "Created:", humanize.Timestamp(wf.CreationTimestamp.Time)) } diff --git a/cmd/argo/commands/common/common.go b/cmd/argo/commands/common/common.go index e6d96e67f66a..16d05af59727 100644 --- a/cmd/argo/commands/common/common.go +++ 
b/cmd/argo/commands/common/common.go @@ -86,7 +86,7 @@ func ansiColorCode(s string) int { for _, c := range s { i += int(c) } - colors := []int{FgRed, FgGreen, FgYellow, FgBlue, FgMagenta, FgCyan, FgWhite} + colors := []int{FgGreen, FgYellow, FgBlue, FgMagenta, FgCyan, FgWhite} return colors[i%len(colors)] } diff --git a/cmd/argo/commands/common/common_test.go b/cmd/argo/commands/common/common_test.go index 65eb5c3b66ca..e38961e9faed 100644 --- a/cmd/argo/commands/common/common_test.go +++ b/cmd/argo/commands/common/common_test.go @@ -8,8 +8,7 @@ import ( func Test_ansiColorCode(t *testing.T) { // check we get a nice range of colours - assert.Equal(t, FgYellow, ansiColorCode("foo")) - assert.Equal(t, FgGreen, ansiColorCode("bar")) - assert.Equal(t, FgYellow, ansiColorCode("baz")) - assert.Equal(t, FgRed, ansiColorCode("qux")) + assert.Equal(t, FgGreen, ansiColorCode("foo")) + assert.Equal(t, FgMagenta, ansiColorCode("bar")) + assert.Equal(t, FgWhite, ansiColorCode("baz")) } diff --git a/cmd/argo/commands/common/get.go b/cmd/argo/commands/common/get.go index 4e2422ad86de..6b194ad73e92 100644 --- a/cmd/argo/commands/common/get.go +++ b/cmd/argo/commands/common/get.go @@ -52,8 +52,8 @@ func (g GetFlags) shouldPrint(node wfv1.NodeStatus) bool { func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { const fmtStr = "%-20s %v\n" out := "" - out += fmt.Sprintf(fmtStr, "Name:", wf.ObjectMeta.Name) - out += fmt.Sprintf(fmtStr, "Namespace:", wf.ObjectMeta.Namespace) + out += fmt.Sprintf(fmtStr, "Name:", wf.Name) + out += fmt.Sprintf(fmtStr, "Namespace:", wf.Namespace) serviceAccount := wf.GetExecSpec().ServiceAccountName if serviceAccount == "" { // if serviceAccountName was not specified in a submitted Workflow, we will @@ -74,7 +74,7 @@ func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { if len(wf.Status.Conditions) > 0 { out += wf.Status.Conditions.DisplayString(fmtStr, WorkflowConditionIconMap) } - out += fmt.Sprintf(fmtStr, "Created:", 
humanize.Timestamp(wf.ObjectMeta.CreationTimestamp.Time)) + out += fmt.Sprintf(fmtStr, "Created:", humanize.Timestamp(wf.CreationTimestamp.Time)) if !wf.Status.StartedAt.IsZero() { out += fmt.Sprintf(fmtStr, "Started:", humanize.Timestamp(wf.Status.StartedAt.Time)) } @@ -139,7 +139,7 @@ func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { printTree := true if wf.Status.Nodes == nil { printTree = false - } else if _, ok := wf.Status.Nodes[wf.ObjectMeta.Name]; !ok { + } else if _, ok := wf.Status.Nodes[wf.Name]; !ok { printTree = false } if printTree { @@ -159,13 +159,13 @@ func PrintWorkflowHelper(wf *wfv1.Workflow, getArgs GetFlags) string { roots := convertToRenderTrees(wf) // Print main and onExit Trees - mainRoot := roots[wf.ObjectMeta.Name] + mainRoot := roots[wf.Name] if mainRoot == nil { panic("failed to get the entrypoint node") } mainRoot.renderNodes(w, wf, 0, " ", " ", getArgs) - onExitID := wf.NodeID(wf.ObjectMeta.Name + "." + onExitSuffix) + onExitID := wf.NodeID(wf.Name + "." 
+ onExitSuffix) if onExitRoot, ok := roots[onExitID]; ok { _, _ = fmt.Fprintf(w, "\t\t\t\t\t\n") onExitRoot.renderNodes(w, wf, 0, " ", " ", getArgs) @@ -523,7 +523,7 @@ func (nodeInfo *boundaryNode) renderNodes(w *tabwriter.Writer, wf *wfv1.Workflow filtered, childIndent := filterNode(nodeInfo.getNodeStatus(wf), getArgs) if !filtered { version := util.GetWorkflowPodNameVersion(wf) - printNode(w, nodeInfo.getNodeStatus(wf), wf.ObjectMeta.Name, nodePrefix, getArgs, version) + printNode(w, nodeInfo.getNodeStatus(wf), wf.Name, nodePrefix, getArgs, version) } for i, nInfo := range nodeInfo.boundaryContained { @@ -537,7 +537,7 @@ func (nodeInfo *nonBoundaryParentNode) renderNodes(w *tabwriter.Writer, wf *wfv1 filtered, childIndent := filterNode(nodeInfo.getNodeStatus(wf), getArgs) if !filtered { version := util.GetWorkflowPodNameVersion(wf) - printNode(w, nodeInfo.getNodeStatus(wf), wf.ObjectMeta.Name, nodePrefix, getArgs, version) + printNode(w, nodeInfo.getNodeStatus(wf), wf.Name, nodePrefix, getArgs, version) } for i, nInfo := range nodeInfo.children { @@ -551,7 +551,7 @@ func (nodeInfo *executionNode) renderNodes(w *tabwriter.Writer, wf *wfv1.Workflo filtered, _ := filterNode(nodeInfo.getNodeStatus(wf), getArgs) if !filtered { version := util.GetWorkflowPodNameVersion(wf) - printNode(w, nodeInfo.getNodeStatus(wf), wf.ObjectMeta.Name, nodePrefix, getArgs, version) + printNode(w, nodeInfo.getNodeStatus(wf), wf.Name, nodePrefix, getArgs, version) } } diff --git a/cmd/argo/commands/common/submit.go b/cmd/argo/commands/common/submit.go index 0867c980dfda..e56469a7d0af 100644 --- a/cmd/argo/commands/common/submit.go +++ b/cmd/argo/commands/common/submit.go @@ -41,7 +41,7 @@ func WaitWatchOrLog(ctx context.Context, serviceClient workflowpkg.WorkflowServi } } if cliSubmitOpts.Wait { - WaitWorkflows(ctx, serviceClient, namespace, workflowNames, false, !(cliSubmitOpts.Output.String() == "" || cliSubmitOpts.Output.String() == "wide")) + WaitWorkflows(ctx, serviceClient, 
namespace, workflowNames, false, cliSubmitOpts.Output.String() != "" && cliSubmitOpts.Output.String() != "wide") } else if cliSubmitOpts.Watch { for _, workflow := range workflowNames { if err := WatchWorkflow(ctx, serviceClient, namespace, workflow, cliSubmitOpts.GetArgs); err != nil { diff --git a/cmd/argo/commands/cron/create.go b/cmd/argo/commands/cron/create.go index 592289d0beb8..273a6db0d327 100644 --- a/cmd/argo/commands/cron/create.go +++ b/cmd/argo/commands/cron/create.go @@ -73,13 +73,13 @@ func CreateCronWorkflows(ctx context.Context, filePaths []string, cliOpts *cliCr // We have only copied the workflow spec to the cron workflow but not the metadata // that includes name and generateName. Here we copy the metadata to the cron // workflow's metadata and remove the unnecessary and mutually exclusive part. - if generateName := newWf.ObjectMeta.GenerateName; generateName != "" { - cronWf.ObjectMeta.GenerateName = generateName - cronWf.ObjectMeta.Name = "" + if generateName := newWf.GenerateName; generateName != "" { + cronWf.GenerateName = generateName + cronWf.Name = "" } - if name := newWf.ObjectMeta.Name; name != "" { - cronWf.ObjectMeta.Name = name - cronWf.ObjectMeta.GenerateName = "" + if name := newWf.Name; name != "" { + cronWf.Name = name + cronWf.GenerateName = "" } if cronWf.Namespace == "" { cronWf.Namespace = client.Namespace() diff --git a/cmd/argo/commands/cron/list.go b/cmd/argo/commands/cron/list.go index f1c2c272293a..ab857e1076a1 100644 --- a/cmd/argo/commands/cron/list.go +++ b/cmd/argo/commands/cron/list.go @@ -57,7 +57,7 @@ func NewListCommand() *cobra.Command { printTable(ctx, cronWfList.Items, &listArgs) case "name": for _, cronWf := range cronWfList.Items { - fmt.Println(cronWf.ObjectMeta.Name) + fmt.Println(cronWf.Name) } default: return fmt.Errorf("Unknown output mode: %s", listArgs.output.String()) @@ -80,7 +80,7 @@ func printTable(ctx context.Context, wfList []wfv1.CronWorkflow, listArgs *listF _, _ = fmt.Fprint(w, "\n") for _, 
cwf := range wfList { if listArgs.allNamespaces { - _, _ = fmt.Fprintf(w, "%s\t", cwf.ObjectMeta.Namespace) + _, _ = fmt.Fprintf(w, "%s\t", cwf.Namespace) } var cleanLastScheduledTime string if cwf.Status.LastScheduledTime != nil { @@ -94,7 +94,7 @@ func printTable(ctx context.Context, wfList []wfv1.CronWorkflow, listArgs *listF } else { cleanNextScheduledTime = "N/A" } - _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\t%t", cwf.ObjectMeta.Name, humanize.RelativeDurationShort(cwf.ObjectMeta.CreationTimestamp.Time, time.Now()), cleanLastScheduledTime, cleanNextScheduledTime, cwf.Spec.GetScheduleString(), cwf.Spec.Timezone, cwf.Spec.Suspend) + _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\t%t", cwf.Name, humanize.RelativeDurationShort(cwf.CreationTimestamp.Time, time.Now()), cleanLastScheduledTime, cleanNextScheduledTime, cwf.Spec.GetScheduleString(), cwf.Spec.Timezone, cwf.Spec.Suspend) _, _ = fmt.Fprintf(w, "\n") } _ = w.Flush() diff --git a/cmd/argo/commands/cron/util.go b/cmd/argo/commands/cron/util.go index 82db7983dcde..bcf410925d4f 100644 --- a/cmd/argo/commands/cron/util.go +++ b/cmd/argo/commands/cron/util.go @@ -18,7 +18,6 @@ import ( "github.com/argoproj/pkg/humanize" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) // GetNextRuntime returns the next time the workflow should run in local time. 
It assumes the workflow-controller is in @@ -60,15 +59,15 @@ func generateCronWorkflows(filePaths []string, strict bool) []v1alpha1.CronWorkf } // unmarshalCronWorkflows unmarshals the input bytes as either json or yaml -func unmarshalCronWorkflows(wfBytes []byte, strict bool) []wfv1.CronWorkflow { - var cronWf wfv1.CronWorkflow +func unmarshalCronWorkflows(wfBytes []byte, strict bool) []v1alpha1.CronWorkflow { + var cronWf v1alpha1.CronWorkflow var jsonOpts []argoJson.JSONOpt if strict { jsonOpts = append(jsonOpts, argoJson.DisallowUnknownFields) } err := argoJson.Unmarshal(wfBytes, &cronWf, jsonOpts...) if err == nil { - return []wfv1.CronWorkflow{cronWf} + return []v1alpha1.CronWorkflow{cronWf} } yamlWfs, err := common.SplitCronWorkflowYAMLFile(wfBytes, strict) if err == nil { @@ -78,10 +77,10 @@ func unmarshalCronWorkflows(wfBytes []byte, strict bool) []wfv1.CronWorkflow { return nil } -func printCronWorkflow(ctx context.Context, wf *wfv1.CronWorkflow, outFmt string) { +func printCronWorkflow(ctx context.Context, wf *v1alpha1.CronWorkflow, outFmt string) { switch outFmt { case "name": - fmt.Println(wf.ObjectMeta.Name) + fmt.Println(wf.Name) case "json": outBytes, _ := json.MarshalIndent(wf, "", " ") fmt.Println(string(outBytes)) @@ -95,13 +94,13 @@ func printCronWorkflow(ctx context.Context, wf *wfv1.CronWorkflow, outFmt string } } -func getCronWorkflowGet(ctx context.Context, cwf *wfv1.CronWorkflow) string { +func getCronWorkflowGet(ctx context.Context, cwf *v1alpha1.CronWorkflow) string { const fmtStr = "%-30s %v\n" out := "" - out += fmt.Sprintf(fmtStr, "Name:", cwf.ObjectMeta.Name) - out += fmt.Sprintf(fmtStr, "Namespace:", cwf.ObjectMeta.Namespace) - out += fmt.Sprintf(fmtStr, "Created:", humanize.Timestamp(cwf.ObjectMeta.CreationTimestamp.Time)) + out += fmt.Sprintf(fmtStr, "Name:", cwf.Name) + out += fmt.Sprintf(fmtStr, "Namespace:", cwf.Namespace) + out += fmt.Sprintf(fmtStr, "Created:", humanize.Timestamp(cwf.CreationTimestamp.Time)) out += 
fmt.Sprintf(fmtStr, "Schedules:", cwf.Spec.GetScheduleString()) out += fmt.Sprintf(fmtStr, "Suspended:", cwf.Spec.Suspend) if cwf.Spec.Timezone != "" { @@ -130,7 +129,7 @@ func getCronWorkflowGet(ctx context.Context, cwf *wfv1.CronWorkflow) string { out += fmt.Sprintf(fmtStr, "Active Workflows:", strings.Join(activeWfNames, ", ")) } if len(cwf.Status.Conditions) > 0 { - out += cwf.Status.Conditions.DisplayString(fmtStr, map[wfv1.ConditionType]string{wfv1.ConditionTypeSubmissionError: "✖"}) + out += cwf.Status.Conditions.DisplayString(fmtStr, map[v1alpha1.ConditionType]string{v1alpha1.ConditionTypeSubmissionError: "✖"}) } if len(cwf.Spec.WorkflowSpec.Arguments.Parameters) > 0 { out += fmt.Sprintf(fmtStr, "Workflow Parameters:", "") diff --git a/cmd/argo/commands/get.go b/cmd/argo/commands/get.go index e5c34ddb995b..389d3509caaf 100644 --- a/cmd/argo/commands/get.go +++ b/cmd/argo/commands/get.go @@ -65,7 +65,7 @@ func NewGetCommand() *cobra.Command { func printWorkflow(wf *wfv1.Workflow, getArgs common.GetFlags) error { switch getArgs.Output.String() { case "name": - fmt.Println(wf.ObjectMeta.Name) + fmt.Println(wf.Name) case "json": outBytes, _ := json.MarshalIndent(wf, "", " ") fmt.Println(string(outBytes)) diff --git a/cmd/argo/commands/list.go b/cmd/argo/commands/list.go index db0a311c6a2d..19a4019e1ad5 100644 --- a/cmd/argo/commands/list.go +++ b/cmd/argo/commands/list.go @@ -179,7 +179,7 @@ func listWorkflows(ctx context.Context, serviceClient workflowpkg.WorkflowServic } workflows = workflows. 
Filter(func(wf wfv1.Workflow) bool { - return strings.HasPrefix(wf.ObjectMeta.Name, flags.prefix) + return strings.HasPrefix(wf.Name, flags.prefix) }) if flags.createdSince != "" && flags.finishedBefore != "" { startTime, err := argotime.ParseSince(flags.createdSince) diff --git a/cmd/argo/commands/server.go b/cmd/argo/commands/server.go index c98792e5f44f..7e472160082a 100644 --- a/cmd/argo/commands/server.go +++ b/cmd/argo/commands/server.go @@ -8,8 +8,7 @@ import ( "strings" "time" - eventsource "github.com/argoproj/argo-events/pkg/client/eventsource/clientset/versioned" - sensor "github.com/argoproj/argo-events/pkg/client/sensor/clientset/versioned" + events "github.com/argoproj/argo-events/pkg/client/clientset/versioned" "github.com/argoproj/pkg/stats" log "github.com/sirupsen/logrus" "github.com/skratchdot/open-golang/open" @@ -81,11 +80,10 @@ See %s`, help.ArgoServer()), namespace := client.Namespace() clients := &types.Clients{ - Dynamic: dynamic.NewForConfigOrDie(config), - EventSource: eventsource.NewForConfigOrDie(config), - Kubernetes: kubernetes.NewForConfigOrDie(config), - Sensor: sensor.NewForConfigOrDie(config), - Workflow: wfclientset.NewForConfigOrDie(config), + Dynamic: dynamic.NewForConfigOrDie(config), + Events: events.NewForConfigOrDie(config), + Kubernetes: kubernetes.NewForConfigOrDie(config), + Workflow: wfclientset.NewForConfigOrDie(config), } ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/cmd/argo/commands/template/create.go b/cmd/argo/commands/template/create.go index cd36e87fb344..cac1e19fb5cb 100644 --- a/cmd/argo/commands/template/create.go +++ b/cmd/argo/commands/template/create.go @@ -55,7 +55,7 @@ func CreateWorkflowTemplates(ctx context.Context, filePaths []string, cliOpts *c Template: &wftmpl, }) if err != nil { - return fmt.Errorf("Failed to create workflow template: %v", err) + return fmt.Errorf("failed to create workflow template: %v", err) } printWorkflowTemplate(created, 
cliOpts.output.String()) } diff --git a/cmd/argo/commands/template/list.go b/cmd/argo/commands/template/list.go index 3dad60149e13..377eb987a379 100644 --- a/cmd/argo/commands/template/list.go +++ b/cmd/argo/commands/template/list.go @@ -59,7 +59,7 @@ func NewListCommand() *cobra.Command { printTable(wftmplList.Items, &listArgs) case "name": for _, wftmp := range wftmplList.Items { - fmt.Println(wftmp.ObjectMeta.Name) + fmt.Println(wftmp.Name) } default: return fmt.Errorf("Unknown output mode: %s", listArgs.output) @@ -82,9 +82,9 @@ func printTable(wfList []wfv1.WorkflowTemplate, listArgs *listFlags) { _, _ = fmt.Fprint(w, "\n") for _, wf := range wfList { if listArgs.allNamespaces { - _, _ = fmt.Fprintf(w, "%s\t", wf.ObjectMeta.Namespace) + _, _ = fmt.Fprintf(w, "%s\t", wf.Namespace) } - _, _ = fmt.Fprintf(w, "%s\t", wf.ObjectMeta.Name) + _, _ = fmt.Fprintf(w, "%s\t", wf.Name) _, _ = fmt.Fprintf(w, "\n") } _ = w.Flush() diff --git a/cmd/argo/commands/template/util.go b/cmd/argo/commands/template/util.go index 7abb358c1a7a..db3c55135d33 100644 --- a/cmd/argo/commands/template/util.go +++ b/cmd/argo/commands/template/util.go @@ -56,7 +56,7 @@ func unmarshalWorkflowTemplates(wfBytes []byte, strict bool) []wfv1.WorkflowTemp func printWorkflowTemplate(wf *wfv1.WorkflowTemplate, outFmt string) { switch outFmt { case "name": - fmt.Println(wf.ObjectMeta.Name) + fmt.Println(wf.Name) case "json": outBytes, _ := json.MarshalIndent(wf, "", " ") fmt.Println(string(outBytes)) @@ -72,7 +72,7 @@ func printWorkflowTemplate(wf *wfv1.WorkflowTemplate, outFmt string) { func printWorkflowTemplateHelper(wf *wfv1.WorkflowTemplate) { const fmtStr = "%-20s %v\n" - fmt.Printf(fmtStr, "Name:", wf.ObjectMeta.Name) - fmt.Printf(fmtStr, "Namespace:", wf.ObjectMeta.Namespace) - fmt.Printf(fmtStr, "Created:", humanize.Timestamp(wf.ObjectMeta.CreationTimestamp.Time)) + fmt.Printf(fmtStr, "Name:", wf.Name) + fmt.Printf(fmtStr, "Namespace:", wf.Namespace) + fmt.Printf(fmtStr, "Created:", 
humanize.Timestamp(wf.CreationTimestamp.Time)) } diff --git a/cmd/argoexec/commands/emissary.go b/cmd/argoexec/commands/emissary.go index 659140d026b7..ef06f97e7eae 100644 --- a/cmd/argoexec/commands/emissary.go +++ b/cmd/argoexec/commands/emissary.go @@ -228,7 +228,7 @@ func startCommand(name string, args []string, template *wfv1.Template) (*exec.Cm command := exec.Command(name, args...) command.Env = os.Environ() - var closer func() = func() {} + var closer = func() {} var stdout io.Writer = os.Stdout var stderr io.Writer = os.Stderr diff --git a/cmd/argoexec/commands/root.go b/cmd/argoexec/commands/root.go index 7a49cc7e1fc9..73b50cd1f114 100644 --- a/cmd/argoexec/commands/root.go +++ b/cmd/argoexec/commands/root.go @@ -55,6 +55,14 @@ func NewRootCommand() *cobra.Command { }, PersistentPreRun: func(cmd *cobra.Command, args []string) { initConfig() + + // Disable printing of usage string on errors, except for argument validation errors + // (i.e. when the "Args" function returns an error). + // + // This is set here instead of directly in "command" because Cobra + // executes PersistentPreRun after performing argument validation: + // https://github.com/spf13/cobra/blob/3a5efaede9d389703a792e2f7bfe3a64bc82ced9/command.go#L939-L957 + cmd.SilenceUsage = true }, } diff --git a/cmd/workflow-controller/main.go b/cmd/workflow-controller/main.go index 9b2247c62fd4..ad24683d01ce 100644 --- a/cmd/workflow-controller/main.go +++ b/cmd/workflow-controller/main.go @@ -70,7 +70,7 @@ func NewRootCommand() *cobra.Command { Use: CLIName, Short: "workflow-controller is the controller to operate on workflows", RunE: func(c *cobra.Command, args []string) error { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(context.Background(), runtimeutil.PanicHandlers...) 
cli.SetLogLevel(logLevel) cmdutil.SetGLogLevel(glogLevel) diff --git a/docs/argo-server.md b/docs/argo-server.md index 2c20a6cb50a0..9609cec46a6e 100644 --- a/docs/argo-server.md +++ b/docs/argo-server.md @@ -178,3 +178,7 @@ Argo Server by default rate limits to 1000 per IP per minute, you can configure * `X-Rate-Limit-Remaining` - the number of requests left for the current rate-limit window. * `X-Rate-Limit-Reset` - the time at which the rate limit resets, specified in UTC time. * `Retry-After` - indicate when a client should retry requests (when the rate limit expires), in UTC time. + +### GRPC ALPN + +The grpc library wants to enforce ALPN, but we are not prepared for this so the argo-server binary is built with `GRPC_ENFORCE_ALPN_ENABLED` set to `false` in the docker image as a short term workaround, as documented in https://github.com/grpc/grpc-go/issues/434 diff --git a/docs/executor_swagger.md b/docs/executor_swagger.md index f2bc26e2c1b2..2c4750c68aaf 100644 --- a/docs/executor_swagger.md +++ b/docs/executor_swagger.md @@ -431,9 +431,9 @@ of a single workflow step, which the executor will use as a default location to | cachingMode | [AzureDataDiskCachingMode](#azure-data-disk-caching-mode)| `AzureDataDiskCachingMode` | | | | | | diskName | string| `string` | | | diskName is the Name of the data disk in the blob storage | | | diskURI | string| `string` | | | diskURI is the URI of data disk in the blob storage | | -| fsType | string| `string` | | | fsType is Filesystem type to mount.
Must be a filesystem type supported by the host operating system.
Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
+optional | | +| fsType | string| `string` | | | fsType is Filesystem type to mount.
Must be a filesystem type supported by the host operating system.
Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
+optional
+default="ext4" | | | kind | [AzureDataDiskKind](#azure-data-disk-kind)| `AzureDataDiskKind` | | | | | -| readOnly | boolean| `bool` | | | readOnly Defaults to false (read/write). ReadOnly here will force
the ReadOnly setting in VolumeMounts.
+optional | | +| readOnly | boolean| `bool` | | | readOnly Defaults to false (read/write). ReadOnly here will force
the ReadOnly setting in VolumeMounts.
+optional
+default=false | | @@ -668,7 +668,7 @@ key-value pairs as environment variables. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | Specify whether the ConfigMap must be defined
+optional | | @@ -688,7 +688,7 @@ key-value pairs as environment variables. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | key | string| `string` | | | The key to select. | | -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | Specify whether the ConfigMap or its key must be defined
+optional | | @@ -712,7 +712,7 @@ mode. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced
ConfigMap will be projected into the volume as a file whose name is the
key and content is the value. If specified, the listed keys will be
projected into the specified paths, and unlisted keys will not be
present. If a key is specified which is not present in the ConfigMap,
the volume setup will error unless it is marked optional. Paths must be
relative and may not contain the '..' path or start with '..'.
+optional
+listType=atomic | | -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | optional specify whether the ConfigMap or its keys must be defined
+optional | | @@ -736,7 +736,7 @@ ConfigMap volumes support ownership management and SELinux relabeling. |------|------|---------|:--------:| ------- |-------------|---------| | defaultMode | int32 (formatted integer)| `int32` | | | defaultMode is optional: mode bits used to set permissions on created files by default.
Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511.
YAML accepts both octal and decimal values, JSON requires decimal values for mode bits.
Defaults to 0644.
Directories within the path are not affected by this setting.
This might be in conflict with other options that affect the file
mode, like fsGroup, and the result can be other mode bits set.
+optional | | | items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced
ConfigMap will be projected into the volume as a file whose name is the
key and content is the value. If specified, the listed keys will be
projected into the specified paths, and unlisted keys will not be
present. If a key is specified which is not present in the ConfigMap,
the volume setup will error unless it is marked optional. Paths must be
relative and may not contain the '..' path or start with '..'.
+optional
+listType=atomic | | -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | optional specify whether the ConfigMap or its keys must be defined
+optional | | @@ -1786,7 +1786,7 @@ ISCSI volumes support ownership management and SELinux relabeling. | fsType | string| `string` | | | fsType is the filesystem type of the volume that you want to mount.
Tip: Ensure that the filesystem type is supported by the host operating system.
Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi
TODO: how do we prevent errors in the filesystem from compromising the machine
+optional | | | initiatorName | string| `string` | | | initiatorName is the custom iSCSI Initiator Name.
If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface
: will be created for the connection.
+optional | | | iqn | string| `string` | | | iqn is the target iSCSI Qualified Name. | | -| iscsiInterface | string| `string` | | | iscsiInterface is the interface Name that uses an iSCSI transport.
Defaults to 'default' (tcp).
+optional | | +| iscsiInterface | string| `string` | | | iscsiInterface is the interface Name that uses an iSCSI transport.
Defaults to 'default' (tcp).
+optional
+default="default" | | | lun | int32 (formatted integer)| `int32` | | | lun represents iSCSI Target Lun number. | | | portals | []string| `[]string` | | | portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port
is other than default (typically TCP ports 860 and 3260).
+optional
+listType=atomic | | | readOnly | boolean| `bool` | | | readOnly here will force the ReadOnly setting in VolumeMounts.
Defaults to false.
+optional | | @@ -1795,6 +1795,22 @@ ISCSI volumes support ownership management and SELinux relabeling. +### ImageVolumeSource + + + + + + +**Properties** + +| Name | Type | Go type | Required | Default | Description | Example | +|------|------|---------|:--------:| ------- |-------------|---------| +| pullPolicy | [PullPolicy](#pull-policy)| `PullPolicy` | | | | | +| reference | string| `string` | | | Required: Image or artifact reference to be used.
Behaves in the same way as pod.spec.containers[*].image.
Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets.
More info: https://kubernetes.io/docs/concepts/containers/images
This field is optional to allow higher level config management to default or override
container images in workload controllers like Deployments and StatefulSets.
+optional | | + + + ### Inputs @@ -1989,8 +2005,16 @@ hook. One and only one of the fields, except TCPSocket must be specified. ### LocalObjectReference -> LocalObjectReference contains enough information to let you locate the -referenced object inside the same namespace. +> New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. +1. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular +restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". +Those cannot be well described when embedded. +2. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. +3. We cannot easily change it. Because this type is embedded in many locations, updates to this type +will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. + +Instead of using this type, create a locally provided and used type that is well-focused on your reference. +For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 . +structType=atomic @@ -2002,7 +2026,7 @@ referenced object inside the same namespace. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | @@ -2562,7 +2586,7 @@ and allows a Source for provider-specific attributes | resources | [VolumeResourceRequirements](#volume-resource-requirements)| `VolumeResourceRequirements` | | | | | | selector | [LabelSelector](#label-selector)| `LabelSelector` | | | | | | storageClassName | string| `string` | | | storageClassName is the name of the StorageClass required by the claim.
More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1
+optional | | -| volumeAttributesClassName | string| `string` | | | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.
If specified, the CSI driver will create or update the volume with the attributes defined
in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,
it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass
will be applied to the claim but it's not allowed to reset this field to empty string once it is set.
If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass
will be set by the persistentvolume controller if it exists.
If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be
set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource
exists.
More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/
(Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.
+featureGate=VolumeAttributesClass
+optional | | +| volumeAttributesClassName | string| `string` | | | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.
If specified, the CSI driver will create or update the volume with the attributes defined
in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,
it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass
will be applied to the claim but it's not allowed to reset this field to empty string once it is set.
If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass
will be set by the persistentvolume controller if it exists.
If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be
set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource
exists.
More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/
(Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).
+featureGate=VolumeAttributesClass
+optional | | | volumeMode | [PersistentVolumeMode](#persistent-volume-mode)| `PersistentVolumeMode` | | | | | | volumeName | string| `string` | | | volumeName is the binding reference to the PersistentVolume backing this claim.
+optional | | @@ -2699,8 +2723,8 @@ a pod of the set of pods is running | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | labelSelector | [LabelSelector](#label-selector)| `LabelSelector` | | | | | -| matchLabelKeys | []string| `[]string` | | | MatchLabelKeys is a set of pod label keys to select which pods will
be taken into consideration. The keys are used to lookup values from the
incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`
to select the group of existing pods which pods will be taken into consideration
for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming
pod labels will be ignored. The default value is empty.
The same key is forbidden to exist in both matchLabelKeys and labelSelector.
Also, matchLabelKeys cannot be set when labelSelector isn't set.
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.
+listType=atomic
+optional | | -| mismatchLabelKeys | []string| `[]string` | | | MismatchLabelKeys is a set of pod label keys to select which pods will
be taken into consideration. The keys are used to lookup values from the
incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`
to select the group of existing pods which pods will be taken into consideration
for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming
pod labels will be ignored. The default value is empty.
The same key is forbidden to exist in both mismatchLabelKeys and labelSelector.
Also, mismatchLabelKeys cannot be set when labelSelector isn't set.
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.
+listType=atomic
+optional | | +| matchLabelKeys | []string| `[]string` | | | MatchLabelKeys is a set of pod label keys to select which pods will
be taken into consideration. The keys are used to lookup values from the
incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`
to select the group of existing pods which pods will be taken into consideration
for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming
pod labels will be ignored. The default value is empty.
The same key is forbidden to exist in both matchLabelKeys and labelSelector.
Also, matchLabelKeys cannot be set when labelSelector isn't set.
This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).

+listType=atomic
+optional | | +| mismatchLabelKeys | []string| `[]string` | | | MismatchLabelKeys is a set of pod label keys to select which pods will
be taken into consideration. The keys are used to lookup values from the
incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`
to select the group of existing pods which pods will be taken into consideration
for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming
pod labels will be ignored. The default value is empty.
The same key is forbidden to exist in both mismatchLabelKeys and labelSelector.
Also, mismatchLabelKeys cannot be set when labelSelector isn't set.
This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).

+listType=atomic
+optional | | | namespaceSelector | [LabelSelector](#label-selector)| `LabelSelector` | | | | | | namespaces | []string| `[]string` | | | namespaces specifies a static list of namespace names that the term applies to.
The term is applied to the union of the namespaces listed in this field
and the ones selected by namespaceSelector.
null or empty namespaces list and null namespaceSelector means "this pod's namespace".
+optional
+listType=atomic | | | topologyKey | string| `string` | | | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching
the labelSelector in the specified namespaces, where co-located is defined as running on a node
whose value of the label with key topologyKey matches that of any node on which any of the
selected pods is running.
Empty topologyKey is not allowed. | | @@ -2739,6 +2763,17 @@ when volume is mounted. +### PodSELinuxChangePolicy + + + + +| Name | Type | Go type | Default | Description | Example | +|------|------|---------| ------- |-------------|---------| +| PodSELinuxChangePolicy | string| string | | | | + + + ### PodSecurityContext @@ -2760,9 +2795,11 @@ container.securityContext take precedence over field values of PodSecurityContex | runAsGroup | int64 (formatted integer)| `int64` | | | The GID to run the entrypoint of the container process.
Uses runtime default if unset.
May also be set in SecurityContext. If set in both SecurityContext and
PodSecurityContext, the value specified in SecurityContext takes precedence
for that container.
Note that this field cannot be set when spec.os.name is windows.
+optional | | | runAsNonRoot | boolean| `bool` | | | Indicates that the container must run as a non-root user.
If true, the Kubelet will validate the image at runtime to ensure that it
does not run as UID 0 (root) and fail to start the container if it does.
If unset or false, no such validation will be performed.
May also be set in SecurityContext. If set in both SecurityContext and
PodSecurityContext, the value specified in SecurityContext takes precedence.
+optional | | | runAsUser | int64 (formatted integer)| `int64` | | | The UID to run the entrypoint of the container process.
Defaults to user specified in image metadata if unspecified.
May also be set in SecurityContext. If set in both SecurityContext and
PodSecurityContext, the value specified in SecurityContext takes precedence
for that container.
Note that this field cannot be set when spec.os.name is windows.
+optional | | +| seLinuxChangePolicy | [PodSELinuxChangePolicy](#pod-s-e-linux-change-policy)| `PodSELinuxChangePolicy` | | | | | | seLinuxOptions | [SELinuxOptions](#s-e-linux-options)| `SELinuxOptions` | | | | | | seccompProfile | [SeccompProfile](#seccomp-profile)| `SeccompProfile` | | | | | -| supplementalGroups | []int64 (formatted integer)| `[]int64` | | | A list of groups applied to the first process run in each container, in addition
to the container's primary GID, the fsGroup (if specified), and group memberships
defined in the container image for the uid of the container process. If unspecified,
no additional groups are added to any container. Note that group memberships
defined in the container image for the uid of the container process are still effective,
even if they are not included in this list.
Note that this field cannot be set when spec.os.name is windows.
+optional
+listType=atomic | | +| supplementalGroups | []int64 (formatted integer)| `[]int64` | | | A list of groups applied to the first process run in each container, in
addition to the container's primary GID and fsGroup (if specified). If
the SupplementalGroupsPolicy feature is enabled, the
supplementalGroupsPolicy field determines whether these are in addition
to or instead of any group memberships defined in the container image.
If unspecified, no additional groups are added, though group memberships
defined in the container image may still be used, depending on the
supplementalGroupsPolicy field.
Note that this field cannot be set when spec.os.name is windows.
+optional
+listType=atomic | | +| supplementalGroupsPolicy | [SupplementalGroupsPolicy](#supplemental-groups-policy)| `SupplementalGroupsPolicy` | | | | | | sysctls | [][Sysctl](#sysctl)| `[]*Sysctl` | | | Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported
sysctls (by the container runtime) might fail to launch.
Note that this field cannot be set when spec.os.name is windows.
+optional
+listType=atomic | | | windowsOptions | [WindowsSecurityContextOptions](#windows-security-context-options)| `WindowsSecurityContextOptions` | | | | | @@ -2873,7 +2910,7 @@ alive or ready to receive traffic. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | defaultMode | int32 (formatted integer)| `int32` | | | defaultMode are the mode bits used to set permissions on created files by default.
Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511.
YAML accepts both octal and decimal values, JSON requires decimal values for mode bits.
Directories within the path are not affected by this setting.
This might be in conflict with other options that affect the file
mode, like fsGroup, and the result can be other mode bits set.
+optional | | -| sources | [][VolumeProjection](#volume-projection)| `[]*VolumeProjection` | | | sources is the list of volume projections
+optional
+listType=atomic | | +| sources | [][VolumeProjection](#volume-projection)| `[]*VolumeProjection` | | | sources is the list of volume projections. Each entry in this list
handles one source.
+optional
+listType=atomic | | @@ -3043,12 +3080,12 @@ cause implementors to also use a fixed point implementation. |------|------|---------|:--------:| ------- |-------------|---------| | fsType | string| `string` | | | fsType is the filesystem type of the volume that you want to mount.
Tip: Ensure that the filesystem type is supported by the host operating system.
Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd
TODO: how do we prevent errors in the filesystem from compromising the machine
+optional | | | image | string| `string` | | | image is the rados image name.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it | | -| keyring | string| `string` | | | keyring is the path to key ring for RBDUser.
Default is /etc/ceph/keyring.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional | | +| keyring | string| `string` | | | keyring is the path to key ring for RBDUser.
Default is /etc/ceph/keyring.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional
+default="/etc/ceph/keyring" | | | monitors | []string| `[]string` | | | monitors is a collection of Ceph monitors.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+listType=atomic | | -| pool | string| `string` | | | pool is the rados pool name.
Default is rbd.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional | | +| pool | string| `string` | | | pool is the rados pool name.
Default is rbd.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional
+default="rbd" | | | readOnly | boolean| `bool` | | | readOnly here will force the ReadOnly setting in VolumeMounts.
Defaults to false.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | -| user | string| `string` | | | user is the rados user name.
Default is admin.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional | | +| user | string| `string` | | | user is the rados user name.
Default is admin.
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
+optional
+default="admin" | | @@ -3093,6 +3130,7 @@ cause implementors to also use a fixed point implementation. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | name | string| `string` | | | Name must match the name of one entry in pod.spec.resourceClaims of
the Pod where this field is used. It makes that resource available
inside a container. | | +| request | string| `string` | | | Request is the name chosen for a request in the referenced claim.
If empty, everything from the claim is made available, otherwise
only the result of this request.

+optional | | @@ -3332,13 +3370,13 @@ cause implementors to also use a fixed point implementation. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| fsType | string| `string` | | | fsType is the filesystem type to mount.
Must be a filesystem type supported by the host operating system.
Ex. "ext4", "xfs", "ntfs".
Default is "xfs".
+optional | | +| fsType | string| `string` | | | fsType is the filesystem type to mount.
Must be a filesystem type supported by the host operating system.
Ex. "ext4", "xfs", "ntfs".
Default is "xfs".
+optional
+default="xfs" | | | gateway | string| `string` | | | gateway is the host address of the ScaleIO API Gateway. | | | protectionDomain | string| `string` | | | protectionDomain is the name of the ScaleIO Protection Domain for the configured storage.
+optional | | | readOnly | boolean| `bool` | | | readOnly Defaults to false (read/write). ReadOnly here will force
the ReadOnly setting in VolumeMounts.
+optional | | | secretRef | [LocalObjectReference](#local-object-reference)| `LocalObjectReference` | | | | | | sslEnabled | boolean| `bool` | | | sslEnabled Flag enable/disable SSL communication with Gateway, default false
+optional | | -| storageMode | string| `string` | | | storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.
Default is ThinProvisioned.
+optional | | +| storageMode | string| `string` | | | storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.
Default is ThinProvisioned.
+optional
+default="ThinProvisioned" | | | storagePool | string| `string` | | | storagePool is the ScaleIO Storage Pool associated with the protection domain.
+optional | | | system | string| `string` | | | system is the name of the storage system as configured in ScaleIO. | | | volumeName | string| `string` | | | volumeName is the name of a volume already created in the ScaleIO system
that is associated with this volume source. | | @@ -3436,7 +3474,7 @@ key-value pairs as environment variables. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | Specify whether the Secret must be defined
+optional | | @@ -3456,7 +3494,7 @@ key-value pairs as environment variables. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | key | string| `string` | | | The key of the secret to select from. Must be a valid secret key. | | -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | Specify whether the Secret or its key must be defined
+optional | | @@ -3479,7 +3517,7 @@ mode. | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| | items | [][KeyToPath](#key-to-path)| `[]*KeyToPath` | | | items if unspecified, each key-value pair in the Data field of the referenced
Secret will be projected into the volume as a file whose name is the
key and content is the value. If specified, the listed keys will be
projected into the specified paths, and unlisted keys will not be
present. If a key is specified which is not present in the Secret,
the volume setup will error unless it is marked optional. Paths must be
relative and may not contain the '..' path or start with '..'.
+optional
+listType=atomic | | -| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
TODO: Add other useful fields. apiVersion, kind, uid?
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | +| name | string| `string` | | | Name of the referent.
This field is effectively required, but due to backwards compatibility is
allowed to be empty. Instances of this type with an empty value here are
almost certainly wrong.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
+optional
+default=""
+kubebuilder:default=""
TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. | | | optional | boolean| `bool` | | | optional field specify whether the Secret or its key must be defined
+optional | | @@ -3646,6 +3684,22 @@ otherwise). +### SupplementalGroupsPolicy + + +> SupplementalGroupsPolicy defines how supplemental groups +of the first container processes are calculated. ++enum + + + + +| Name | Type | Go type | Default | Description | Example | +|------|------|---------| ------- |-------------|---------| +| SupplementalGroupsPolicy | string| string | | SupplementalGroupsPolicy defines how supplemental groups
of the first container processes are calculated.
+enum | | + + + ### SuppliedValueFrom @@ -3941,8 +3995,19 @@ the triple using the matching operator . ### TypedLocalObjectReference -> TypedLocalObjectReference contains enough information to let you locate the -typed referenced object inside the same namespace. +> New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. +1. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular +restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". +Those cannot be well described when embedded. +2. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. +3. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity +during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple +and the version of the actual struct is irrelevant. +4. We cannot easily change it. Because this type is embedded in many locations, updates to this type +will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. + +Instead of using this type, create a locally provided and used type that is well-focused on your reference. +For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 . +structType=atomic @@ -3963,10 +4028,13 @@ typed referenced object inside the same namespace. ### TypedObjectReference +> TypedObjectReference contains enough information to let you locate the typed referenced object + + **Properties** | Name | Type | Go type | Required | Default | Description | Example | @@ -4102,6 +4170,7 @@ intent and helps make sure that UIDs and names do not get conflated. 
| gitRepo | [GitRepoVolumeSource](#git-repo-volume-source)| `GitRepoVolumeSource` | | | | | | glusterfs | [GlusterfsVolumeSource](#glusterfs-volume-source)| `GlusterfsVolumeSource` | | | | | | hostPath | [HostPathVolumeSource](#host-path-volume-source)| `HostPathVolumeSource` | | | | | +| image | [ImageVolumeSource](#image-volume-source)| `ImageVolumeSource` | | | | | | iscsi | [ISCSIVolumeSource](#i-s-c-s-i-volume-source)| `ISCSIVolumeSource` | | | | | | name | string| `string` | | | name of the volume.
Must be a DNS_LABEL and unique within the pod.
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names | | | nfs | [NFSVolumeSource](#n-f-s-volume-source)| `NFSVolumeSource` | | | | | @@ -4158,7 +4227,7 @@ intent and helps make sure that UIDs and names do not get conflated. ### VolumeProjection -> Projection that may be projected along with other supported volume types +> Exactly one of these fields must be set. diff --git a/docs/fields.md b/docs/fields.md index c03835b72192..fe6208cc87a3 100644 --- a/docs/fields.md +++ b/docs/fields.md @@ -5187,9 +5187,11 @@ PodSecurityContext holds pod-level security attributes and common container sett |`runAsGroup`|`integer`|The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.| |`runAsNonRoot`|`boolean`|Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.| |`runAsUser`|`integer`|The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.| +|`seLinuxChangePolicy`|`string`|seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. 
It has no effect on nodes that do not support SELinux or to volumes that do not support SELinux. Valid values are "MountOption" and "Recursive". "Recursive" means relabeling of all files on all Pod volumes by the container runtime. This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node. "MountOption" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. "MountOption" value is allowed only when SELinuxMount feature gate is enabled. If not specified and SELinuxMount feature gate is enabled, "MountOption" is used. If not specified and SELinuxMount feature gate is disabled, "MountOption" is used for ReadWriteOncePod volumes and "Recursive" for all other volumes. This field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers. All Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows.|
Note that this field cannot be set when spec.os.name is windows.| |`seccompProfile`|[`SeccompProfile`](#seccompprofile)|The seccomp options to use by the containers in this pod. Note that this field cannot be set when spec.os.name is windows.| -|`supplementalGroups`|`Array< integer >`|A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows.| +|`supplementalGroups`|`Array< integer >`|A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows.| +|`supplementalGroupsPolicy`|`string`|Defines how supplemental groups of the first container processes are calculated. Valid values are "Merge" and "Strict". If not specified, "Merge" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. Note that this field cannot be set when spec.os.name is windows.| |`sysctls`|`Array<`[`Sysctl`](#sysctl)`>`|Sysctls hold a list of namespaced sysctls used for the pod. 
Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.| |`windowsOptions`|[`WindowsSecurityContextOptions`](#windowssecuritycontextoptions)|The Windows specific settings applied to all containers. If unspecified, the options within a container's SecurityContext will be used. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is linux.| @@ -5262,36 +5264,37 @@ Volume represents a named volume in a pod that may be accessed by any container ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`awsElasticBlockStore`|[`AWSElasticBlockStoreVolumeSource`](#awselasticblockstorevolumesource)|awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore| -|`azureDisk`|[`AzureDiskVolumeSource`](#azurediskvolumesource)|azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.| -|`azureFile`|[`AzureFileVolumeSource`](#azurefilevolumesource)|azureFile represents an Azure File Service mount on the host and bind mount to the pod.| -|`cephfs`|[`CephFSVolumeSource`](#cephfsvolumesource)|cephFS represents a Ceph FS mount on the host that shares a pod's lifetime| -|`cinder`|[`CinderVolumeSource`](#cindervolumesource)|cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md| +|`awsElasticBlockStore`|[`AWSElasticBlockStoreVolumeSource`](#awselasticblockstorevolumesource)|awsElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: AWSElasticBlockStore is deprecated. 
All operations for the in-tree awsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore| +|`azureDisk`|[`AzureDiskVolumeSource`](#azurediskvolumesource)|azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. Deprecated: AzureDisk is deprecated. All operations for the in-tree azureDisk type are redirected to the disk.csi.azure.com CSI driver.| +|`azureFile`|[`AzureFileVolumeSource`](#azurefilevolumesource)|azureFile represents an Azure File Service mount on the host and bind mount to the pod. Deprecated: AzureFile is deprecated. All operations for the in-tree azureFile type are redirected to the file.csi.azure.com CSI driver.| +|`cephfs`|[`CephFSVolumeSource`](#cephfsvolumesource)|cephFS represents a Ceph FS mount on the host that shares a pod's lifetime. Deprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported.| +|`cinder`|[`CinderVolumeSource`](#cindervolumesource)|cinder represents a cinder volume attached and mounted on kubelets host machine. Deprecated: Cinder is deprecated. All operations for the in-tree cinder type are redirected to the cinder.csi.openstack.org CSI driver. 
More info: https://examples.k8s.io/mysql-cinder-pd/README.md| |`configMap`|[`ConfigMapVolumeSource`](#configmapvolumesource)|configMap represents a configMap that should populate this volume| -|`csi`|[`CSIVolumeSource`](#csivolumesource)|csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).| +|`csi`|[`CSIVolumeSource`](#csivolumesource)|csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers.| |`downwardAPI`|[`DownwardAPIVolumeSource`](#downwardapivolumesource)|downwardAPI represents downward API about the pod that should populate this volume| |`emptyDir`|[`EmptyDirVolumeSource`](#emptydirvolumesource)|emptyDir represents a temporary directory that shares a pod's lifetime. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir| |`ephemeral`|[`EphemeralVolumeSource`](#ephemeralvolumesource)|ephemeral represents a volume that is handled by a cluster storage driver. The volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts, and deleted when the pod is removed. Use this if: a) the volume is only needed while the pod runs, b) features of normal volumes like restoring from snapshot or capacity tracking are needed, c) the storage driver is specified through a storage class, and d) the storage driver supports dynamic volume provisioning through a PersistentVolumeClaim (see EphemeralVolumeSource for more information on the connection between this volume type and PersistentVolumeClaim). Use PersistentVolumeClaim or one of the vendor-specific APIs for volumes that persist for longer than the lifecycle of an individual pod. Use CSI for light-weight local ephemeral volumes if the CSI driver is meant to be used that way - see the documentation of the driver for more information. 
A pod can use both types of ephemeral volumes and persistent volumes at the same time.| |`fc`|[`FCVolumeSource`](#fcvolumesource)|fc represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.| -|`flexVolume`|[`FlexVolumeSource`](#flexvolumesource)|flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin.| -|`flocker`|[`FlockerVolumeSource`](#flockervolumesource)|flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running| -|`gcePersistentDisk`|[`GCEPersistentDiskVolumeSource`](#gcepersistentdiskvolumesource)|gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk| -|~~`gitRepo`~~|~~[`GitRepoVolumeSource`](#gitrepovolumesource)~~|~~gitRepo represents a git repository at a particular revision.~~ DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container.| -|`glusterfs`|[`GlusterfsVolumeSource`](#glusterfsvolumesource)|glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/glusterfs/README.md| +|`flexVolume`|[`FlexVolumeSource`](#flexvolumesource)|flexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. Deprecated: FlexVolume is deprecated. Consider using a CSIDriver instead.| +|`flocker`|[`FlockerVolumeSource`](#flockervolumesource)|flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running. 
Deprecated: Flocker is deprecated and the in-tree flocker type is no longer supported.| +|`gcePersistentDisk`|[`GCEPersistentDiskVolumeSource`](#gcepersistentdiskvolumesource)|gcePersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Deprecated: GCEPersistentDisk is deprecated. All operations for the in-tree gcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk| +|`gitRepo`|[`GitRepoVolumeSource`](#gitrepovolumesource)|gitRepo represents a git repository at a particular revision. Deprecated: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container.| +|`glusterfs`|[`GlusterfsVolumeSource`](#glusterfsvolumesource)|glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. Deprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported. More info: https://examples.k8s.io/volumes/glusterfs/README.md| |`hostPath`|[`HostPathVolumeSource`](#hostpathvolumesource)|hostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath| +|`image`|[`ImageVolumeSource`](#imagevolumesource)|image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine. The volume is resolved at pod startup depending on which PullPolicy value is provided: - Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. - Never: the kubelet never pulls the reference and only uses a local image or artifact. 
Container creation will fail if the reference isn't present. - IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. The volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation. A failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message. The types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field. The OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images. The volume will be mounted read-only (ro) and non-executable files (noexec). Sub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath). The field spec.securityContext.fsGroupChangePolicy has no effect on this volume type.| |`iscsi`|[`ISCSIVolumeSource`](#iscsivolumesource)|iscsi represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://examples.k8s.io/volumes/iscsi/README.md| |`name`|`string`|name of the volume. Must be a DNS_LABEL and unique within the pod. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names| |`nfs`|[`NFSVolumeSource`](#nfsvolumesource)|nfs represents an NFS mount on the host that shares a pod's lifetime More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs| |`persistentVolumeClaim`|[`PersistentVolumeClaimVolumeSource`](#persistentvolumeclaimvolumesource)|persistentVolumeClaimVolumeSource represents a reference to a PersistentVolumeClaim in the same namespace. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims| -|`photonPersistentDisk`|[`PhotonPersistentDiskVolumeSource`](#photonpersistentdiskvolumesource)|photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine| -|`portworxVolume`|[`PortworxVolumeSource`](#portworxvolumesource)|portworxVolume represents a portworx volume attached and mounted on kubelets host machine| +|`photonPersistentDisk`|[`PhotonPersistentDiskVolumeSource`](#photonpersistentdiskvolumesource)|photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine. Deprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported.| +|`portworxVolume`|[`PortworxVolumeSource`](#portworxvolumesource)|portworxVolume represents a portworx volume attached and mounted on kubelets host machine. Deprecated: PortworxVolume is deprecated. 
All operations for the in-tree portworxVolume type are redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate is on.| |`projected`|[`ProjectedVolumeSource`](#projectedvolumesource)|projected items for all in one resources secrets, configmaps, and downward API| -|`quobyte`|[`QuobyteVolumeSource`](#quobytevolumesource)|quobyte represents a Quobyte mount on the host that shares a pod's lifetime| -|`rbd`|[`RBDVolumeSource`](#rbdvolumesource)|rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/rbd/README.md| -|`scaleIO`|[`ScaleIOVolumeSource`](#scaleiovolumesource)|scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.| +|`quobyte`|[`QuobyteVolumeSource`](#quobytevolumesource)|quobyte represents a Quobyte mount on the host that shares a pod's lifetime. Deprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported.| +|`rbd`|[`RBDVolumeSource`](#rbdvolumesource)|rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. Deprecated: RBD is deprecated and the in-tree rbd type is no longer supported. More info: https://examples.k8s.io/volumes/rbd/README.md| +|`scaleIO`|[`ScaleIOVolumeSource`](#scaleiovolumesource)|scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. Deprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported.| |`secret`|[`SecretVolumeSource`](#secretvolumesource)|secret represents a secret that should populate this volume. 
More info: https://kubernetes.io/docs/concepts/storage/volumes#secret| -|`storageos`|[`StorageOSVolumeSource`](#storageosvolumesource)|storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.| -|`vsphereVolume`|[`VsphereVirtualDiskVolumeSource`](#vspherevirtualdiskvolumesource)|vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine| +|`storageos`|[`StorageOSVolumeSource`](#storageosvolumesource)|storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. Deprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported.| +|`vsphereVolume`|[`VsphereVirtualDiskVolumeSource`](#vspherevirtualdiskvolumesource)|vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine. Deprecated: VsphereVolume is deprecated. All operations for the in-tree vsphereVolume type are redirected to the csi.vsphere.vmware.com CSI driver.| ## Time @@ -5807,14 +5810,14 @@ Probe describes a health check to be performed against a container to determine ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`exec`|[`ExecAction`](#execaction)|Exec specifies the action to take.| +|`exec`|[`ExecAction`](#execaction)|Exec specifies a command to execute in the container.| |`failureThreshold`|`integer`|Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.| -|`grpc`|[`GRPCAction`](#grpcaction)|GRPC specifies an action involving a GRPC port.| -|`httpGet`|[`HTTPGetAction`](#httpgetaction)|HTTPGet specifies the http request to perform.| +|`grpc`|[`GRPCAction`](#grpcaction)|GRPC specifies a GRPC HealthCheckRequest.| +|`httpGet`|[`HTTPGetAction`](#httpgetaction)|HTTPGet specifies an HTTP GET request to perform.| |`initialDelaySeconds`|`integer`|Number of seconds after the container has started before liveness probes are initiated. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes| |`periodSeconds`|`integer`|How often (in seconds) to perform the probe. Default to 10 seconds. Minimum value is 1.| |`successThreshold`|`integer`|Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for liveness and startup. Minimum value is 1.| -|`tcpSocket`|[`TCPSocketAction`](#tcpsocketaction)|TCPSocket specifies an action involving a TCP port.| +|`tcpSocket`|[`TCPSocketAction`](#tcpsocketaction)|TCPSocket specifies a connection to a TCP port.| |`terminationGracePeriodSeconds`|`integer`|Optional duration in seconds the pod needs to terminate gracefully upon probe failure. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. If this value is nil, the pod's terminationGracePeriodSeconds will be used. Otherwise, this value overrides the value provided by the pod spec. Value must be non-negative integer. The value zero indicates stop immediately via the kill signal (no opportunity to shut down). This is a beta field and requires enabling ProbeTerminationGracePeriod feature gate. Minimum value is 1. spec.terminationGracePeriodSeconds is used if unset.| |`timeoutSeconds`|`integer`|Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes| @@ -5895,7 +5898,7 @@ SecurityContext holds security configuration that will be applied to a container |`appArmorProfile`|[`AppArmorProfile`](#apparmorprofile)|appArmorProfile is the AppArmor options to use by this container. If set, this profile overrides the pod's appArmorProfile. 
Note that this field cannot be set when spec.os.name is windows.| |`capabilities`|[`Capabilities`](#capabilities)|The capabilities to add/drop when running containers. Defaults to the default set of capabilities granted by the container runtime. Note that this field cannot be set when spec.os.name is windows.| |`privileged`|`boolean`|Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.| -|`procMount`|`string`|procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.| +|`procMount`|`string`|procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.| |`readOnlyRootFilesystem`|`boolean`|Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.| |`runAsGroup`|`integer`|The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.| |`runAsNonRoot`|`boolean`|Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. 
May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.| @@ -6005,8 +6008,8 @@ PodDNSConfigOption defines DNS resolver options of a pod. ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`name`|`string`|Required.| -|`value`|`string`|_No description available_| +|`name`|`string`|Name is this DNS resolver option's name. Required.| +|`value`|`string`|Value is this DNS resolver option's value.| ## AppArmorProfile @@ -6075,7 +6078,7 @@ PersistentVolumeClaimSpec describes the common attributes of storage devices and |`resources`|[`VolumeResourceRequirements`](#volumeresourcerequirements)|resources represents the minimum resources the volume should have. If RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements that are lower than previous value but must still be higher than capacity recorded in the status field of the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources| |`selector`|[`LabelSelector`](#labelselector)|selector is a label query over volumes to consider for binding.| |`storageClassName`|`string`|storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1| -|`volumeAttributesClassName`|`string`|volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. 
If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.| +|`volumeAttributesClassName`|`string`|volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).| |`volumeMode`|`string`|volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.| |`volumeName`|`string`|volumeName is the binding reference to the PersistentVolume backing this claim.| @@ -6091,8 +6094,8 @@ PersistentVolumeClaimStatus is the current status of a persistent volume claim. 
|`allocatedResources`|[`Quantity`](#quantity)|allocatedResources tracks the resources allocated to a PVC including its capacity. Key names follow standard Kubernetes label syntax. Valid values are either: * Un-prefixed keys: - storage - the capacity of the volume. * Custom resources must use implementation-defined prefixed names such as "example.com/my-custom-resource" Apart from above values - keys that are unprefixed or have kubernetes.io prefix are considered reserved and hence may not be used. Capacity reported here may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. A controller that receives PVC update with previously unknown resourceName should ignore the update for the purpose it was designed. For example - a controller that only is responsible for resizing capacity of the volume, should ignore PVC updates that change other valid resources associated with PVC. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.| |`capacity`|[`Quantity`](#quantity)|capacity represents the actual resources of the underlying volume.| |`conditions`|`Array<`[`PersistentVolumeClaimCondition`](#persistentvolumeclaimcondition)`>`|conditions is the current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'Resizing'.| -|`currentVolumeAttributesClassName`|`string`|currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. 
When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature.| -|`modifyVolumeStatus`|[`ModifyVolumeStatus`](#modifyvolumestatus)|ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. This is an alpha field and requires enabling VolumeAttributesClass feature.| +|`currentVolumeAttributesClassName`|`string`|currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default).| +|`modifyVolumeStatus`|[`ModifyVolumeStatus`](#modifyvolumestatus)|ModifyVolumeStatus represents the status object of ControllerModifyVolume operation. When this is unset, there is no ModifyVolume operation being attempted. This is a beta field and requires enabling VolumeAttributesClass feature (off by default).| |`phase`|`string`|phase represents the current phase of PersistentVolumeClaim.| ## AWSElasticBlockStoreVolumeSource @@ -6310,6 +6313,356 @@ Represents a host path mapped into a pod. Host path volumes do not support owner |`path`|`string`|path of the directory on the host. If the path is a symlink, it will follow the link to the real path. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath| |`type`|`string`|type for HostPath Volume Defaults to "" More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath| +## ImageVolumeSource + +ImageVolumeSource represents an image volume resource. + +
+Examples with this field (click to open) + +- [`archive-location.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/archive-location.yaml) + +- [`arguments-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/arguments-artifacts.yaml) + +- [`arguments-parameters-from-configmap.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/arguments-parameters-from-configmap.yaml) + +- [`arguments-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/arguments-parameters.yaml) + +- [`artifact-disable-archive.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-disable-archive.yaml) + +- [`artifact-gc-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-gc-workflow.yaml) + +- [`artifact-passing-subpath.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-passing-subpath.yaml) + +- [`artifact-passing.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-passing.yaml) + +- [`artifact-path-placeholders.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-path-placeholders.yaml) + +- [`artifact-repository-ref.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifact-repository-ref.yaml) + +- [`artifactory-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifactory-artifact.yaml) + +- [`artifacts-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/artifacts-workflowtemplate.yaml) + +- [`buildkit-template.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/buildkit-template.yaml) + +- [`ci-output-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/ci-output-artifact.yaml) + +- [`ci-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/ci-workflowtemplate.yaml) + +- 
[`ci.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/ci.yaml) + +- [`clustertemplates.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/cluster-workflow-template/clustertemplates.yaml) + +- [`coinflip-recursive.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/coinflip-recursive.yaml) + +- [`coinflip.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/coinflip.yaml) + +- [`colored-logs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/colored-logs.yaml) + +- [`conditional-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/conditional-artifacts.yaml) + +- [`conditional-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/conditional-parameters.yaml) + +- [`conditionals-complex.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/conditionals-complex.yaml) + +- [`conditionals.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/conditionals.yaml) + +- [`graph-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/container-set-template/graph-workflow.yaml) + +- [`outputs-result-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/container-set-template/outputs-result-workflow.yaml) + +- [`parallel-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/container-set-template/parallel-workflow.yaml) + +- [`sequence-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/container-set-template/sequence-workflow.yaml) + +- [`workspace-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/container-set-template/workspace-workflow.yaml) + +- [`continue-on-fail.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/continue-on-fail.yaml) + +- [`cron-backfill.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/cron-backfill.yaml) + +- 
[`cron-when.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/cron-when.yaml) + +- [`cron-workflow-multiple-schedules.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/cron-workflow-multiple-schedules.yaml) + +- [`cron-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/cron-workflow.yaml) + +- [`custom-metrics.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/custom-metrics.yaml) + +- [`daemon-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/daemon-nginx.yaml) + +- [`daemon-step.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/daemon-step.yaml) + +- [`dag-coinflip.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-coinflip.yaml) + +- [`dag-conditional-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-conditional-artifacts.yaml) + +- [`dag-conditional-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-conditional-parameters.yaml) + +- [`dag-continue-on-fail.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-continue-on-fail.yaml) + +- [`dag-custom-metrics.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-custom-metrics.yaml) + +- [`dag-daemon-task.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-daemon-task.yaml) + +- [`dag-diamond-steps.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-diamond-steps.yaml) + +- [`dag-diamond.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-diamond.yaml) + +- [`dag-disable-failFast.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-disable-failFast.yaml) + +- [`dag-enhanced-depends.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-enhanced-depends.yaml) + +- 
[`dag-inline-clusterworkflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-inline-clusterworkflowtemplate.yaml) + +- [`dag-inline-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-inline-workflow.yaml) + +- [`dag-inline-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-inline-workflowtemplate.yaml) + +- [`dag-multiroot.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-multiroot.yaml) + +- [`dag-nested.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-nested.yaml) + +- [`dag-targets.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-targets.yaml) + +- [`dag-task-level-timeout.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dag-task-level-timeout.yaml) + +- [`data-transformations.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/data-transformations.yaml) + +- [`default-pdb-support.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/default-pdb-support.yaml) + +- [`dns-config.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/dns-config.yaml) + +- [`exit-code-output-variable.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-code-output-variable.yaml) + +- [`exit-handler-dag-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handler-dag-level.yaml) + +- [`exit-handler-slack.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handler-slack.yaml) + +- [`exit-handler-step-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handler-step-level.yaml) + +- [`exit-handler-with-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handler-with-artifacts.yaml) + +- [`exit-handler-with-param.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handler-with-param.yaml) + +- 
[`exit-handlers.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/exit-handlers.yaml) + +- [`expression-destructure-json-complex.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/expression-destructure-json-complex.yaml) + +- [`expression-destructure-json.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/expression-destructure-json.yaml) + +- [`expression-reusing-verbose-snippets.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/expression-reusing-verbose-snippets.yaml) + +- [`expression-tag-template-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/expression-tag-template-workflow.yaml) + +- [`fibonacci-seq-conditional-param.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/fibonacci-seq-conditional-param.yaml) + +- [`forever.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/forever.yaml) + +- [`fun-with-gifs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/fun-with-gifs.yaml) + +- [`gc-ttl.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/gc-ttl.yaml) + +- [`global-outputs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/global-outputs.yaml) + +- [`global-parameters-from-configmap-referenced-as-local-variable.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/global-parameters-from-configmap-referenced-as-local-variable.yaml) + +- [`global-parameters-from-configmap.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/global-parameters-from-configmap.yaml) + +- [`global-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/global-parameters.yaml) + +- [`handle-large-output-results.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/handle-large-output-results.yaml) + +- [`hdfs-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/hdfs-artifact.yaml) + +- 
[`hello-hybrid.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/hello-hybrid.yaml) + +- [`hello-windows.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/hello-windows.yaml) + +- [`hello-world.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/hello-world.yaml) + +- [`image-pull-secrets.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/image-pull-secrets.yaml) + +- [`influxdb-ci.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/influxdb-ci.yaml) + +- [`init-container.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/init-container.yaml) + +- [`input-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-azure.yaml) + +- [`input-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-gcs.yaml) + +- [`input-artifact-git.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-git.yaml) + +- [`input-artifact-http.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-http.yaml) + +- [`input-artifact-oss.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-oss.yaml) + +- [`input-artifact-raw.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-raw.yaml) + +- [`input-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/input-artifact-s3.yaml) + +- [`intermediate-parameters.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/intermediate-parameters.yaml) + +- [`k8s-owner-reference.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/k8s-owner-reference.yaml) + +- [`k8s-wait-wf.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/k8s-wait-wf.yaml) + +- [`key-only-artifact.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/key-only-artifact.yaml) + +- 
[`label-value-from-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/label-value-from-workflow.yaml) + +- [`life-cycle-hooks-tmpl-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/life-cycle-hooks-tmpl-level.yaml) + +- [`life-cycle-hooks-wf-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/life-cycle-hooks-wf-level.yaml) + +- [`loops-arbitrary-sequential-steps.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-arbitrary-sequential-steps.yaml) + +- [`loops-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-dag.yaml) + +- [`loops-maps.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-maps.yaml) + +- [`loops-param-argument.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-param-argument.yaml) + +- [`loops-param-result.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-param-result.yaml) + +- [`loops-sequence.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops-sequence.yaml) + +- [`loops.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/loops.yaml) + +- [`map-reduce.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/map-reduce.yaml) + +- [`nested-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/nested-workflow.yaml) + +- [`node-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/node-selector.yaml) + +- [`output-artifact-azure.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/output-artifact-azure.yaml) + +- [`output-artifact-gcs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/output-artifact-gcs.yaml) + +- [`output-artifact-s3.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/output-artifact-s3.yaml) + +- 
[`output-parameter.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/output-parameter.yaml) + +- [`parallelism-limit.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parallelism-limit.yaml) + +- [`parallelism-nested-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parallelism-nested-dag.yaml) + +- [`parallelism-nested-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parallelism-nested-workflow.yaml) + +- [`parallelism-nested.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parallelism-nested.yaml) + +- [`parallelism-template-limit.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parallelism-template-limit.yaml) + +- [`parameter-aggregation-dag.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parameter-aggregation-dag.yaml) + +- [`parameter-aggregation-script.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parameter-aggregation-script.yaml) + +- [`parameter-aggregation.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/parameter-aggregation.yaml) + +- [`pod-gc-strategy-with-label-selector.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-gc-strategy-with-label-selector.yaml) + +- [`pod-gc-strategy.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-gc-strategy.yaml) + +- [`pod-metadata-wf-field.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-metadata-wf-field.yaml) + +- [`pod-metadata.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-metadata.yaml) + +- [`pod-spec-from-previous-step.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-spec-from-previous-step.yaml) + +- [`pod-spec-patch-wf-tmpl.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-spec-patch-wf-tmpl.yaml) + +- 
[`pod-spec-patch.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-spec-patch.yaml) + +- [`pod-spec-yaml-patch.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/pod-spec-yaml-patch.yaml) + +- [`recursive-for-loop.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/recursive-for-loop.yaml) + +- [`resubmit.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/resubmit.yaml) + +- [`retry-backoff.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-backoff.yaml) + +- [`retry-conditional.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-conditional.yaml) + +- [`retry-container-to-completion.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-container-to-completion.yaml) + +- [`retry-container.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-container.yaml) + +- [`retry-on-error.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-on-error.yaml) + +- [`retry-script.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-script.yaml) + +- [`retry-with-steps.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/retry-with-steps.yaml) + +- [`scripts-bash.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/scripts-bash.yaml) + +- [`scripts-javascript.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/scripts-javascript.yaml) + +- [`scripts-python.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/scripts-python.yaml) + +- [`secrets.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/secrets.yaml) + +- [`sidecar-dind.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/sidecar-dind.yaml) + +- [`sidecar-nginx.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/sidecar-nginx.yaml) + +- 
[`sidecar.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/sidecar.yaml) + +- [`status-reference.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/status-reference.yaml) + +- [`step-level-timeout.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/step-level-timeout.yaml) + +- [`steps-inline-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/steps-inline-workflow.yaml) + +- [`steps.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/steps.yaml) + +- [`suspend-template-outputs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/suspend-template-outputs.yaml) + +- [`suspend-template.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/suspend-template.yaml) + +- [`synchronization-mutex-tmpl-level-legacy.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/synchronization-mutex-tmpl-level-legacy.yaml) + +- [`synchronization-mutex-tmpl-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/synchronization-mutex-tmpl-level.yaml) + +- [`synchronization-mutex-wf-level-legacy.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/synchronization-mutex-wf-level-legacy.yaml) + +- [`synchronization-mutex-wf-level.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/synchronization-mutex-wf-level.yaml) + +- [`template-defaults.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/template-defaults.yaml) + +- [`template-on-exit.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/template-on-exit.yaml) + +- [`timeouts-step.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/timeouts-step.yaml) + +- [`timeouts-workflow.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/timeouts-workflow.yaml) + +- 
[`title-and-description-with-markdown.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/title-and-description-with-markdown.yaml) + +- [`volumes-emptydir.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/volumes-emptydir.yaml) + +- [`volumes-existing.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/volumes-existing.yaml) + +- [`volumes-pvc.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/volumes-pvc.yaml) + +- [`webhdfs-input-output-artifacts.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/webhdfs-input-output-artifacts.yaml) + +- [`withsequence-nested-result.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/withsequence-nested-result.yaml) + +- [`work-avoidance.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/work-avoidance.yaml) + +- [`event-consumer-workflowtemplate.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/workflow-event-binding/event-consumer-workflowtemplate.yaml) + +- [`templates.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/workflow-template/templates.yaml) + +- [`workflow-archive-logs.yaml`](https://github.com/argoproj/argo-workflows/blob/main/examples/workflow-template/workflow-archive-logs.yaml) +
+ +### Fields +| Field Name | Field Type | Description | +|:----------:|:----------:|---------------| +|`pullPolicy`|`string`|Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise.| +|`reference`|`string`|Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.| + ## ISCSIVolumeSource Represents an ISCSI disk. ISCSI volumes can only be mounted as read/write once. ISCSI volumes support ownership management and SELinux relabeling. @@ -6385,7 +6738,7 @@ Represents a projected volume source | Field Name | Field Type | Description | |:----------:|:----------:|---------------| |`defaultMode`|`integer`|defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. 
This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.| -|`sources`|`Array<`[`VolumeProjection`](#volumeprojection)`>`|sources is the list of volume projections| +|`sources`|`Array<`[`VolumeProjection`](#volumeprojection)`>`|sources is the list of volume projections. Each entry in this list handles one source.| ## QuobyteVolumeSource @@ -6580,10 +6933,10 @@ LifecycleHandler defines a specific action that should be taken in a lifecycle h ### Fields | Field Name | Field Type | Description | |:----------:|:----------:|---------------| -|`exec`|[`ExecAction`](#execaction)|Exec specifies the action to take.| -|`httpGet`|[`HTTPGetAction`](#httpgetaction)|HTTPGet specifies the http request to perform.| -|`sleep`|[`SleepAction`](#sleepaction)|Sleep represents the duration that the container should sleep before being terminated.| -|`tcpSocket`|[`TCPSocketAction`](#tcpsocketaction)|Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for the backward compatibility. There are no validation of this field and lifecycle hooks will fail in runtime when tcp handler is specified.| +|`exec`|[`ExecAction`](#execaction)|Exec specifies a command to execute in the container.| +|`httpGet`|[`HTTPGetAction`](#httpgetaction)|HTTPGet specifies an HTTP GET request to perform.| +|`sleep`|[`SleepAction`](#sleepaction)|Sleep represents a duration that the container should sleep.| +|`tcpSocket`|[`TCPSocketAction`](#tcpsocketaction)|Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept for backward compatibility. There is no validation of this field and lifecycle hooks will fail at runtime when it is specified.| ## ExecAction @@ -6602,7 +6955,7 @@ ExecAction describes a "run in container" action. ## GRPCAction -_No description available_ +GRPCAction specifies an action involving a GRPC service. 
### Fields | Field Name | Field Type | Description | @@ -6653,6 +7006,7 @@ ResourceClaim references one entry in PodSpec.ResourceClaims. | Field Name | Field Type | Description | |:----------:|:----------:|---------------| |`name`|`string`|Name must match the name of one entry in pod.spec.resourceClaims of the Pod where this field is used. It makes that resource available inside a container.| +|`request`|`string`|Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request.| ## Quantity @@ -6719,8 +7073,8 @@ Defines a set of pods (namely those matching the labelSelector relative to the g | Field Name | Field Type | Description | |:----------:|:----------:|---------------| |`labelSelector`|[`LabelSelector`](#labelselector)|A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods.| -|`matchLabelKeys`|`Array< string >`|MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.| -|`mismatchLabelKeys`|`Array< string >`|MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.| +|`matchLabelKeys`|`Array< string >`|MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).| +|`mismatchLabelKeys`|`Array< string >`|MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. 
This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).| |`namespaceSelector`|[`LabelSelector`](#labelselector)|A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces.| |`namespaces`|`Array< string >`|namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace".| |`topologyKey`|`string`|This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.| @@ -6738,7 +7092,7 @@ TypedLocalObjectReference contains enough information to let you locate the type ## TypedObjectReference -_No description available_ +TypedObjectReference contains enough information to let you locate the typed referenced object ### Fields | Field Name | Field Type | Description | @@ -6795,8 +7149,8 @@ PersistentVolumeClaimCondition contains details about state of pvc |`lastTransitionTime`|[`Time`](#time)|lastTransitionTime is the time the condition transitioned from one status to another.| |`message`|`string`|message is the human-readable message indicating details about last transition.| |`reason`|`string`|reason is a unique, this should be a short, machine understandable string that gives the reason for condition's last transition. 
If it reports "Resizing" that means the underlying persistent volume is being resized.| -|`status`|`string`|_No description available_| -|`type`|`string`|_No description available_| +|`status`|`string`|Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required| +|`type`|`string`|Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about| ## ModifyVolumeStatus @@ -6843,7 +7197,7 @@ PersistentVolumeClaimTemplate is used to produce PersistentVolumeClaim objects a ## VolumeProjection -Projection that may be projected along with other supported volume types +Projection that may be projected along with other supported volume types. Exactly one of these fields must be set. ### Fields | Field Name | Field Type | Description | diff --git a/docs/releases.md b/docs/releases.md index 76a4f84ece41..8fcd2946298a 100644 --- a/docs/releases.md +++ b/docs/releases.md @@ -38,25 +38,16 @@ Otherwise, we typically release every two weeks: * Patch fixes for the current stable version. * The next RC, if we are currently in a release cycle. -## Kubernetes Compatibility Matrix +## Tested Versions -| Argo Workflows \ Kubernetes | 1.28 | 1.29 | 1.30 | 1.31 | -|-----------------------------|------|------|------|------| -| **main** | `✓` | `✓` | `✓` | `✓` | -| **3.6** | `✓` | `✓` | `✓` | `✓` | -| **3.5** | `✓` | `✓` | `✓` | `?` | -| **3.4** | `?` | `?` | `?` | `?` | +--8<-- "docs/tested-kubernetes-versions.md" -* `✓` Fully supported versions. -* `?` Due to breaking changes might not work. Also, we haven't thoroughly tested against this version. -* `✕` Unsupported versions. 
+Use the version selector to view the tested Kubernetes versions for a specific Argo Workflows version. ### Notes on Compatibility -Argo versions may be compatible with newer and older Kubernetes versions (indicated by `?`), but only three minor versions are tested unless otherwise noted. +Argo versions may be compatible with newer and older Kubernetes versions, but only two minor versions are tested. Note that Kubernetes [is backward compatible with clients](https://github.com/kubernetes/client-go/tree/aa7909e7d7c0661792ba21b9e882f3cd6ad0ce53?tab=readme-ov-file#compatibility-client-go---kubernetes-clusters), so newer k8s versions are generally supported. The caveats with newer k8s versions are possible changes to experimental APIs and unused new features. Argo uses stable Kubernetes APIs such as Pods and ConfigMaps; see the Controller and Server RBAC of your [installation](installation.md) for a full list. - -The `main` branch is currently [tested on Kubernetes 1.28](https://github.com/argoproj/argo-workflows/blob/main/.github/workflows/ci-build.yaml#L228) and [1.31](https://github.com/argoproj/argo-workflows/blob/main/.github/workflows/ci-build.yaml#L263). diff --git a/docs/scaling.md b/docs/scaling.md index 0b3d4130c089..822e859898c6 100644 --- a/docs/scaling.md +++ b/docs/scaling.md @@ -5,10 +5,7 @@ For running large workflows, you'll typically need to scale the controller to ma ## Horizontally Scaling You cannot horizontally scale the controller. - -> v3.0 and after - -As of v3.0, the controller supports having a hot-standby for [High Availability](high-availability.md#workflow-controller). +The controller supports having a hot-standby for [High Availability](high-availability.md#workflow-controller). ## Vertically Scaling @@ -32,7 +29,8 @@ If you have sufficient CPU cores, you can take advantage of them with more gorou ### K8S API Client Side Rate Limiting -The K8S client library rate limits the messages that can go out. 
+The Kubernetes client library used by the Workflow Controller rate limits the number of API requests that can be sent to the Kubernetes API server. +This rate limiting helps prevent overwhelming the API server with too many requests at once. If you frequently see messages similar to this in the Controller log (issued by the library): @@ -46,9 +44,34 @@ Or, in >= v3.5, if you see warnings similar to this (could be any CR, not just ` Waited for 7.090296384s, request:GET:https://10.100.0.1:443/apis/argoproj.io/v1alpha1/namespaces/argo/workflowtemplates/s2t ``` -Then, if your K8S API Server can handle more requests: +These messages indicate that the Controller is being throttled by the client-side rate limiting. + +#### Adjusting Rate Limiting + +By using cluster-wide observability tooling, you can determine whether or not your Kubernetes API server can handle more requests. +You can increase the rate limits by adjusting the `--qps` and `--burst` arguments for the Controller: + +- `--qps`: This argument sets the average number of queries per second allowed by the Kubernetes client. +The default value is 20. +- `--burst`: This argument sets the number of queries per second the client can send before it starts enforcing the qps limit. +The default value is 30. Typically, burst should be greater than qps. + +By increasing these values, you can allow the Controller to send more requests to the API server, reducing the likelihood of throttling. + +##### Example Configuration -- Increase both `--qps` and `--burst` arguments for the Controller. The `qps` value indicates the average number of queries per second allowed by the K8S Client. The `burst` value is the number of queries/sec the Client receives before it starts enforcing `qps`, so typically `burst` > `qps`. If not set, the default values are `qps=20` and `burst=30` (as of v3.5 (refer to `cmd/workflow-controller/main.go` in case the values change)). 
+To increase the rate limits, you might set the arguments as follows: + +```yaml +args: + - --qps=50 + - --burst=75 +``` + +This configuration allows the Controller to send an average of 50 queries per second, with a burst capacity of 75 queries per second before throttling is enforced. + +It is important to note that increasing these values can increase the load on the Kubernetes API server and that you must observe your Kubernetes API under load in order to determine whether or not the values you have chosen are correct for your needs. +It is not possible to provide a one-size-fits-all recommendation for these values. ## Sharding @@ -73,8 +96,6 @@ data: instanceID: i1 ``` -> v2.9 and after - You may need to pass the instance ID to the CLI: ```bash diff --git a/docs/tested-kubernetes-versions.md b/docs/tested-kubernetes-versions.md new file mode 100644 index 000000000000..7c966e2f1983 --- /dev/null +++ b/docs/tested-kubernetes-versions.md @@ -0,0 +1 @@ +This version is tested under Kubernetes v1.28.13 and v1.31.0. 
\ No newline at end of file diff --git a/go.mod b/go.mod index 855e9a89b4da..249d35292373 100644 --- a/go.mod +++ b/go.mod @@ -1,110 +1,121 @@ module github.com/argoproj/argo-workflows/v3 -go 1.23.1 +go 1.24.2 require ( - cloud.google.com/go/storage v1.36.0 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 + cloud.google.com/go/storage v1.50.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.3.2 github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible - github.com/Masterminds/sprig/v3 v3.2.3 + github.com/Masterminds/sprig/v3 v3.3.0 github.com/TwiN/go-color v1.4.1 github.com/alibabacloud-go/tea v1.2.1 github.com/aliyun/aliyun-oss-go-sdk v3.0.2+incompatible github.com/aliyun/credentials-go v1.3.2 - github.com/argoproj/argo-events v1.9.1 + github.com/argoproj/argo-events v1.9.6 github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117 github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9 github.com/colinmarc/hdfs/v2 v2.4.0 github.com/coreos/go-oidc/v3 v3.9.0 github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 github.com/evanphx/json-patch v5.8.0+incompatible - github.com/expr-lang/expr v1.16.9 + github.com/expr-lang/expr v1.17.0 github.com/gavv/httpexpect/v2 v2.16.0 - github.com/go-git/go-git/v5 v5.13.1 - github.com/go-jose/go-jose/v3 v3.0.3 - github.com/go-openapi/jsonreference v0.20.4 + github.com/go-git/go-git/v5 v5.14.0 + github.com/go-jose/go-jose/v3 v3.0.4 + github.com/go-openapi/jsonreference v0.21.0 github.com/go-sql-driver/mysql v1.7.1 github.com/gogo/protobuf v1.3.2 github.com/golang/protobuf v1.5.4 - github.com/google/go-containerregistry v0.17.0 - github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20220720195016-31786c6cbb82 + github.com/google/go-containerregistry v0.20.2 + github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20241111191718-6bce25ecf029 github.com/gorilla/handlers v1.5.2 
github.com/gorilla/websocket v1.5.1 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 github.com/grpc-ecosystem/grpc-gateway v1.16.0 - github.com/itchyny/gojq v0.12.14 + github.com/itchyny/gojq v0.12.17 github.com/jcmturner/gokrb5/v8 v8.4.4 github.com/klauspost/pgzip v1.2.6 - github.com/minio/minio-go/v7 v7.0.77 - github.com/prometheus/client_golang v1.19.0 - github.com/prometheus/common v0.48.0 + github.com/minio/minio-go/v7 v7.0.89 + github.com/prometheus/client_golang v1.21.1 + github.com/prometheus/common v0.62.0 github.com/robfig/cron/v3 v3.0.1 github.com/sethvargo/go-limiter v0.7.2 github.com/sirupsen/logrus v1.9.3 github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 github.com/soheilhy/cmux v0.1.5 - github.com/spf13/cobra v1.8.1 - github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.18.2 + github.com/spf13/cobra v1.9.1 + github.com/spf13/pflag v1.0.6 + github.com/spf13/viper v1.20.0 github.com/stretchr/testify v1.10.0 - github.com/tidwall/gjson v1.17.0 + github.com/tidwall/gjson v1.18.0 github.com/upper/db/v4 v4.7.0 github.com/valyala/fasttemplate v1.2.2 github.com/xeipuuv/gojsonschema v1.2.0 go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0 - go.opentelemetry.io/otel v1.23.0 + go.opentelemetry.io/otel v1.34.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0 go.opentelemetry.io/otel/exporters/prometheus v0.45.1 - go.opentelemetry.io/otel/metric v1.23.0 - go.opentelemetry.io/otel/sdk v1.23.0 - go.opentelemetry.io/otel/sdk/metric v1.23.0 - golang.org/x/crypto v0.31.0 + go.opentelemetry.io/otel/metric v1.34.0 + go.opentelemetry.io/otel/sdk v1.34.0 + go.opentelemetry.io/otel/sdk/metric v1.34.0 + golang.org/x/crypto v0.36.0 golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/oauth2 v0.21.0 - golang.org/x/sync v0.10.0 - golang.org/x/sys v0.28.0 - golang.org/x/term v0.27.0 - golang.org/x/time v0.5.0 - google.golang.org/api v0.163.0 - 
google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe - google.golang.org/grpc v1.61.0 + golang.org/x/oauth2 v0.28.0 + golang.org/x/sync v0.12.0 + golang.org/x/time v0.11.0 + google.golang.org/api v0.228.0 + google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb + google.golang.org/grpc v1.71.1 gopkg.in/go-playground/webhooks.v5 v5.17.0 - k8s.io/api v0.30.3 - k8s.io/apimachinery v0.30.3 - k8s.io/cli-runtime v0.30.3 - k8s.io/client-go v0.30.3 - k8s.io/gengo v0.0.0-20220902162205-c0856e24416d - k8s.io/klog/v2 v2.120.1 - k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 - k8s.io/kubectl v0.30.3 - k8s.io/utils v0.0.0-20230726121419-3b25d923346b + k8s.io/api v0.32.2 + k8s.io/apimachinery v0.32.2 + k8s.io/cli-runtime v0.32.2 + k8s.io/client-go v0.32.2 + k8s.io/gengo v0.0.0-20250207200755-1244d31929d7 + k8s.io/klog/v2 v2.130.1 + k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff + k8s.io/kubectl v0.32.2 + k8s.io/utils v0.0.0-20241210054802-24370beab758 sigs.k8s.io/yaml v1.4.0 zombiezen.com/go/sqlite v1.2.0 ) require ( - dario.cat/mergo v1.0.0 // indirect + cel.dev/expr v0.19.2 // indirect + cloud.google.com/go/auth v0.15.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/monitoring v1.24.0 // indirect + dario.cat/mergo v1.0.1 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect github.com/TylerBrock/colorjson v0.0.0-20200706003622-8a50f05110d2 // indirect github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.2 // indirect - github.com/cenkalti/backoff/v4 v4.2.1 // 
indirect - github.com/cloudflare/circl v1.3.7 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 // indirect + github.com/blang/semver/v4 v4.0.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cloudflare/circl v1.6.0 // indirect + github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect - github.com/cyphar/filepath-securejoin v0.3.6 // indirect - github.com/distribution/reference v0.5.0 // indirect + github.com/cyphar/filepath-securejoin v0.4.1 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect + github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/evilmonkeyinc/jsonpath v0.8.1 // indirect - github.com/fatih/color v1.15.0 // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fxamacker/cbor/v2 v2.7.0 // indirect github.com/go-ini/ini v1.67.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/gobwas/glob v0.2.4-0.20181002190808-e7a84e9525fe // indirect - github.com/goccy/go-json v0.10.3 // indirect - github.com/golang-jwt/jwt/v5 v5.2.1 // indirect - github.com/google/gnostic-models v0.6.8 // indirect - github.com/google/s2a-go v0.1.7 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/golang-jwt/jwt/v5 v5.2.2 // indirect + github.com/google/gnostic-models v0.6.9 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/pgconn v1.14.3 // indirect github.com/jackc/pgio v1.0.0 // indirect @@ -119,144 +130,140 @@ require ( github.com/jcmturner/rpc/v2 v2.0.3 // 
indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/minio/crc64nvme v1.0.1 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect - github.com/pjbgf/sha1cd v0.3.0 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/prometheus/client_model v0.6.0 // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/prometheus/client_model v0.6.1 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect - github.com/sagikazarmark/locafero v0.4.0 // indirect - github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/sagikazarmark/locafero v0.7.0 // indirect github.com/segmentio/fasthash v1.0.3 // indirect - github.com/skeema/knownhosts v1.3.0 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/vbatts/tar-split v0.11.3 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 // indirect - go.opentelemetry.io/otel/trace v1.23.0 // indirect - go.opentelemetry.io/proto/otlp v1.1.0 // indirect + github.com/x448/float16 v0.8.4 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.34.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect + go.opentelemetry.io/otel/trace v1.34.0 // indirect + go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/mod v0.19.0 // indirect - golang.org/x/text v0.21.0 // indirect - golang.org/x/tools v0.23.0 // indirect - google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac // indirect - 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe // indirect + google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect + gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect modernc.org/libc v1.41.0 // indirect modernc.org/mathutil v1.6.0 // indirect modernc.org/memory v1.7.2 // indirect modernc.org/sqlite v1.29.1 // indirect - sigs.k8s.io/kustomize/kustomize/v5 v5.0.4-0.20230601165947-6ce0bf390ce3 // indirect + sigs.k8s.io/kustomize/kustomize/v5 v5.5.0 // indirect + sigs.k8s.io/randfill v1.0.0 // indirect ) require ( - cloud.google.com/go v0.112.0 // indirect - cloud.google.com/go/compute/metadata v0.3.0 // indirect - cloud.google.com/go/iam v1.1.5 // indirect - github.com/Azure/azure-sdk-for-go v66.0.0+incompatible // indirect - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 - github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 // indirect - github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + cloud.google.com/go v0.119.0 // indirect + cloud.google.com/go/compute/metadata v0.6.0 // indirect + cloud.google.com/go/iam v1.4.1 // indirect + github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 + github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect - github.com/Azure/go-autorest/autorest v0.11.28 // indirect - github.com/Azure/go-autorest/autorest/adal v0.9.21 // indirect - github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 // indirect - github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 // indirect + github.com/Azure/go-autorest/autorest v0.11.29 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.23 // indirect + github.com/Azure/go-autorest/autorest/azure/auth 
v0.5.12 // indirect + github.com/Azure/go-autorest/autorest/azure/cli v0.4.6 // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.4.0 // indirect github.com/MakeNowJust/heredoc v1.0.0 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver/v3 v3.2.0 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/ProtonMail/go-crypto v1.1.3 // indirect + github.com/Masterminds/semver/v3 v3.3.0 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.1.5 // indirect github.com/ajg/form v1.5.1 // indirect github.com/andybalholm/brotli v1.0.4 // indirect github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71 // indirect - github.com/aws/aws-sdk-go-v2 v1.30.1 // indirect - github.com/aws/aws-sdk-go-v2/config v1.27.23 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.17.24 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.9 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.13 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.13 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect - github.com/aws/aws-sdk-go-v2/service/ecr v1.17.8 // indirect - github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.13.8 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.15 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.22.1 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.30.1 // indirect - github.com/aws/smithy-go v1.20.3 // indirect - github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220706184558-ce46abcd012b // indirect + github.com/aws/aws-sdk-go-v2 v1.36.3 // 
indirect + github.com/aws/aws-sdk-go-v2/config v1.29.9 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.62 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ecr v1.36.7 // indirect + github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.27.7 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 // indirect + github.com/aws/smithy-go v1.22.2 // indirect + github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20241209220728-69e8c24e6fc1 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/chai2010/gettext-go v1.0.2 // indirect - github.com/chrismellard/docker-credential-acr-env v0.0.0-20220327082430-c57b701bfc08 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect + github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect github.com/creack/pty v1.1.21 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/daviddengcn/go-colortext v1.0.0 // indirect github.com/dimchansky/utfbom v1.1.1 // indirect - github.com/docker/cli v24.0.7+incompatible // indirect + github.com/docker/cli v27.1.1+incompatible // indirect github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/docker v27.1.1+incompatible // indirect - github.com/docker/docker-credential-helpers v0.7.0 // indirect + github.com/docker/docker-credential-helpers v0.8.2 // indirect 
github.com/dustin/go-humanize v1.0.1 // indirect github.com/emicklei/go-restful/v3 v3.11.0 // indirect github.com/emirpasic/gods v1.18.1 // indirect - github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect + github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f // indirect github.com/fatih/camelcase v1.0.0 // indirect github.com/fatih/structs v1.1.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect - github.com/fvbommel/sortorder v1.1.0 // indirect + github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/go-errors/errors v1.4.2 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect - github.com/go-git/go-billy/v5 v5.6.1 // indirect - github.com/go-logr/logr v1.4.1 // indirect - github.com/go-openapi/jsonpointer v0.20.2 // indirect - github.com/go-openapi/swag v0.22.6 // indirect - github.com/golang-jwt/jwt/v4 v4.5.1 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/go-git/go-billy/v5 v5.6.2 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/golang-jwt/jwt/v4 v4.5.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/mock v1.6.0 github.com/google/btree v1.0.1 // indirect - github.com/google/go-cmp v0.6.0 // indirect - github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220719135131-f79ec2192282 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20230516205744-dbecb1de8cfa // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/uuid v1.6.0 // indirect - 
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.3.3 // indirect - github.com/imdario/mergo v0.3.15 // indirect + github.com/huandu/xstrings v1.5.0 // indirect github.com/imkira/go-interpol v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/itchyny/timefmt-go v0.1.5 // indirect + github.com/itchyny/timefmt-go v0.1.6 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jcmturner/gofork v1.7.6 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/jonboulle/clockwork v0.2.2 // indirect + github.com/jonboulle/clockwork v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/klauspost/compress v1.17.9 // indirect - github.com/klauspost/cpuid/v2 v2.2.8 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/lithammer/dedent v1.1.0 // indirect - github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/minio/md5-simd v1.1.2 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect 
github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/moby/spdystream v0.2.0 // indirect - github.com/moby/term v0.0.0-20221205130635-1aeaba878587 // indirect + github.com/moby/spdystream v0.5.0 // indirect + github.com/moby/term v0.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect @@ -265,18 +272,18 @@ require ( github.com/onsi/ginkgo v1.16.5 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0-rc3 // indirect - github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/prometheus/procfs v0.12.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect github.com/rs/xid v1.6.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sanity-io/litter v1.5.5 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect - github.com/shopspring/decimal v1.2.0 // indirect - github.com/spf13/afero v1.11.0 // indirect - github.com/spf13/cast v1.6.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + github.com/spf13/afero v1.12.0 // indirect + github.com/spf13/cast v1.7.1 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -290,21 +297,21 @@ require ( github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 // indirect github.com/yudai/gojsondiff v1.0.0 // indirect github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 // indirect - go.opencensus.io v0.24.0 // indirect - go.starlark.net 
v0.0.0-20230525235612-a134d8f9ddca // indirect - golang.org/x/net v0.33.0 // indirect - google.golang.org/protobuf v1.34.2 // indirect + golang.org/x/net v0.37.0 // indirect + golang.org/x/sys v0.31.0 + golang.org/x/term v0.30.0 + golang.org/x/text v0.23.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/component-base v0.30.3 // indirect - k8s.io/component-helpers v0.30.3 // indirect - k8s.io/metrics v0.30.3 // indirect + k8s.io/component-base v0.32.2 // indirect + k8s.io/component-helpers v0.32.2 // indirect + k8s.io/metrics v0.32.2 // indirect moul.io/http2curl/v2 v2.3.0 // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd - sigs.k8s.io/kustomize/api v0.13.5-0.20230601165947-6ce0bf390ce3 // indirect - sigs.k8s.io/kustomize/kyaml v0.14.3-0.20230601165947-6ce0bf390ce3 // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect + sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 + sigs.k8s.io/kustomize/api v0.18.0 // indirect + sigs.k8s.io/kustomize/kyaml v0.18.1 // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.6.0 // indirect ) diff --git a/go.sum b/go.sum index 480e62ebde02..55aabc5a1701 100644 --- a/go.sum +++ b/go.sum @@ -1,48 +1,66 @@ +cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= +cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= -cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= -cloud.google.com/go/compute/metadata v0.3.0 
h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc= -cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= -cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= -cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= -cloud.google.com/go/storage v1.36.0 h1:P0mOkAcaJxhCTvAkMhxMfrTKiNcub4YmmPBtlhAyTr8= -cloud.google.com/go/storage v1.36.0/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8= -dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= -dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +cloud.google.com/go v0.119.0 h1:tw7OjErMzJKbbjaEHkrt60KQrK5Wus/boCZ7tm5/RNE= +cloud.google.com/go v0.119.0/go.mod h1:fwB8QLzTcNevxqi8dcpR+hoMIs3jBherGS9VUBDAW08= +cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= +cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= +cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/iam v1.4.1 h1:cFC25Nv+u5BkTR/BT1tXdoF2daiVbZ1RLx2eqfQ9RMM= +cloud.google.com/go/iam v1.4.1/go.mod h1:2vUEJpUG3Q9p2UdsyksaKpDzlwOrnMzS30isdReIcLM= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= +cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= +cloud.google.com/go/longrunning v0.6.4 h1:3tyw9rO3E2XVXzSApn1gyEEnH2K9SynNQjMlBi3uHLg= +cloud.google.com/go/longrunning v0.6.4/go.mod h1:ttZpLCe6e7EXvn9OxpBRx7kZEB0efv8yBO6YnVMfhJs= +cloud.google.com/go/monitoring v1.24.0 h1:csSKiCJ+WVRgNkRzzz3BPoGjFhjPY23ZTcaenToJxMM= 
+cloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc= +cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= +cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= +cloud.google.com/go/trace v1.11.3 h1:c+I4YFjxRQjvAhRmSsmjpASUKq88chOX854ied0K/pE= +cloud.google.com/go/trace v1.11.3/go.mod h1:pt7zCYiDSQjC9Y2oqCsh9jF4GStB/hmjrYLsxRR27q8= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/azure-sdk-for-go v66.0.0+incompatible h1:bmmC38SlE8/E81nNADlgmVGurPWMHDX2YNXVQMrBpEE= -github.com/Azure/azure-sdk-for-go v66.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 h1:E+OJmp2tPvt1W+amx48v1eqbjDYsgN+RzP4q16yV5eM= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1/go.mod h1:a6xsAQUZg+VsS3TJ05SRp524Hs4pZ/AeFSr5ENf0Yjo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ= github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 h1:U2rTu3Ef+7w9FHKIAXM6ZyqF3UOWJZ12zIm8zECAFfg= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0/go.mod 
h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2/go.mod h1:SqINnQ9lVVdRlyC8cd1lCI0SdX4n2paeABd2K8ggfnE= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 h1:jBQA3cKT4L2rWMpgE7Yt3Hwh2aUj8KXjIGLxjHeYNNo= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0 h1:AifHbc4mg0x9zW52WOpKbsHaDKuRhlI7TVl47thgQ70= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0/go.mod h1:T5RfihdXtBDxt1Ch2wobif3TvzTdumDy29kahv6AV9A= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.3.2 h1:YUUxeiOWgdAQE3pXt2H7QXzZs0q8UBjgRbl56qo8GYM= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.3.2/go.mod h1:dmXQgZuiSubAecswZE+Sm8jkvEa7kQgTPVRvwL/nd0E= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= 
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsCswgme4iYf6rfSKUDzbCc= -github.com/Azure/go-autorest/autorest v0.11.28 h1:ndAExarwr5Y+GaHE6VCaY1kyS/HwwGGyuimVhWsHOEM= -github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA= +github.com/Azure/go-autorest/autorest v0.11.29 h1:I4+HL/JDvErx2LjyzaVxllw2lRDB5/BT2Bm4g20iqYw= +github.com/Azure/go-autorest/autorest v0.11.29/go.mod h1:ZtEzC4Jy2JDrZLxvWs8LrBWEBycl1hbT1eknI8MtfAs= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.21 h1:jjQnVFXPfekaqb8vIsv2G1lxshoW+oGv4MDlhRtnYZk= -github.com/Azure/go-autorest/autorest/adal v0.9.21/go.mod h1:zua7mBUaCc5YnSLKYgGJR/w5ePdMDA6H56upLsHzA9U= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 h1:P6bYXFoao05z5uhOQzbC3Qd8JqF3jUoocoTeIxkp2cA= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.11/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 h1:0W/yGmFdTIT77fvdlGZ0LMISoLHFJ7Tx4U0yeB+uFs4= +github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk= +github.com/Azure/go-autorest/autorest/adal v0.9.23 h1:Yepx8CvFxwNKpH6ja7RZ+sKX+DWYNldbLiALMC3BTz8= +github.com/Azure/go-autorest/autorest/adal v0.9.23/go.mod h1:5pcMqFkdPhviJdlEy3kC/v1ZLnQl0MH6XA5YCcMhy4c= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.12 h1:wkAZRgT/pn8HhFyzfe9UnqOjJYqlembgCTi72Bm/xKk= 
+github.com/Azure/go-autorest/autorest/azure/auth v0.5.12/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= github.com/Azure/go-autorest/autorest/azure/cli v0.4.5/go.mod h1:ADQAXrkgm7acgWVUNamOgh8YNrv4p27l3Wc55oVfpzg= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.6 h1:w77/uPk80ZET2F+AfQExZyEWtn+0Rk/uw17m9fv5Ajc= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.6/go.mod h1:piCfgPho7BiIDdEQ1+g4VmKyD5y+p/XtSNqE6Hc4QD0= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= @@ -57,11 +75,21 @@ github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZ github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.0 h1:MUkXAnvvDHgvPItl0nBj0hgk0f7hnnQbGm0h0+YxbN4= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.2.1/go.mod 
h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 h1:5IT7xOdq17MtcdtL/vtl6mGfzhaq4m4vpollPRmlsBQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0/go.mod h1:ZV4VOm0/eHR06JLrXWe09068dHpr3TRpY9Uo7T+anuA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0 h1:nNMpRpnkWDAaqcpxMJvxa/Ud98gjbYwayJY4/9bdjiU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 h1:ig/FpDD2JofP/NExKQUbn7uOSZzJAQqogfqluZK4ed4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1:1G1pk05UrOh0NlF1oeaaix1x8XzrfjIDK47TY0Zehcw= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= @@ -69,16 +97,16 @@ github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6 github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= 
-github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= -github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= +github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= +github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= -github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= -github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4= +github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/TwiN/go-color v1.4.1 h1:mqG0P/KBgHKVqmtL5ye7K0/Gr4l6hTksPgTgMk3mUzc= @@ -102,82 +130,83 @@ 
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHG github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/argoproj/argo-events v1.9.1 h1:X7Sp8Xrj6OlUtrHJyJLOt6flzRwOAsKUifnf/sLJDac= -github.com/argoproj/argo-events v1.9.1/go.mod h1:yPwsLeU/Vp9nAEd4OBT8fOMEbIrmuvC4SIIqx5uJnxY= +github.com/argoproj/argo-events v1.9.6 h1:tQTyUmMt0/4UI+9fbXrmK1/h9oalV7KBCC3YgPI7qz0= +github.com/argoproj/argo-events v1.9.6/go.mod h1:MkJI9UXTLnLOFX6LKo0rC1tnvWfLFzKkGigsdfu58SA= github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117 h1:iOmb5RDUnQ80ZLaBYCbfgNxMJ7qC0boM267nlzMyFjo= github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117/go.mod h1:mwXbiH0ojJzbstR8XV9Ha/dK4IHHTKfgkQi2Kz8Aq0Y= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71 h1:m3N1Fv5vE5IcxuTOGFGGV0grrVFHV8UY2SV0wSBXAC8= github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71/go.mod h1:/ynarkO/43wP/JM2Okn61e8WFMtdbtA8he7GJxW+SFM= -github.com/aws/aws-sdk-go-v2 v1.16.7/go.mod h1:6CpKuLXg2w7If3ABZCl/qZ6rEgwtjZTn4eAf4RcEyuw= -github.com/aws/aws-sdk-go-v2 v1.30.1 h1:4y/5Dvfrhd1MxRDD77SrfsDaj8kUkkljU7XE83NPV+o= github.com/aws/aws-sdk-go-v2 v1.30.1/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc= -github.com/aws/aws-sdk-go-v2/config v1.15.13/go.mod h1:AcMu50uhV6wMBUlURnEXhr9b3fX6FLSTlEV89krTEGk= -github.com/aws/aws-sdk-go-v2/config v1.27.23 h1:Cr/gJEa9NAS7CDAjbnB7tHYb3aLZI2gVggfmSAasDac= +github.com/aws/aws-sdk-go-v2 v1.36.3 
h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= +github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= github.com/aws/aws-sdk-go-v2/config v1.27.23/go.mod h1:WMMYHqLCFu5LH05mFOF5tsq1PGEMfKbu083VKqLCd0o= -github.com/aws/aws-sdk-go-v2/credentials v1.12.8/go.mod h1:P2Hd4Sy7mXRxPNcQMPBmqszSJoDXexX8XEDaT6lucO0= +github.com/aws/aws-sdk-go-v2/config v1.29.9 h1:Kg+fAYNaJeGXp1vmjtidss8O2uXIsXwaRqsQJKXVr+0= +github.com/aws/aws-sdk-go-v2/config v1.29.9/go.mod h1:oU3jj2O53kgOU4TXq/yipt6ryiooYjlkqqVaZk7gY/U= github.com/aws/aws-sdk-go-v2/credentials v1.17.23/go.mod h1:V/DvSURn6kKgcuKEk4qwSwb/fZ2d++FFARtWSbXnLqY= -github.com/aws/aws-sdk-go-v2/credentials v1.17.24 h1:YclAsrnb1/GTQNt2nzv+756Iw4mF8AOzcDfweWwwm/M= github.com/aws/aws-sdk-go-v2/credentials v1.17.24/go.mod h1:Hld7tmnAkoBQdTMNYZGzztzKRdA4fCdn9L83LOoigac= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.8/go.mod h1:oL1Q3KuCq1D4NykQnIvtRiBGLUXhcpY5pl6QZB2XEPU= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.9 h1:Aznqksmd6Rfv2HQN9cpqIV/lQRMaIpJkLLaJ1ZI76no= +github.com/aws/aws-sdk-go-v2/credentials v1.17.62 h1:fvtQY3zFzYJ9CfixuAQ96IxDrBajbBWGqjNTCa79ocU= +github.com/aws/aws-sdk-go-v2/credentials v1.17.62/go.mod h1:ElETBxIQqcxej++Cs8GyPBbgMys5DgQPTwo7cUPDKt8= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.9/go.mod h1:WQr3MY7AxGNxaqAtsDWn+fBxmd4XvLkzeqQ8P1VM0/w= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.14/go.mod h1:kdjrMwHwrC3+FsKhNcCMJ7tUVj/8uSD5CZXeQ4wV6fM= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.13 h1:5SAoZ4jYpGH4721ZNoS1znQrhOfZinOhc4XuTXx/nVc= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.13/go.mod h1:+rdA6ZLpaSeM7tSg/B0IEDinCIBJGmW8rKDFkYpP04g= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.8/go.mod 
h1:ZIV8GYoC6WLBW5KGs+o4rsc65/ozd+eQ0L31XF5VDwk= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.13 h1:WIijqeaAO7TYFLbhsZmi2rgLEAtWOC1LhxCAVTJlSKw= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.13/go.mod h1:i+kbfa76PQbWw/ULoWnp51EYVWH4ENln76fLQE3lXT8= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.15/go.mod h1:Tkrthp/0sNBShQQsamR7j/zY4p19tVTAs+nnqhH6R3c= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= -github.com/aws/aws-sdk-go-v2/service/ecr v1.17.8 h1:wgZo/yeY0f+2RWy2q1rTtZSPMmq37Zy3pY4QypHeurg= -github.com/aws/aws-sdk-go-v2/service/ecr v1.17.8/go.mod h1:ItZADKTnGxqcqXABHyNpoBljQ8ORt4h+D39RToM/3Ds= -github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.13.8 h1:uByYzUJNBrI4LN0H+HMA7yrDWQxe2f9cF7ZkiXltXRo= -github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.13.8/go.mod h1:nPSH6Ebmb3OkKl7+CLSjx+SMBaoFKbOe9mZhTAd352k= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= +github.com/aws/aws-sdk-go-v2/service/ecr v1.36.7 h1:R+5XKIJga2K9Dkj0/iQ6fD/MBGo02oxGGFTc512lK/Q= +github.com/aws/aws-sdk-go-v2/service/ecr v1.36.7/go.mod h1:fDPQV/6ONOQOjvtKhtypIy1wcGLcKYtoK/lvZ9fyDGQ= 
+github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.27.7 h1:eddAe+FnCCLECUKHUHLM9/iAlqDkyRjZk/84+eo0ToE= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.27.7/go.mod h1:lik3qEJ4TBnQrEKoZnFh8E8nn5oZSebnB/pydq8oQRQ= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3/go.mod h1:GlAeCkHwugxdHaueRr4nhPuY+WW+gR8UjlcqzPr1SPI= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.8/go.mod h1:rDVhIMAX9N2r8nWxDUlbubvvaFMnfsm+3jAV7q+rpM4= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.15 h1:I9zMeF107l0rJrpnHpjEiiTSCKYAIw8mALiXcPsGBiA= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.15/go.mod h1:9xWJ3Q/S6Ojusz1UIkfycgD1mGirJfLLKqq3LPT7WN8= -github.com/aws/aws-sdk-go-v2/service/sso v1.11.11/go.mod h1:MO4qguFjs3wPGcCSpQ7kOFTwRvb+eu+fn+1vKleGHUk= -github.com/aws/aws-sdk-go-v2/service/sso v1.22.1 h1:p1GahKIjyMDZtiKoIn0/jAj/TkMzfzndDv5+zi2Mhgc= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= github.com/aws/aws-sdk-go-v2/service/sso v1.22.1/go.mod h1:/vWdhoIoYA5hYoPZ6fm7Sv4d8701PiG5VKe8/pPJL60= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 h1:8JdC7Gr9NROg1Rusk25IcZeTO59zLxsKgE0gkh5O6h0= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.1/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.1/go.mod h1:xyFHA4zGxgYkdD73VeezHt3vSKEG9EmFnGwoKlP00u4= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.2 h1:ORnrOK0C4WmYV/uYt3koHEWBLYsRDwk2Np+eEoyV4Z0= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.2/go.mod 
h1:xyFHA4zGxgYkdD73VeezHt3vSKEG9EmFnGwoKlP00u4= -github.com/aws/aws-sdk-go-v2/service/sts v1.16.9/go.mod h1:O1IvkYxr+39hRf960Us6j0x1P8pDqhTX+oXM5kQNl/Y= -github.com/aws/aws-sdk-go-v2/service/sts v1.30.1 h1:+woJ607dllHJQtsnJLi52ycuqHMwlW+Wqm2Ppsfp4nQ= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 h1:KwuLovgQPcdjNMfFt9OhUd9a2OwcOKhxfvF4glTzLuA= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= github.com/aws/aws-sdk-go-v2/service/sts v1.30.1/go.mod h1:jiNR3JqT15Dm+QWq2SRgh0x0bCNSRP2L25+CqPNpJlQ= -github.com/aws/smithy-go v1.12.0/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= -github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 h1:PZV5W8yk4OtH1JAuhV2PXwwO9v5G5Aoj+eMCn4T+1Kc= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= -github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220706184558-ce46abcd012b h1:+I25t8HCatBZtvU9bFugfi8Y2zCpKUVcTRaeC0oOHOk= -github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20220706184558-ce46abcd012b/go.mod h1:wHkLB7jZX+7D2RArMnwuFMvrLENsgd6zrwBEJo863aQ= +github.com/aws/smithy-go v1.22.2 h1:6D9hW43xKFrRx/tXXfAlIZc4JI+yQe6snnWcQyxSyLQ= +github.com/aws/smithy-go v1.22.2/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20241209220728-69e8c24e6fc1 h1:zIZZLt8m2JBdKXMrb/0V97ygUE1H3IlVo3CzVNHAwbg= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20241209220728-69e8c24e6fc1/go.mod h1:FQkFAykCmZPtLktIiegZowcORDKY3tsTSQoMrNMHF9g= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/blang/semver/v4 v4.0.0 
h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= +github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9 h1:mV+hh0rMjzrhg7Jc/GKwpa+y/0BMHGOHdM9yY1GYyFI= github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9/go.mod h1:nDeXEIaeDV+mAK1gBD3/RJH67DYPC0GdaznWN7sB07s= github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= -github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= -github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chai2010/gettext-go v1.0.2 h1:1Lwwip6Q2QGsAdl/ZKPCwTe9fe0CjlUbqj5bFNSjIRk= github.com/chai2010/gettext-go v1.0.2/go.mod h1:y+wnP2cHYaVj19NZhYKAwEMH2CI1gNHeQQ+5AjwawxA= -github.com/chrismellard/docker-credential-acr-env v0.0.0-20220327082430-c57b701bfc08 h1:9Qh4lJ/KMr5iS1zfZ8I97+3MDpiKjl+0lZVUNBhdvRs= -github.com/chrismellard/docker-credential-acr-env v0.0.0-20220327082430-c57b701bfc08/go.mod h1:MAuu1uDJNOS3T3ui0qmKdPUwm59+bO19BbTph2wZafE= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test 
v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 h1:krfRl01rzPzxSxyLyrChD+U+MzsBXbm0OwYYB67uF+4= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589/go.mod h1:OuDyvmLnMCwa2ep4Jkm6nyA0ocJuZlGyk2gGseVzERM= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= -github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= +github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk= +github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= -github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/colinmarc/hdfs/v2 v2.4.0 h1:v6R8oBx/Wu9fHpdPoJJjpGSUxo8NhHIwrwsfhFvU9W0= @@ -189,38 +218,35 @@ github.com/coreos/go-oidc/v3 v3.9.0/go.mod h1:rTKz2PYwftcrtoCzV5g5kvfJoWcm0Mk8AF github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/go-md2man/v2 
v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0= github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= -github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= -github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= +github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= +github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/dave/jennifer v1.4.1/go.mod h1:7jEdnm+qBcxl8PC0zyp7vxcpSRnzXSt9r39tpTVGlwA= github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/daviddengcn/go-colortext v1.0.0 h1:ANqDyC0ys6qCSvuEK7l3g5RaehL/Xck9EX8ATG8oKsE= -github.com/daviddengcn/go-colortext v1.0.0/go.mod h1:zDqEI5NVUop5QPpVJUxE9UO10hRnmkD5G4Pmri9+m4c= github.com/denisenkom/go-mssqldb 
v0.12.3/go.mod h1:k0mtMFOnU+AihqFxPMiF05rtiDrorD1Vrm1KEz5hxDo= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= -github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= -github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= -github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg= -github.com/docker/cli v24.0.7+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v27.1.1+incompatible h1:goaZxOqs4QKxznZjjBWKONQci/MywhtRv2oNn0GkeZE= +github.com/docker/cli v27.1.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY= -github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= -github.com/docker/docker-credential-helpers v0.7.0 
h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= -github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= +github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= +github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 h1:7nllYTGLnq4CqBL27lV6oNfXzM2tJ2mrKF8E+aBXOV0= github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3/go.mod h1:v/MTKot4he5oRHGirOYGN4/hEOONNnWtDBLAzllSGMw= @@ -228,8 +254,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/edsrzf/mmap-go v1.1.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q= github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= -github.com/elazarl/goproxy v1.2.3 h1:xwIyKHbaP5yfT6O9KIeYJR5549MXRQkoQMRXGztz8YQ= -github.com/elazarl/goproxy v1.2.3/go.mod h1:YfEbZtqP4AetfO6d40vWchF3znWX7C7Vd6ZMfdL8z64= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= @@ -238,22 +264,28 @@ github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FM github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= 
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= +github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= -github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v5.8.0+incompatible h1:1Av9pn2FyxPdvrWNQszj1g6D6YthSmvCfcN6SYclTJg= github.com/evanphx/json-patch v5.8.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evilmonkeyinc/jsonpath v0.8.1 h1:W8K4t8u7aipkQE0hcTICGAdAN0Xph349LtjgSoofvVo= github.com/evilmonkeyinc/jsonpath v0.8.1/go.mod h1:EQhs0ZsoD4uD56ZJbO30gMTfHLQ6DEa0/5rT5Ymy42s= -github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM= 
-github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= -github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI= -github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= +github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f h1:Wl78ApPPB2Wvf/TIe2xdyJxTlb6obmF18d8QdkxNDu4= +github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f/go.mod h1:OSYXu++VVOHnXeitef/D8n/6y4QV8uLHSFXX4NeXMGc= +github.com/expr-lang/expr v1.17.0 h1:+vpszOyzKLQXC9VF+wA8cVA0tlA984/Wabc/1hF9Whg= +github.com/expr-lang/expr v1.17.0/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= @@ -262,10 +294,10 @@ github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHk github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= -github.com/fsnotify/fsnotify v1.7.0/go.mod 
h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= -github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw= -github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= +github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= github.com/gavv/httpexpect/v2 v2.16.0 h1:Ty2favARiTYTOkCRZGX7ojXXjGyNAIohM1lZ3vqaEwI= github.com/gavv/httpexpect/v2 v2.16.0/go.mod h1:uJLaO+hQ25ukBJtQi750PsztObHybNllN+t+MbbW8PY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -277,17 +309,17 @@ github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxI github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.6.1 h1:u+dcrgaguSSkbjzHwelEjc0Yj300NUevrrPphk/SoRA= -github.com/go-git/go-billy/v5 v5.6.1/go.mod h1:0AsLr1z2+Uksi4NlElmMblP5rPcDZNRCD8ujZCRR2BE= +github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= +github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.13.1 h1:DAQ9APonnlvSWpvolXWIuV6Q6zXy2wHbN4cVlNR5Q+M= 
-github.com/go-git/go-git/v5 v5.13.1/go.mod h1:qryJB4cSBoq3FRoBRf5A77joojuBcmPJ0qu3XXXVixc= +github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60= +github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= -github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY= +github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= @@ -296,30 +328,33 @@ github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7 github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= -github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod 
h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbXz58sAx6Q= -github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= -github.com/go-openapi/jsonreference v0.20.4 h1:bKlDxQxQJgwpUSgOENiMPzCTBVuc7vTdXSSgNeAhojU= -github.com/go-openapi/jsonreference v0.20.4/go.mod h1:5pZJyJP2MnYCpoeoMAql78cCHauHj0V9Lhc506VOpw4= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= -github.com/go-openapi/swag v0.22.6 h1:dnqg1XfHXL9aBxSbktBqFR5CxVyVI+7fYWhAf1JOeTw= -github.com/go-openapi/swag v0.22.6/go.mod h1:Gl91UqO+btAM0plGGxHqJcQZ1ZTy6jbmridBTsDy8A0= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI= github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 
h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobwas/glob v0.2.4-0.20181002190808-e7a84e9525fe h1:zn8tqiUbec4wR94o7Qj3LZCAT6uGobhEgnDRg6isG5U= github.com/gobwas/glob v0.2.4-0.20181002190808-e7a84e9525fe/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= -github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= @@ -327,19 +362,19 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.2.0/go.mod 
h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= -github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= -github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= 
+github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= @@ -353,41 +388,31 @@ github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:x github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golangplus/bytes v0.0.0-20160111154220-45c989fe5450/go.mod h1:Bk6SMAONeMXrxql8uvOKuAZSu8aM5RUGv+1C6IJaEho= -github.com/golangplus/bytes v1.0.0/go.mod h1:AdRaCFwmc/00ZzELMWb01soso6W1R/++O1XL80yAn+A= -github.com/golangplus/fmt v1.0.0/go.mod h1:zpM0OfbMCjPtd2qkTD/jX2MgiFCqklhSUFyDW44gVQE= -github.com/golangplus/testing v1.0.0 h1:+ZeeiKZENNOMkTTELoSySazi+XaEhVO0mb+eanrSEUQ= -github.com/golangplus/testing v1.0.0/go.mod h1:ZDreixUV3YzhoVraIDyOzHrr76p6NUh6k/pPg/Q3gYA= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= 
github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= -github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= -github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= +github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63KqpoNbWqVw= +github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-containerregistry v0.17.0 h1:5p+zYs/R4VGHkhyvgWurWrpJ2hW4Vv9fQI+GzdcwXLk= -github.com/google/go-containerregistry v0.17.0/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= -github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20220720195016-31786c6cbb82 h1:c9/uyN+ttOK+8FdjyzN4d40EY3u7YUGkEl6V1PDLiMc= -github.com/google/go-containerregistry/pkg/authn/k8schain 
v0.0.0-20220720195016-31786c6cbb82/go.mod h1:oiF0XG4IIPHc9kusCLuBDwY0ov4e1cikrP/vAqSp868= -github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220719135131-f79ec2192282 h1:Mjy6sd3HMC/PU+p4zuy3R5p5oevK8PggZms07Mgr95I= -github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20220719135131-f79ec2192282/go.mod h1:gbpYHfGj1oNhLX9gla4EjsnlJ44Ng+/kJRKi09FiXOU= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo= +github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8= +github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20241111191718-6bce25ecf029 h1:0G7T22yXy+FqumvxcEg48EU4llskcDeQ2eM3vaTr64c= +github.com/google/go-containerregistry/pkg/authn/k8schain v0.0.0-20241111191718-6bce25ecf029/go.mod h1:Xxhh5HFmICiLl0vmmfdsvuWPFITh3DqQf3UQqU2I6V8= +github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20230516205744-dbecb1de8cfa h1:+MG+Q2Q7mtW6kCIbUPZ9ZMrj7xOWDKI1hhy1qp0ygI0= +github.com/google/go-containerregistry/pkg/authn/kubernetes v0.0.0-20230516205744-dbecb1de8cfa/go.mod h1:KdL98/Va8Dy1irB6lTxIRIQ7bQj4lbrlvqUzKEQ+ZBU= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -396,14 +421,14 @@ github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= 
-github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= -github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= -github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db h1:097atOisP2aRj7vFgYQBbFN4U4JNXUNYpxael3UzMyo= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= -github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -411,11 +436,11 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
-github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= -github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= -github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= +github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -426,7 +451,6 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= 
@@ -438,35 +462,30 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92Bcuy github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0/go.mod h1:qmOFXW2epJhM0qSnUUYpldc7gVz2KMQwJ/QYCDIa7XU= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hokaccha/go-prettyjson v0.0.0-20211117102719-0474bc63780f h1:7LYC+Yfkj3CTRcShK0KOL/w6iTiKyqqBA9a41Wnggw8= github.com/hokaccha/go-prettyjson v0.0.0-20211117102719-0474bc63780f/go.mod h1:pFlLw2CfqZiIBOx6BuCeRLCrfxBJipTY0nIOF/VbGcI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.5.0 
h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.1.1/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= -github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/ipfs/go-detect-race v0.0.1 h1:qX/xay2W3E4Q1U7d9lNs1sU9nvguX0a7319XbyQ6cOk= github.com/ipfs/go-detect-race v0.0.1/go.mod h1:8BNT7shDZPo99Q74BpGMK+4D8Mn4j46UU0LZ723meps= -github.com/itchyny/gojq v0.12.14 h1:6k8vVtsrhQSYgSGg827AD+PVVaB1NLXEdX+dda2oZCc= -github.com/itchyny/gojq v0.12.14/go.mod h1:y1G7oO7XkcR1LPZO59KyoCRy08T3j9vDYRV0GgYSS+s= -github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE= -github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8= +github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg= +github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY= +github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q= +github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod 
h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= @@ -537,8 +556,8 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ= -github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= +github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= @@ -550,18 +569,20 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV github.com/karrick/godirwalk v1.7.8/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46shStcFDJ34= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6 h1:IsMZxCuZqKuao2vNdfD82fjjgPLfyHLpR41Z88viRWs= +github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6/go.mod h1:3VeWNIJaW+O5xpRQbPp0Ybqu1vJd/pm7s2F473HRrkw= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool 
v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= -github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= -github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= -github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= -github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -590,8 +611,6 @@ github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhn github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY= github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc= -github.com/magiconair/properties v1.8.7 
h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= -github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= @@ -610,13 +629,14 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/minio/crc64nvme v1.0.1 h1:DHQPrYPdqK7jQG/Ls5CTBZWeex/2FMS3G5XGkycuFrY= +github.com/minio/crc64nvme v1.0.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= github.com/minio/minio-go/v7 v7.0.66/go.mod h1:DHAgmyQEGdW3Cif0UooKOyrT3Vxs82zNdV6tkKhRtbs= -github.com/minio/minio-go/v7 v7.0.77 h1:GaGghJRg9nwDVlNbwYjSDJT1rqltQkBFDsypWX1v3Bw= -github.com/minio/minio-go/v7 v7.0.77/go.mod h1:AVM3IUN6WwKzmwBxVdjzhH8xq+f57JSbbvzqvUzR6eg= +github.com/minio/minio-go/v7 v7.0.89 h1:hx4xV5wwTUfyv8LarhJAwNecnXpoTsj9v3f3q/ZkiJU= +github.com/minio/minio-go/v7 v7.0.89/go.mod h1:2rFnGAp02p7Dddo1Fq4S2wYOfpF0MUTSeLTRC90I204= github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= 
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -625,15 +645,12 @@ github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZX github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= -github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA= -github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/moby/spdystream v0.5.0 h1:7r0J1Si3QO/kjRitvSLVVFUjxMEb/YLj6S9FF62JBCU= +github.com/moby/spdystream v0.5.0/go.mod h1:xBAYlnt/ay+11ShkdFKNAG7LsyK/tmNBVvVOwrfMgdI= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod 
h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -662,25 +679,25 @@ github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+ github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.15.0 h1:79HwNRBAZHOEwrczrgSOPy+eFTTlIGELKy5as+ClttY= -github.com/onsi/ginkgo/v2 v2.15.0/go.mod h1:HlxMHtYF57y6Dpf+mc5529KKmSq9h2FpCF+/ZkwUxKM= +github.com/onsi/ginkgo/v2 v2.21.0 h1:7rg/4f3rB88pb5obDgNZrNHrQ4e6WpjonchcpuBRnZM= +github.com/onsi/ginkgo/v2 v2.21.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= -github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= +github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= +github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0-rc3 h1:fzg1mXZFj8YdPeNkRXMg+zb88BFV0Ys52cJydRwBkb8= github.com/opencontainers/image-spec v1.1.0-rc3/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pelletier/go-toml/v2 v2.1.0 
h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= -github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= -github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= +github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= +github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= @@ -689,19 +706,23 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= -github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= +github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk= +github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos= -github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= -github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE= -github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= -github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= -github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io= +github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= 
+github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= github.com/remyoudompheng/bigfft v0.0.0-20190728182440-6a916e37a237/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= @@ -710,8 +731,8 @@ github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= @@ -720,10 +741,8 @@ github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OK github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= 
-github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= +github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= @@ -735,17 +754,17 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG github.com/sethvargo/go-limiter v0.7.2 h1:FgC4N7RMpV5gMrUdda15FaFTkQ/L4fEqM7seXMs4oO8= github.com/sethvargo/go-limiter v0.7.2/go.mod h1:C0kbSFbiriE5k2FFOe18M1YZbAR2Fiwf72uGu0CXCcU= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod 
h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= -github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= +github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= +github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= @@ -756,18 +775,19 @@ github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= -github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= +github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= +github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= +github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.1/go.mod 
h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= +github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= -github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.20.0 h1:zrxIyR3RQIOsarIrgL8+sAvALXul9jeEPa06Y0Ph6vY= +github.com/spf13/viper v1.20.0/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= @@ -793,8 +813,8 @@ github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/tailscale/depaware v0.0.0-20210622194025-720c4b409502/go.mod h1:p9lPsd+cx33L3H9nNoecRRxPssFKUwwI50I3pZ0yT+8= -github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= -github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= @@ -812,6 +832,8 @@ github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+ github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= github.com/vbatts/tar-split v0.11.3 h1:hLFqsOLQ1SsppQNTMpkpPXClLDfC2A3Zgy9OUU+RVck= github.com/vbatts/tar-split v0.11.3/go.mod h1:9QlHN18E+fEH7RdG+QAJJcuya3rqT7eXSTY7wGrAokY= +github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= +github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -837,36 +859,40 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= -go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 h1:UNQQKPfTDe1J81ViolILjTKPr9WetKW6uei2hFgJmFs= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0/go.mod h1:r9vWsPS/3AQItv3OSlEJ/E4mbrhUbbw18meOjArPtKQ= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 h1:sv9kVfal0MK0wBMCOGr+HeJm9v803BkJxGrk2au7j08= 
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0/go.mod h1:SK2UL73Zy1quvRPonmOmRDiWk1KBV3LyIeeIxcEApWw= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= +go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I= go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0 h1:dJlCKeq+zmO5Og4kgxqPvvJrzuD/mygs1g/NYM9dAsU= go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0/go.mod h1:p+hpBCpLHpuUrR0lHgnHbUnbCBll1IhrcMIlycC+xYs= -go.opentelemetry.io/otel v1.23.0 h1:Df0pqjqExIywbMCMTxkAwzjLZtRf+bBKLbUcpxO2C9E= -go.opentelemetry.io/otel v1.23.0/go.mod h1:YCycw9ZeKhcJFrb34iVSkyT0iczq/zYDtZYFufObyB0= +go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= +go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0 h1:97CpJflo7dJK4A4SLMNoP2loDEAiG0ifF6MnLhtSHUY= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0/go.mod h1:YzC+4JHcK24PylBTZ78U0XJSYbhHY0uHYNqr+OlcLCs= go.opentelemetry.io/otel/exporters/prometheus v0.45.1 h1:R/bW3afad6q6VGU+MFYpnEdo0stEARMCdhWu6+JI6aI= go.opentelemetry.io/otel/exporters/prometheus 
v0.45.1/go.mod h1:wnHAfKRav5Dfp4iZhyWZ7SzQfT+rDZpEpYG7To+qJ1k= -go.opentelemetry.io/otel/metric v1.23.0 h1:pazkx7ss4LFVVYSxYew7L5I6qvLXHA0Ap2pwV+9Cnpo= -go.opentelemetry.io/otel/metric v1.23.0/go.mod h1:MqUW2X2a6Q8RN96E2/nqNoT+z9BSms20Jb7Bbp+HiTo= -go.opentelemetry.io/otel/sdk v1.23.0 h1:0KM9Zl2esnl+WSukEmlaAEjVY5HDZANOHferLq36BPc= -go.opentelemetry.io/otel/sdk v1.23.0/go.mod h1:wUscup7byToqyKJSilEtMf34FgdCAsFpFOjXnAwFfO0= -go.opentelemetry.io/otel/sdk/metric v1.23.0 h1:u81lMvmK6GMgN4Fty7K7S6cSKOZhMKJMK2TB+KaTs0I= -go.opentelemetry.io/otel/sdk/metric v1.23.0/go.mod h1:2LUOToN/FdX6wtfpHybOnCZjoZ6ViYajJYMiJ1LKDtQ= -go.opentelemetry.io/otel/trace v1.23.0 h1:37Ik5Ib7xfYVb4V1UtnT97T1jI+AoIYkJyPkuL4iJgI= -go.opentelemetry.io/otel/trace v1.23.0/go.mod h1:GSGTbIClEsuZrGIzoEHqsVfxgn5UkggkflQwDScNUsk= -go.opentelemetry.io/proto/otlp v1.1.0 h1:2Di21piLrCqJ3U3eXGCTPHE9R8Nh+0uglSnOyxikMeI= -go.opentelemetry.io/proto/otlp v1.1.0/go.mod h1:GpBHCBWiqvVLDqmHZsoMM3C5ySeKTC7ej/RNTae6MdY= -go.starlark.net v0.0.0-20230525235612-a134d8f9ddca h1:VdD38733bfYv5tUZwEIskMM93VanwNIi5bIKnDrJdEY= -go.starlark.net v0.0.0-20230525235612-a134d8f9ddca/go.mod h1:jxU+3+j+71eXOW14274+SmmuW82qJzl6iZSeqEtTGds= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= +go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= +go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= +go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= +go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= +go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= +go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= 
+go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= +go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= +go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= +go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= @@ -894,7 +920,6 @@ golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= @@ -904,8 +929,8 @@ golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq golang.org/x/crypto v0.19.0/go.mod 
h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= -golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= -golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= golang.org/x/exp v0.0.0-20181106170214-d68db9428509/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -932,8 +957,6 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8= -golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -951,7 +974,6 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= 
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= @@ -959,7 +981,6 @@ golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= @@ -970,14 +991,14 @@ golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= -golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= -golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= -golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.28.0 h1:CrgCKl8PPAVtLnU3c+EDw6x11699EWlsDeWNWKdIOkc= +golang.org/x/oauth2 v0.28.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -991,8 +1012,8 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= -golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
@@ -1018,7 +1039,6 @@ golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1034,7 +1054,6 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220906165534-d0df966e6959/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1046,14 +1065,12 @@ golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= 
-golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo= @@ -1064,8 +1081,8 @@ golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= -golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= -golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= +golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text 
v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1074,7 +1091,6 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= @@ -1083,12 +1099,12 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1116,19 +1132,17 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg= -golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI= +golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= +golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api 
v0.163.0 h1:4BBDpPaSH+H28NhnX+WwjXxbRLQ7TWuEKp4BQyEjxvk= -google.golang.org/api v0.163.0/go.mod h1:6SulDkfoBIg4NFmCuZ39XeeAgSHCPecfSUuDyYlAHs0= +google.golang.org/api v0.228.0 h1:X2DJ/uoWGnY5obVjewbp8icSL5U4FzuCfy9OjbLSnLs= +google.golang.org/api v0.228.0/go.mod h1:wNvRS1Pbe8r4+IfBIniV8fwCpGwTrYa+kMUDiC5z5a4= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1138,33 +1152,28 @@ google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRn google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac h1:ZL/Teoy/ZGnzyrqK/Optxxp2pmVh+fmJ97slxSRyzUg= -google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:+Rvu7ElI+aLzyDQhpHMFMMltsD6m7nqpuWDd2CwJw3k= -google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe h1:0poefMBYvYbs7g5UkjS6HcxBPaTRAmznle9jnxYoAI8= -google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe h1:bQnxqljG/wqi4NTXu2+DJ3n7APcEA882QZ1JvhQAq9o= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb 
h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= +google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb h1:p31xT4yrYrSM/G4Sn2+TNUkVhFCbG9y8itM2S6Th950= +google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:jbe3Bkdp+Dh2IrslsFCklNhweNTBgSYanP1UXhJDhKg= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.61.0 h1:TOvOcuXn30kRao+gfcvsebNEa5iZIiLkisYEkf7R7o0= -google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= +google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= +google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf 
v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1172,6 +1181,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/evanphx/json-patch.v4 v4.12.0 h1:n6jtcsulIzXPJaxegRbvFNNrZDjbij7ny3gmSPG+6V4= +gopkg.in/evanphx/json-patch.v4 v4.12.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/go-playground/webhooks.v5 v5.17.0 h1:truBced5ZmkiNKK47cM8bMe86wUSjNks7SFMuNKwzlc= 
gopkg.in/go-playground/webhooks.v5 v5.17.0/go.mod h1:LZbya/qLVdbqDR1aKrGuWV6qbia2zCYSR5dpom2SInQ= @@ -1206,40 +1217,40 @@ honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= k8s.io/api v0.17.8/go.mod h1:N++Llhs8kCixMUoCaXXAyMMPbo8dDVnh+IQ36xZV2/0= -k8s.io/api v0.30.3 h1:ImHwK9DCsPA9uoU3rVh4QHAHHK5dTSv1nxJUapx8hoQ= -k8s.io/api v0.30.3/go.mod h1:GPc8jlzoe5JG3pb0KJCSLX5oAFIW3/qNJITlDj8BH04= +k8s.io/api v0.32.2 h1:bZrMLEkgizC24G9eViHGOPbW+aRo9duEISRIJKfdJuw= +k8s.io/api v0.32.2/go.mod h1:hKlhk4x1sJyYnHENsrdCWw31FEmCijNGPJO5WzHiJ6Y= k8s.io/apimachinery v0.17.8/go.mod h1:Lg8zZ5iC/O8UjCqW6DNhcQG2m4TdjF9kwG3891OWbbA= -k8s.io/apimachinery v0.30.3 h1:q1laaWCmrszyQuSQCfNB8cFgCuDAoPszKY4ucAjDwHc= -k8s.io/apimachinery v0.30.3/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc= -k8s.io/cli-runtime v0.30.3 h1:aG69oRzJuP2Q4o8dm+f5WJIX4ZBEwrvdID0+MXyUY6k= -k8s.io/cli-runtime v0.30.3/go.mod h1:hwrrRdd9P84CXSKzhHxrOivAR9BRnkMt0OeP5mj7X30= +k8s.io/apimachinery v0.32.2 h1:yoQBR9ZGkA6Rgmhbp/yuT9/g+4lxtsGYwW6dR6BDPLQ= +k8s.io/apimachinery v0.32.2/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE= +k8s.io/cli-runtime v0.32.2 h1:aKQR4foh9qeyckKRkNXUccP9moxzffyndZAvr+IXMks= +k8s.io/cli-runtime v0.32.2/go.mod h1:a/JpeMztz3xDa7GCyyShcwe55p8pbcCVQxvqZnIwXN8= k8s.io/client-go v0.17.8/go.mod h1:SJsDS64AAtt9VZyeaQMb4Ck5etCitZ/FwajWdzua5eY= -k8s.io/client-go v0.30.3 h1:bHrJu3xQZNXIi8/MoxYtZBBWQQXwy16zqJwloXXfD3k= -k8s.io/client-go v0.30.3/go.mod h1:8d4pf8vYu665/kUbsxWAQ/JDBNWqfFeZnvFiVdmx89U= -k8s.io/component-base v0.30.3 h1:Ci0UqKWf4oiwy8hr1+E3dsnliKnkMLZMVbWzeorlk7s= -k8s.io/component-base v0.30.3/go.mod h1:C1SshT3rGPCuNtBs14RmVD2xW0EhRSeLvBh7AGk1quA= -k8s.io/component-helpers v0.30.3 h1:KPc8l0eGx9Wg2OcKc58k9ozNcVcOInAi3NGiuS2xJ/c= -k8s.io/component-helpers 
v0.30.3/go.mod h1:VOQ7g3q+YbKWwKeACG2BwPv4ftaN8jXYJ5U3xpzuYAE= +k8s.io/client-go v0.32.2 h1:4dYCD4Nz+9RApM2b/3BtVvBHw54QjMFUl1OLcJG5yOA= +k8s.io/client-go v0.32.2/go.mod h1:fpZ4oJXclZ3r2nDOv+Ux3XcJutfrwjKTCHz2H3sww94= +k8s.io/component-base v0.32.2 h1:1aUL5Vdmu7qNo4ZsE+569PV5zFatM9hl+lb3dEea2zU= +k8s.io/component-base v0.32.2/go.mod h1:PXJ61Vx9Lg+P5mS8TLd7bCIr+eMJRQTyXe8KvkrvJq0= +k8s.io/component-helpers v0.32.2 h1:2usSAm3zNE5yu5DdAdrKBWLfSYNpU4OPjZywJY5ovP8= +k8s.io/component-helpers v0.32.2/go.mod h1:fvQAoiiOP7jUEUBc9qR0PXiBPuB0I56WTxTkkpcI8g8= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20220902162205-c0856e24416d h1:U9tB195lKdzwqicbJvyJeOXV7Klv+wNAWENRnXEGi08= -k8s.io/gengo v0.0.0-20220902162205-c0856e24416d/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/gengo v0.0.0-20250207200755-1244d31929d7 h1:iOdAjO5OsCszwPG5xNcJJL+rKaMUogk5fx9HsYZJxQM= +k8s.io/gengo v0.0.0-20250207200755-1244d31929d7/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.5.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= -k8s.io/klog/v2 v2.120.1 h1:QXU6cPEOIslTGvZaXvFWiP9VKyeet3sawzTOvdXb4Vw= -k8s.io/klog/v2 v2.120.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= +k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= +k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= k8s.io/kube-openapi v0.0.0-20200410145947-bcb3869e6f29/go.mod h1:F+5wygcW0wmRTnM3cOgIqGivxkwSWIWT5YdsDbeAOaU= -k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 h1:BZqlfIlq5YbRMFko6/PM7FjZpUb45WallggurYhKGag= 
-k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340/go.mod h1:yD4MZYeKMBwQKVht279WycxKyM84kkAx2DPrTXaeb98= -k8s.io/kubectl v0.30.3 h1:YIBBvMdTW0xcDpmrOBzcpUVsn+zOgjMYIu7kAq+yqiI= -k8s.io/kubectl v0.30.3/go.mod h1:IcR0I9RN2+zzTRUa1BzZCm4oM0NLOawE6RzlDvd1Fpo= -k8s.io/metrics v0.30.3 h1:gKCpte5zykrOmQhZ8qmsxyJslMdiLN+sqbBfIWNpbGM= -k8s.io/metrics v0.30.3/go.mod h1:W06L2nXRhOwPkFYDJYWdEIS3u6JcJy3ebIPYbndRs6A= +k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff h1:/usPimJzUKKu+m+TE36gUyGcf03XZEP0ZIKgKj35LS4= +k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff/go.mod h1:5jIi+8yX4RIb8wk3XwBo5Pq2ccx4FP10ohkbSKCZoK8= +k8s.io/kubectl v0.32.2 h1:TAkag6+XfSBgkqK9I7ZvwtF0WVtUAvK8ZqTt+5zi1Us= +k8s.io/kubectl v0.32.2/go.mod h1:+h/NQFSPxiDZYX/WZaWw9fwYezGLISP0ud8nQKg+3g8= +k8s.io/metrics v0.32.2 h1:7t/rZzTHFrGa9f94XcgLlm3ToAuJtdlHANcJEHlYl9g= +k8s.io/metrics v0.32.2/go.mod h1:VL3nJpzcgB6L5nSljkkzoE0nilZhVgcjCfNRgoylaIQ= k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= -k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +k8s.io/utils v0.0.0-20241210054802-24370beab758 h1:sdbE21q2nlQtFh65saZY+rRM6x6aJJI8IUa1AmH/qa0= +k8s.io/utils v0.0.0-20241210054802-24370beab758/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= modernc.org/b v1.0.4/go.mod h1:Oqc2xtmGT0tvBUsPZIanirLhxBCQZhM7Lu3TlzBj9w8= modernc.org/b v1.1.0/go.mod h1:yF+wmBAFjebNdVqZNTeNfmnLaLqq91wozvDLcuXz+ck= modernc.org/db v1.0.8/go.mod h1:L8Az96H46DF2+BGeaS6+WiEqLORR2sjp0yBn6LA/lAQ= @@ -1286,17 +1297,20 @@ modernc.org/zappy v1.0.9/go.mod h1:y2c4Hv5jzyBP179SxNmx5H/BM6cVgNIXPQv2bCeR6IM= modernc.org/zappy v1.1.0/go.mod h1:cxC0dWAgZuyMsJ+KL3ZBgo3twyKGBB/0By/umSZE2bQ= moul.io/http2curl/v2 v2.3.0 h1:9r3JfDzWPcbIklMOs2TnIFzDYvfAZvjeavG6EzP7jYs= moul.io/http2curl/v2 v2.3.0/go.mod 
h1:RW4hyBjTWSYDOxapodpNEtX0g5Eb16sxklBqmd2RHcE= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/kustomize/api v0.13.5-0.20230601165947-6ce0bf390ce3 h1:XX3Ajgzov2RKUdc5jW3t5jwY7Bo7dcRm+tFxT+NfgY0= -sigs.k8s.io/kustomize/api v0.13.5-0.20230601165947-6ce0bf390ce3/go.mod h1:9n16EZKMhXBNSiUC5kSdFQJkdH3zbxS/JoO619G1VAY= -sigs.k8s.io/kustomize/kustomize/v5 v5.0.4-0.20230601165947-6ce0bf390ce3 h1:vq2TtoDcQomhy7OxXLUOzSbHMuMYq0Bjn93cDtJEdKw= -sigs.k8s.io/kustomize/kustomize/v5 v5.0.4-0.20230601165947-6ce0bf390ce3/go.mod h1:/d88dHCvoy7d0AKFT0yytezSGZKjsZBVs9YTkBHSGFk= -sigs.k8s.io/kustomize/kyaml v0.14.3-0.20230601165947-6ce0bf390ce3 h1:W6cLQc5pnqM7vh3b7HvGNfXrJ/xL6BDMS0v1V/HHg5U= -sigs.k8s.io/kustomize/kyaml v0.14.3-0.20230601165947-6ce0bf390ce3/go.mod h1:JWP1Fj0VWGHyw3YUPjXSQnRnrwezrZSrApfX5S0nIag= +sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8= +sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo= +sigs.k8s.io/kustomize/api v0.18.0 h1:hTzp67k+3NEVInwz5BHyzc9rGxIauoXferXyjv5lWPo= +sigs.k8s.io/kustomize/api v0.18.0/go.mod h1:f8isXnX+8b+SGLHQ6yO4JG1rdkZlvhaCf/uZbLVMb0U= +sigs.k8s.io/kustomize/kustomize/v5 v5.5.0 h1:o1mtt6vpxsxDYaZKrw3BnEtc+pAjLz7UffnIvHNbvW0= +sigs.k8s.io/kustomize/kustomize/v5 v5.5.0/go.mod h1:AeFCmgCrXzmvjWWaeZCyBp6XzG1Y0w1svYus8GhJEOE= +sigs.k8s.io/kustomize/kyaml v0.18.1 h1:WvBo56Wzw3fjS+7vBjN6TeivvpbW9GmRaWZ9CIVmt4E= +sigs.k8s.io/kustomize/kyaml v0.18.1/go.mod h1:C3L2BFVU1jgcddNBE1TxuVLgS46TjObMwW5FT9FcjYo= +sigs.k8s.io/randfill v0.0.0-20250304075658-069ef1bbf016/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY= +sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU= +sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY= 
sigs.k8s.io/structured-merge-diff/v2 v2.0.1/go.mod h1:Wb7vfKAodbKgf6tn1Kl0VvGj7mRH6DGaRcixXEJXTsE= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc= +sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= diff --git a/hack/api/jsonschema/main.go b/hack/api/jsonschema/main.go index 72018f00049e..3102a807548c 100644 --- a/hack/api/jsonschema/main.go +++ b/hack/api/jsonschema/main.go @@ -39,8 +39,8 @@ func main() { props["kind"].(obj)["const"] = kind } schema := obj{ - "$id": "http://workflows.argoproj.io/workflows.json", // don't really know what this should be - "$schema": "http://json-schema.org/schema#", + "$id": "https://raw.githubusercontent.com/argoproj/argo-workflows/HEAD/api/jsonschema/schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "oneOf": []interface{}{ obj{"$ref": "#/definitions/io.argoproj.workflow.v1alpha1.ClusterWorkflowTemplate"}, diff --git a/hack/docs/fields.go b/hack/docs/fields.go index 992f3cf32880..3ae77444613b 100644 --- a/hack/docs/fields.go +++ b/hack/docs/fields.go @@ -136,9 +136,9 @@ func getObjectType(field map[string]interface{}, addToQueue func(string)) string name := getNameFromFullName(refString) if refString == "io.argoproj.workflow.v1alpha1.ParallelSteps" { - return fmt.Sprintf("`Array>`", link(fmt.Sprintf("`%s`", "WorkflowStep"), fmt.Sprintf("#"+strings.ToLower("WorkflowStep")))) + return fmt.Sprintf("`Array>`", link("`WorkflowStep`", "#"+strings.ToLower("WorkflowStep"))) } - return 
fmt.Sprintf("`Array<`%s`>`", link(fmt.Sprintf("`%s`", name), fmt.Sprintf("#"+strings.ToLower(name)))) + return fmt.Sprintf("`Array<`%s`>`", link(fmt.Sprintf("`%s`", name), "#"+strings.ToLower(name))) } fullName := field["items"].(map[string]interface{})["type"].(string) return fmt.Sprintf("`Array< %s >`", getNameFromFullName(fullName)) diff --git a/hack/docs/tested-versions.sh b/hack/docs/tested-versions.sh new file mode 100755 index 000000000000..2c06368aac85 --- /dev/null +++ b/hack/docs/tested-versions.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -eu +. hack/k8s-versions.sh +printf 'This version is tested under Kubernetes %s and %s.' "${K8S_VERSIONS[min]}" "${K8S_VERSIONS[max]}" \ No newline at end of file diff --git a/hack/k8s-versions.sh b/hack/k8s-versions.sh new file mode 100755 index 000000000000..16f513abc1eb --- /dev/null +++ b/hack/k8s-versions.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# Centralized config to define the minimum and maximum tested Kubernetes versions. +# This is used in the CI workflow for e2e tests, the devcontainer, and to generate docs. 
+declare -A K8S_VERSIONS=( + [min]=v1.28.13 + [max]=v1.31.0 +) diff --git a/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml b/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml index f94e33fee75d..3d47f01bd83d 100644 --- a/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml +++ b/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml @@ -2051,6 +2051,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -2077,6 +2079,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -3409,6 +3413,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3515,6 +3520,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3596,6 +3602,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -3713,6 +3721,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4089,6 +4098,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4195,6 +4205,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4276,6 +4287,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -4393,6 +4406,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6798,6 +6812,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6906,6 +6921,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6987,6 +7003,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -7104,6 +7122,7 @@ spec: format: int32 type: integer service: + default: "" type: 
string required: - port @@ -9469,6 +9488,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9575,6 +9595,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9656,6 +9677,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -9775,6 +9798,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9916,6 +9940,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -9942,6 +9968,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -10223,6 +10251,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10331,6 +10360,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10412,6 +10442,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -10529,6 +10561,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10792,10 +10825,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -11155,6 +11190,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -11168,6 +11210,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -11416,6 +11459,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -11423,6 +11467,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -11434,6 +11479,7 @@ spec: type: object 
x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -11442,6 +11488,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -11459,6 +11506,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -12742,6 +12790,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12848,6 +12897,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12929,6 +12979,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13046,6 +13098,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13422,6 +13475,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13528,6 +13582,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13609,6 +13664,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13726,6 +13783,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16131,6 +16189,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16239,6 +16298,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16320,6 +16380,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -16437,6 +16499,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18802,6 +18865,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18908,6 +18972,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18989,6 +19054,8 @@ spec: properties: name: type: string + request: + type: string required: - 
name type: object @@ -19108,6 +19175,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19249,6 +19317,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -19275,6 +19345,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -19556,6 +19628,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19664,6 +19737,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19745,6 +19819,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19862,6 +19938,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -20125,10 +20202,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -20488,6 +20567,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -20501,6 +20587,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -20749,6 +20836,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -20756,6 +20844,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -20767,6 +20856,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -20775,6 +20865,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -20792,6 +20883,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -21092,10 
+21184,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -21455,6 +21549,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -21468,6 +21569,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -21716,6 +21818,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -21723,6 +21826,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -21734,6 +21838,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -21742,6 +21847,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -21759,6 +21865,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string diff --git a/manifests/base/crds/full/argoproj.io_cronworkflows.yaml b/manifests/base/crds/full/argoproj.io_cronworkflows.yaml index 57487541cac0..27f2eef29d73 100644 --- a/manifests/base/crds/full/argoproj.io_cronworkflows.yaml +++ b/manifests/base/crds/full/argoproj.io_cronworkflows.yaml @@ -2085,6 +2085,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -2111,6 +2113,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -3443,6 +3447,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3549,6 +3554,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3630,6 +3636,8 @@ spec: properties: name: type: string + request: + type: string required: - name 
type: object @@ -3747,6 +3755,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4123,6 +4132,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4229,6 +4239,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4310,6 +4321,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -4427,6 +4440,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6832,6 +6846,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6940,6 +6955,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7021,6 +7037,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -7138,6 +7156,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9503,6 +9522,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9609,6 +9629,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9690,6 +9711,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -9809,6 +9832,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9950,6 +9974,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -9976,6 +10002,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -10257,6 +10285,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10365,6 +10394,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10446,6 +10476,8 @@ spec: 
properties: name: type: string + request: + type: string required: - name type: object @@ -10563,6 +10595,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10826,10 +10859,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -11189,6 +11224,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -11202,6 +11244,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -11450,6 +11493,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -11457,6 +11501,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -11468,6 +11513,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -11476,6 +11522,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -11493,6 +11540,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -12776,6 +12824,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12882,6 +12931,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12963,6 +13013,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13080,6 +13132,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13456,6 +13509,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13562,6 +13616,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13643,6 +13698,8 @@ 
spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13760,6 +13817,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16165,6 +16223,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16273,6 +16332,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16354,6 +16414,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -16471,6 +16533,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18836,6 +18899,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18942,6 +19006,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19023,6 +19088,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19142,6 +19209,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19283,6 +19351,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -19309,6 +19379,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -19590,6 +19662,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19698,6 +19771,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19779,6 +19853,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19896,6 +19972,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -20159,10 +20236,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: 
boolean required: - diskName @@ -20522,6 +20601,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -20535,6 +20621,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -20783,6 +20870,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -20790,6 +20878,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -20801,6 +20890,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -20809,6 +20899,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -20826,6 +20917,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -21126,10 +21218,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -21489,6 +21583,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -21502,6 +21603,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -21750,6 +21852,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -21757,6 +21860,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -21768,6 +21872,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -21776,6 +21881,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -21793,6 +21899,7 @@ spec: sslEnabled: type: boolean storageMode: + 
default: ThinProvisioned type: string storagePool: type: string diff --git a/manifests/base/crds/full/argoproj.io_workflows.yaml b/manifests/base/crds/full/argoproj.io_workflows.yaml index 897e44078f68..6bda8bddfdc1 100644 --- a/manifests/base/crds/full/argoproj.io_workflows.yaml +++ b/manifests/base/crds/full/argoproj.io_workflows.yaml @@ -2065,6 +2065,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -2091,6 +2093,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -3423,6 +3427,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3529,6 +3534,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3610,6 +3616,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -3727,6 +3735,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4103,6 +4112,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4209,6 +4219,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4290,6 +4301,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -4407,6 +4420,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6812,6 +6826,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6920,6 +6935,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7001,6 +7017,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -7118,6 +7136,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9483,6 +9502,7 @@ spec: format: 
int32 type: integer service: + default: "" type: string required: - port @@ -9589,6 +9609,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9670,6 +9691,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -9789,6 +9812,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9930,6 +9954,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -9956,6 +9982,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -10237,6 +10265,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10345,6 +10374,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10426,6 +10456,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -10543,6 +10575,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10806,10 +10839,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -11169,6 +11204,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -11182,6 +11224,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -11430,6 +11473,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -11437,6 +11481,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -11448,6 +11493,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string 
required: - image @@ -11456,6 +11502,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -11473,6 +11520,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -12756,6 +12804,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12862,6 +12911,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12943,6 +12993,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13060,6 +13112,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13436,6 +13489,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13542,6 +13596,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13623,6 +13678,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13740,6 +13797,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16145,6 +16203,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16253,6 +16312,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16334,6 +16394,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -16451,6 +16513,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18816,6 +18879,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18922,6 +18986,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19003,6 +19068,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19122,6 +19189,7 @@ spec: format: int32 
type: integer service: + default: "" type: string required: - port @@ -19263,6 +19331,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -19289,6 +19359,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -19570,6 +19642,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19678,6 +19751,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19759,6 +19833,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19876,6 +19952,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -20139,10 +20216,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -20502,6 +20581,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -20515,6 +20601,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -20763,6 +20850,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -20770,6 +20858,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -20781,6 +20870,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -20789,6 +20879,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -20806,6 +20897,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -21106,10 +21198,12 @@ spec: diskURI: type: string fsType: + default: ext4 
type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -21469,6 +21563,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -21482,6 +21583,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -21730,6 +21832,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -21737,6 +21840,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -21748,6 +21852,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -21756,6 +21861,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -21773,6 +21879,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -24211,10 +24318,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -24574,6 +24683,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -24587,6 +24703,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -24835,6 +24952,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -24842,6 +24960,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -24853,6 +24972,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -24861,6 +24981,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ 
-24878,6 +24999,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -26172,6 +26294,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -26278,6 +26401,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -26359,6 +26483,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -26476,6 +26602,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -26852,6 +26979,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -26958,6 +27086,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -27039,6 +27168,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -27156,6 +27287,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -29561,6 +29693,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -29669,6 +29802,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -29750,6 +29884,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -29867,6 +30003,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -32232,6 +32369,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -32338,6 +32476,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -32419,6 +32558,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -32538,6 +32679,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -32679,6 +32821,8 @@ spec: runAsUser: format: int64 type: 
integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -32705,6 +32849,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -32986,6 +33132,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -33094,6 +33241,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -33175,6 +33323,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -33292,6 +33442,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -33555,10 +33706,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -33918,6 +34071,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -33931,6 +34091,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -34179,6 +34340,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -34186,6 +34348,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -34197,6 +34360,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -34205,6 +34369,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -34222,6 +34387,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -36323,6 +36489,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -36349,6 +36517,8 @@ spec: type: integer type: array 
x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -37681,6 +37851,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -37787,6 +37958,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -37868,6 +38040,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -37985,6 +38159,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -38361,6 +38536,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -38467,6 +38643,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -38548,6 +38725,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -38665,6 +38844,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -41070,6 +41250,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -41178,6 +41359,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -41259,6 +41441,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -41376,6 +41560,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -43741,6 +43926,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -43847,6 +44033,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -43928,6 +44115,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -44047,6 +44236,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -44188,6 +44378,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + 
type: string seLinuxOptions: properties: level: @@ -44214,6 +44406,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -44495,6 +44689,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -44603,6 +44798,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -44684,6 +44880,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -44801,6 +44999,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -45064,10 +45263,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -45427,6 +45628,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -45440,6 +45648,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -45688,6 +45897,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -45695,6 +45905,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -45706,6 +45917,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -45714,6 +45926,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -45731,6 +45944,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -47014,6 +47228,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -47120,6 +47335,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -47201,6 +47417,8 
@@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -47318,6 +47536,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -47694,6 +47913,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -47800,6 +48020,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -47881,6 +48102,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -47998,6 +48221,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -50403,6 +50627,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -50511,6 +50736,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -50592,6 +50818,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -50709,6 +50937,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -53074,6 +53303,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -53180,6 +53410,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -53261,6 +53492,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -53380,6 +53613,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -53521,6 +53755,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -53547,6 +53783,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -53828,6 +54066,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -53936,6 +54175,7 @@ spec: 
format: int32 type: integer service: + default: "" type: string required: - port @@ -54017,6 +54257,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -54134,6 +54376,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -54397,10 +54640,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -54760,6 +55005,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -54773,6 +55025,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -55021,6 +55274,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -55028,6 +55282,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -55039,6 +55294,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -55047,6 +55303,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -55064,6 +55321,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -55364,10 +55622,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -55727,6 +55987,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -55740,6 +56007,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -55988,6 +56256,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string 
monitors: items: @@ -55995,6 +56264,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -56006,6 +56276,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -56014,6 +56285,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -56031,6 +56303,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string diff --git a/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml b/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml index 7bea90f68ad3..a49938d1859c 100644 --- a/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml +++ b/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml @@ -1235,6 +1235,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -1341,6 +1342,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -1422,6 +1424,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -1539,6 +1543,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -1915,6 +1920,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -2021,6 +2027,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -2102,6 +2109,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -2219,6 +2228,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4624,6 +4634,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4732,6 +4743,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4813,6 +4825,8 @@ spec: properties: name: type: string + request: + 
type: string required: - name type: object @@ -4930,6 +4944,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7295,6 +7310,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7401,6 +7417,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7482,6 +7499,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -7601,6 +7620,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -7742,6 +7762,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -7768,6 +7790,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -8049,6 +8073,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -8157,6 +8182,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -8238,6 +8264,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -8355,6 +8383,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -8618,10 +8647,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -8981,6 +9012,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -8994,6 +9032,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -9242,6 +9281,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -9249,6 +9289,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: 
+ default: rbd type: string readOnly: type: boolean @@ -9260,6 +9301,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -9268,6 +9310,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -9285,6 +9328,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string diff --git a/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml b/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml index 0147e01dff5e..36276f2e9add 100644 --- a/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml +++ b/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml @@ -2050,6 +2050,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -2076,6 +2078,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -3408,6 +3412,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3514,6 +3519,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -3595,6 +3601,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -3712,6 +3720,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4088,6 +4097,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4194,6 +4204,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -4275,6 +4286,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -4392,6 +4405,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6797,6 +6811,7 @@ spec: format: int32 type: integer service: + default: "" type: string 
required: - port @@ -6905,6 +6920,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -6986,6 +7002,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -7103,6 +7121,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9468,6 +9487,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9574,6 +9594,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9655,6 +9676,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -9774,6 +9797,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -9915,6 +9939,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -9941,6 +9967,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -10222,6 +10250,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10330,6 +10359,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10411,6 +10441,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -10528,6 +10560,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -10791,10 +10824,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -11154,6 +11189,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -11167,6 +11209,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string 
lun: format: int32 @@ -11415,6 +11458,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -11422,6 +11466,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -11433,6 +11478,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -11441,6 +11487,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -11458,6 +11505,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -12741,6 +12789,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12847,6 +12896,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -12928,6 +12978,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13045,6 +13097,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13421,6 +13474,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13527,6 +13581,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -13608,6 +13663,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -13725,6 +13782,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16130,6 +16188,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16238,6 +16297,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -16319,6 +16379,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -16436,6 +16498,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ 
-18801,6 +18864,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18907,6 +18971,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -18988,6 +19053,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19107,6 +19174,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19248,6 +19316,8 @@ spec: runAsUser: format: int64 type: integer + seLinuxChangePolicy: + type: string seLinuxOptions: properties: level: @@ -19274,6 +19344,8 @@ spec: type: integer type: array x-kubernetes-list-type: atomic + supplementalGroupsPolicy: + type: string sysctls: items: properties: @@ -19555,6 +19627,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19663,6 +19736,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -19744,6 +19818,8 @@ spec: properties: name: type: string + request: + type: string required: - name type: object @@ -19861,6 +19937,7 @@ spec: format: int32 type: integer service: + default: "" type: string required: - port @@ -20124,10 +20201,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -20487,6 +20566,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -20500,6 +20586,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -20748,6 +20835,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -20755,6 +20843,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -20766,6 +20855,7 @@ spec: type: object x-kubernetes-map-type: atomic 
user: + default: admin type: string required: - image @@ -20774,6 +20864,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -20791,6 +20882,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string @@ -21091,10 +21183,12 @@ spec: diskURI: type: string fsType: + default: ext4 type: string kind: type: string readOnly: + default: false type: boolean required: - diskName @@ -21454,6 +21548,13 @@ spec: required: - path type: object + image: + properties: + pullPolicy: + type: string + reference: + type: string + type: object iscsi: properties: chapAuthDiscovery: @@ -21467,6 +21568,7 @@ spec: iqn: type: string iscsiInterface: + default: default type: string lun: format: int32 @@ -21715,6 +21817,7 @@ spec: image: type: string keyring: + default: /etc/ceph/keyring type: string monitors: items: @@ -21722,6 +21825,7 @@ spec: type: array x-kubernetes-list-type: atomic pool: + default: rbd type: string readOnly: type: boolean @@ -21733,6 +21837,7 @@ spec: type: object x-kubernetes-map-type: atomic user: + default: admin type: string required: - image @@ -21741,6 +21846,7 @@ spec: scaleIO: properties: fsType: + default: xfs type: string gateway: type: string @@ -21758,6 +21864,7 @@ spec: sslEnabled: type: boolean storageMode: + default: ThinProvisioned type: string storagePool: type: string diff --git a/mkdocs.yml b/mkdocs.yml index 2f69fbfb1fa5..d7653d9f982e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -54,6 +54,7 @@ validation: exclude_docs: | /proposals/ /requirements.txt + /tested-kubernetes-versions.md nav: - Home: README.md diff --git a/persist/sqldb/explosive_offload_node_status_repo.go b/persist/sqldb/explosive_offload_node_status_repo.go index d9e1816af2cb..a8d4cabe3fd5 100644 --- a/persist/sqldb/explosive_offload_node_status_repo.go +++ b/persist/sqldb/explosive_offload_node_status_repo.go @@ -8,7 +8,7 @@ import ( var ( ExplosiveOffloadNodeStatusRepo 
OffloadNodeStatusRepo = &explosiveOffloadNodeStatusRepo{} - OffloadNotSupportedError = fmt.Errorf("offload node status is not supported") + ErrOffloadNotSupported = fmt.Errorf("offload node status is not supported") ) type explosiveOffloadNodeStatusRepo struct{} @@ -18,21 +18,21 @@ func (n *explosiveOffloadNodeStatusRepo) IsEnabled() bool { } func (n *explosiveOffloadNodeStatusRepo) Save(string, string, wfv1.Nodes) (string, error) { - return "", OffloadNotSupportedError + return "", ErrOffloadNotSupported } func (n *explosiveOffloadNodeStatusRepo) Get(string, string) (wfv1.Nodes, error) { - return nil, OffloadNotSupportedError + return nil, ErrOffloadNotSupported } func (n *explosiveOffloadNodeStatusRepo) List(string) (map[UUIDVersion]wfv1.Nodes, error) { - return nil, OffloadNotSupportedError + return nil, ErrOffloadNotSupported } func (n *explosiveOffloadNodeStatusRepo) Delete(string, string) error { - return OffloadNotSupportedError + return ErrOffloadNotSupported } func (n *explosiveOffloadNodeStatusRepo) ListOldOffloads(string) (map[string][]string, error) { - return nil, OffloadNotSupportedError + return nil, ErrOffloadNotSupported } diff --git a/persist/sqldb/mocks/WorkflowArchive.go b/persist/sqldb/mocks/WorkflowArchive.go index 4731333ce7e4..521b619875ae 100644 --- a/persist/sqldb/mocks/WorkflowArchive.go +++ b/persist/sqldb/mocks/WorkflowArchive.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -160,7 +160,7 @@ func (_m *WorkflowArchive) GetWorkflowForEstimator(namespace string, requirement return r0, r1 } -// IsEnabled provides a mock function with given fields: +// IsEnabled provides a mock function with no fields func (_m *WorkflowArchive) IsEnabled() bool { ret := _m.Called() @@ -208,7 +208,7 @@ func (_m *WorkflowArchive) ListWorkflows(options utils.ListOptions) (v1alpha1.Wo return r0, r1 } -// ListWorkflowsLabelKeys provides a mock function with given fields: +// ListWorkflowsLabelKeys provides a mock function with no fields func (_m *WorkflowArchive) ListWorkflowsLabelKeys() (*v1alpha1.LabelKeys, error) { ret := _m.Called() diff --git a/persist/sqldb/workflow_archive.go b/persist/sqldb/workflow_archive.go index 7371fb37bedd..01d6e0069924 100644 --- a/persist/sqldb/workflow_archive.go +++ b/persist/sqldb/workflow_archive.go @@ -98,7 +98,7 @@ func NewWorkflowArchive(session db.Session, clusterName, managedNamespace string func (r *workflowArchive) ArchiveWorkflow(wf *wfv1.Workflow) error { logCtx := log.WithFields(log.Fields{"uid": wf.UID, "labels": wf.GetLabels()}) logCtx.Debug("Archiving workflow") - wf.ObjectMeta.Labels[common.LabelKeyWorkflowArchivingStatus] = "Persisted" + wf.Labels[common.LabelKeyWorkflowArchivingStatus] = "Persisted" workflow, err := json.Marshal(wf) if err != nil { return err @@ -369,7 +369,7 @@ func (r *workflowArchive) GetWorkflow(uid string, namespace string, name string) return nil, err } // For backward compatibility, we should label workflow retrieved from DB as Persisted. 
- wf.ObjectMeta.Labels[common.LabelKeyWorkflowArchivingStatus] = "Persisted" + wf.Labels[common.LabelKeyWorkflowArchivingStatus] = "Persisted" return wf, nil } diff --git a/pkg/apiclient/argo-kube-client.go b/pkg/apiclient/argo-kube-client.go index 6678de64d3d2..465b653ca632 100644 --- a/pkg/apiclient/argo-kube-client.go +++ b/pkg/apiclient/argo-kube-client.go @@ -4,15 +4,12 @@ import ( "context" "fmt" - eventsource "github.com/argoproj/argo-events/pkg/client/eventsource/clientset/versioned" - sensor "github.com/argoproj/argo-events/pkg/client/sensor/clientset/versioned" + events "github.com/argoproj/argo-events/pkg/client/clientset/versioned" "k8s.io/client-go/dynamic" "k8s.io/client-go/kubernetes" restclient "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" - "github.com/argoproj/argo-workflows/v3/server/workflow/store" - "github.com/argoproj/argo-workflows/v3" "github.com/argoproj/argo-workflows/v3/persist/sqldb" "github.com/argoproj/argo-workflows/v3/pkg/apiclient/clusterworkflowtemplate" @@ -27,6 +24,7 @@ import ( cronworkflowserver "github.com/argoproj/argo-workflows/v3/server/cronworkflow" "github.com/argoproj/argo-workflows/v3/server/types" workflowserver "github.com/argoproj/argo-workflows/v3/server/workflow" + "github.com/argoproj/argo-workflows/v3/server/workflow/store" workflowtemplateserver "github.com/argoproj/argo-workflows/v3/server/workflowtemplate" "github.com/argoproj/argo-workflows/v3/util/help" "github.com/argoproj/argo-workflows/v3/util/instanceid" @@ -34,7 +32,7 @@ import ( var ( argoKubeOffloadNodeStatusRepo = sqldb.ExplosiveOffloadNodeStatusRepo - NoArgoServerErr = fmt.Errorf("this is impossible if you are not using the Argo Server, see %s", help.CLI()) + ErrNoArgoServer = fmt.Errorf("this is impossible if you are not using the Argo Server, see %s", help.CLI()) ) type argoKubeClient struct { @@ -59,11 +57,7 @@ func newArgoKubeClient(ctx context.Context, clientConfig clientcmd.ClientConfig, if err != nil { return nil, nil, err } - 
eventSourceInterface, err := eventsource.NewForConfig(restConfig) - if err != nil { - return nil, nil, err - } - sensorInterface, err := sensor.NewForConfig(restConfig) + eventInterface, err := events.NewForConfig(restConfig) if err != nil { return nil, nil, err } @@ -72,11 +66,10 @@ func newArgoKubeClient(ctx context.Context, clientConfig clientcmd.ClientConfig, return nil, nil, err } clients := &types.Clients{ - Dynamic: dynamicClient, - EventSource: eventSourceInterface, - Kubernetes: kubeClient, - Sensor: sensorInterface, - Workflow: wfClient, + Dynamic: dynamicClient, + Events: eventInterface, + Kubernetes: kubeClient, + Workflow: wfClient, } gatekeeper, err := auth.NewGatekeeper(auth.Modes{auth.Server: true}, clients, restConfig, nil, auth.DefaultClientForAuthorization, "unused", "unused", false, nil) if err != nil { @@ -104,11 +97,11 @@ func (a *argoKubeClient) NewWorkflowTemplateServiceClient() (workflowtemplate.Wo } func (a *argoKubeClient) NewArchivedWorkflowServiceClient() (workflowarchivepkg.ArchivedWorkflowServiceClient, error) { - return nil, NoArgoServerErr + return nil, ErrNoArgoServer } func (a *argoKubeClient) NewInfoServiceClient() (infopkg.InfoServiceClient, error) { - return nil, NoArgoServerErr + return nil, ErrNoArgoServer } func (a *argoKubeClient) NewClusterWorkflowTemplateServiceClient() (clusterworkflowtemplate.ClusterWorkflowTemplateServiceClient, error) { diff --git a/pkg/apiclient/argo-server-client.go b/pkg/apiclient/argo-server-client.go index 0a4419b2dc09..d68ed55574a0 100644 --- a/pkg/apiclient/argo-server-client.go +++ b/pkg/apiclient/argo-server-client.go @@ -66,7 +66,7 @@ func newClientConn(opts ArgoServerOpts) (*grpc.ClientConn, error) { if opts.Secure { creds = grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{InsecureSkipVerify: opts.InsecureSkipVerify})) } - conn, err := grpc.Dial(opts.URL, + conn, err := grpc.NewClient(opts.URL, grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(MaxClientGRPCMessageSize)), 
creds, grpc.WithUnaryInterceptor(grpcutil.GetVersionHeaderClientUnaryInterceptor), diff --git a/pkg/apiclient/eventsource/eventsource.pb.go b/pkg/apiclient/eventsource/eventsource.pb.go index 7768031c36bc..fd290bf507cb 100644 --- a/pkg/apiclient/eventsource/eventsource.pb.go +++ b/pkg/apiclient/eventsource/eventsource.pb.go @@ -6,7 +6,7 @@ package eventsource import ( context "context" fmt "fmt" - v1alpha1 "github.com/argoproj/argo-events/pkg/apis/eventsource/v1alpha1" + v1alpha1 "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1" proto "github.com/gogo/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" @@ -621,60 +621,61 @@ func init() { } var fileDescriptor_b13fbf03f636aa35 = []byte{ - // 848 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x96, 0xcf, 0x6e, 0xd3, 0x4e, - 0x10, 0xc7, 0xb5, 0xe9, 0x9f, 0xdf, 0x2f, 0x5b, 0x15, 0x9a, 0x15, 0x55, 0x23, 0xab, 0xaa, 0x5a, - 0xf3, 0x2f, 0x04, 0x62, 0x37, 0x2d, 0x48, 0x08, 0x21, 0x0e, 0xd0, 0xaa, 0xa2, 0x0a, 0x05, 0xb9, - 0x45, 0x08, 0x2e, 0xc8, 0x71, 0x16, 0xc7, 0x8d, 0xed, 0x35, 0xf6, 0x36, 0x55, 0x85, 0x7a, 0xe9, - 0x89, 0x03, 0x12, 0x42, 0x88, 0x57, 0x80, 0x03, 0x37, 0xe0, 0x04, 0x2f, 0xc0, 0x11, 0x89, 0x17, - 0x40, 0x15, 0x0f, 0xc0, 0x23, 0xa0, 0x5d, 0x3b, 0xf5, 0xda, 0x89, 0x49, 0x2a, 0xa5, 0xb7, 0xc9, - 0x66, 0x67, 0xe6, 0xf3, 0x9d, 0x59, 0xcf, 0x2e, 0xbc, 0xec, 0xb5, 0x4c, 0x55, 0xf7, 0x2c, 0xc3, - 0xb6, 0xb0, 0x4b, 0x55, 0xdc, 0xc6, 0x2e, 0x0d, 0xc8, 0x8e, 0x6f, 0x60, 0xd1, 0x56, 0x3c, 0x9f, - 0x50, 0x82, 0x26, 0x84, 0x25, 0x69, 0xd6, 0x24, 0xc4, 0xb4, 0x31, 0x73, 0x56, 0x75, 0xd7, 0x25, - 0x54, 0xa7, 0x16, 0x71, 0x83, 0x70, 0xab, 0x74, 0xb5, 0x75, 0x3d, 0x50, 0x2c, 0xc2, 0xfe, 0x75, - 0x74, 0xa3, 0x69, 0xb9, 0xd8, 0xdf, 0x53, 0xa3, 0x5c, 0x81, 0xea, 0x60, 0xaa, 0xab, 0xed, 0xaa, - 0x6a, 0x62, 0x17, 0xfb, 0x3a, 0xc5, 0x8d, 0xc8, 0x4b, 0x8e, 0xbd, 0x54, 0x83, 0xf8, 0xb8, 0xd7, - 0x9e, 0x7b, 0xa6, 
0x45, 0x9b, 0x3b, 0x75, 0xc5, 0x20, 0x8e, 0xaa, 0xfb, 0x26, 0xf1, 0x7c, 0xb2, - 0xcd, 0x8d, 0x4a, 0x48, 0x17, 0x67, 0x11, 0xc5, 0xb4, 0xab, 0xba, 0xed, 0x35, 0xf5, 0xae, 0x70, - 0xf2, 0x7b, 0x00, 0x8b, 0x77, 0x7c, 0xac, 0x53, 0xbc, 0xca, 0xb6, 0x6f, 0xf2, 0xed, 0x1a, 0x7e, - 0xbe, 0x83, 0x03, 0x8a, 0x66, 0x61, 0xde, 0xd5, 0x1d, 0x1c, 0x78, 0xba, 0x81, 0x8b, 0x60, 0x1e, - 0x94, 0xf2, 0x5a, 0xbc, 0x80, 0x6c, 0x18, 0x16, 0x24, 0xf4, 0x29, 0xe6, 0xe6, 0x41, 0x69, 0x62, - 0x69, 0x5d, 0x89, 0xf9, 0x94, 0x0e, 0x1f, 0x37, 0x9e, 0x86, 0x3c, 0x8a, 0xd7, 0x32, 0x15, 0xc6, - 0xa7, 0x88, 0x05, 0xee, 0xf0, 0x29, 0x22, 0x85, 0x18, 0x5e, 0xbe, 0x0b, 0xa7, 0xd7, 0x30, 0xed, - 0x01, 0x89, 0xe0, 0x28, 0x63, 0x8a, 0xf8, 0xb8, 0x9d, 0x04, 0xcf, 0xa5, 0xc0, 0xe5, 0x57, 0x00, - 0xce, 0xd4, 0xac, 0x40, 0x0c, 0x16, 0x0c, 0x26, 0x79, 0x13, 0x4e, 0xd8, 0x56, 0x40, 0xef, 0x7b, - 0xbc, 0xd7, 0x91, 0xe4, 0xaa, 0x12, 0xb6, 0x4d, 0x11, 0x9b, 0x1d, 0xcb, 0x64, 0xcd, 0x56, 0xda, - 0x55, 0xa5, 0x16, 0x3b, 0x6a, 0x62, 0x14, 0xf9, 0x03, 0x80, 0xc5, 0x15, 0x6c, 0xe3, 0x9e, 0x2d, - 0x38, 0xb6, 0x3a, 0xf4, 0x18, 0x4e, 0x36, 0x78, 0xb4, 0x0e, 0xe5, 0x08, 0xa7, 0x5c, 0x1e, 0x8c, - 0x72, 0x45, 0x74, 0xd5, 0x92, 0x91, 0xe4, 0xaf, 0x00, 0x16, 0x1f, 0x7a, 0x0d, 0x7d, 0x48, 0xa4, - 0xa9, 0x03, 0x34, 0x72, 0xb2, 0x07, 0xe8, 0x0f, 0x80, 0x33, 0x62, 0xc7, 0x6b, 0xc4, 0x1c, 0xb0, - 0xeb, 0x1d, 0x65, 0x39, 0x41, 0x59, 0x09, 0x9e, 0x16, 0x82, 0x6f, 0xed, 0x79, 0x21, 0x7f, 0x5e, - 0x4b, 0x2f, 0xb3, 0xd8, 0x7c, 0x69, 0x83, 0x85, 0x18, 0x0d, 0x63, 0x1f, 0x2d, 0xb0, 0xd8, 0xa6, - 0x8f, 0xbd, 0xe2, 0x58, 0x18, 0x9b, 0xd9, 0x68, 0x0d, 0x4e, 0x7a, 0xa4, 0x51, 0x23, 0x66, 0xa7, - 0x83, 0xe3, 0xbc, 0x32, 0x0b, 0x42, 0x07, 0x15, 0x36, 0x1e, 0x58, 0xbf, 0x1e, 0x88, 0x1b, 0xb5, - 0xa4, 0x9f, 0x7c, 0x90, 0x83, 0xff, 0xd7, 0x88, 0xb9, 0xea, 0x52, 0x7f, 0xaf, 0x8f, 0xc6, 0xa4, - 0x9e, 0x8d, 0x58, 0x6e, 0x7a, 0x79, 0x68, 0xca, 0xcf, 0xc0, 0x31, 0x1b, 0xb7, 0xb1, 0x1d, 0x49, - 0x0f, 0x7f, 0xa0, 0x5b, 0x70, 0x94, 0x5a, 0x0e, 0x8e, 
0x24, 0x97, 0x07, 0x3b, 0xb4, 0x5b, 0x96, - 0x83, 0x35, 0xee, 0x87, 0xa6, 0xe0, 0x88, 0x13, 0x98, 0xc5, 0xff, 0x78, 0x4c, 0x66, 0xca, 0xaf, - 0x01, 0x9c, 0x16, 0xfa, 0xfe, 0x48, 0xa7, 0x46, 0x93, 0xff, 0x66, 0xb5, 0xa7, 0x0c, 0x3f, 0x3a, - 0xb1, 0xcc, 0x46, 0x75, 0x38, 0x4e, 0xea, 0xdb, 0xd8, 0xa0, 0x27, 0x30, 0xcf, 0xa2, 0xc8, 0xf2, - 0x2c, 0x94, 0x84, 0xe5, 0xf0, 0x8b, 0x6b, 0x68, 0x38, 0xf0, 0x88, 0x1b, 0xe0, 0xa5, 0x8f, 0x79, - 0x88, 0x84, 0xbf, 0x37, 0xb1, 0xdf, 0xb6, 0x0c, 0x8c, 0xbe, 0x00, 0x58, 0xe8, 0x1a, 0xd4, 0xe8, - 0x7c, 0x22, 0x6b, 0xd6, 0x20, 0x97, 0x86, 0xa8, 0x42, 0xbe, 0x72, 0xf0, 0xf3, 0xf7, 0xdb, 0xdc, - 0x05, 0x79, 0x81, 0x5f, 0x53, 0xed, 0x6a, 0x78, 0xcf, 0x54, 0x42, 0x97, 0x40, 0x7d, 0x71, 0x74, - 0xa6, 0xf6, 0x6f, 0x80, 0x32, 0xfa, 0x04, 0xe0, 0xa9, 0xe4, 0xdc, 0x46, 0x72, 0x22, 0x47, 0xcf, - 0xa1, 0x3e, 0x54, 0xe0, 0x45, 0x0e, 0x5c, 0x46, 0xa5, 0xbe, 0xc0, 0xa1, 0xbd, 0x8f, 0xde, 0x01, - 0x58, 0xe8, 0x9a, 0xc8, 0xa9, 0x5a, 0x67, 0x4d, 0x6c, 0xe9, 0x62, 0x62, 0x5b, 0x76, 0xa3, 0x3b, - 0x5c, 0xe5, 0xc1, 0xb9, 0xbe, 0x01, 0x58, 0xe8, 0x9a, 0xbf, 0x29, 0xae, 0xac, 0xf9, 0x3c, 0xd4, - 0x92, 0x2e, 0x73, 0xf4, 0x8a, 0x34, 0x30, 0x3a, 0x3b, 0x0a, 0x9f, 0x01, 0x9c, 0x4a, 0x5f, 0xbb, - 0xe8, 0x5c, 0x22, 0x59, 0xc6, 0xad, 0x2c, 0x6d, 0x0c, 0x8f, 0x9d, 0xa5, 0x90, 0x2f, 0x71, 0xfe, - 0xb3, 0xa8, 0xff, 0x19, 0x46, 0x2f, 0x01, 0x9c, 0x4a, 0x5f, 0x1b, 0x29, 0xea, 0x8c, 0x5b, 0x45, - 0x9a, 0x4e, 0x6a, 0x8b, 0x06, 0xb1, 0x7c, 0x8d, 0x27, 0x57, 0x51, 0xa5, 0x93, 0x3c, 0xa0, 0x3e, - 0xd6, 0x9d, 0x7f, 0xd4, 0xd0, 0x26, 0x66, 0xb0, 0x08, 0xd0, 0x1b, 0x00, 0x0b, 0xf1, 0xf8, 0x3a, - 0x5e, 0x05, 0xe5, 0x2c, 0xe2, 0x38, 0x60, 0xf7, 0x87, 0xd2, 0x0f, 0x6c, 0x11, 0xdc, 0x5e, 0xff, - 0x7e, 0x38, 0x07, 0x7e, 0x1c, 0xce, 0x81, 0x5f, 0x87, 0x73, 0xe0, 0xc9, 0xcd, 0xcc, 0xc7, 0xe9, - 0x2e, 0xf1, 0x5b, 0xcf, 0x6c, 0xb2, 0x7b, 0xf4, 0x3e, 0xed, 0x7e, 0x71, 0xd7, 0xc7, 0xf9, 0x93, - 0x74, 0xf9, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x19, 0x2c, 0x8c, 0x91, 0x95, 0x0b, 0x00, 
0x00, + // 853 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x96, 0xcf, 0x6b, 0xdc, 0x46, + 0x14, 0xc7, 0x99, 0xf5, 0x8f, 0x76, 0xc7, 0xb8, 0xf5, 0x0e, 0x35, 0x5e, 0x84, 0x31, 0xb6, 0xfa, + 0x6b, 0xbb, 0xed, 0x4a, 0x5e, 0xbb, 0x85, 0x52, 0x4a, 0x0f, 0xad, 0x8d, 0xeb, 0xb2, 0xb8, 0x45, + 0x76, 0x29, 0x2d, 0x94, 0x20, 0x6b, 0x5f, 0xb4, 0xf2, 0x4a, 0x1a, 0x45, 0x1a, 0xaf, 0x31, 0xc1, + 0x17, 0x9f, 0x42, 0xc8, 0x25, 0x84, 0x90, 0x73, 0x4e, 0x39, 0x25, 0x39, 0x25, 0x7f, 0x43, 0x8e, + 0x81, 0xfc, 0x03, 0xc1, 0xe4, 0x0f, 0xc8, 0x9f, 0x10, 0x66, 0xa4, 0xb5, 0x46, 0xda, 0x55, 0x76, + 0x0d, 0x9b, 0xdb, 0x9b, 0xd1, 0xbc, 0xef, 0x7c, 0xde, 0x7b, 0xa3, 0x37, 0x83, 0xbf, 0x0d, 0xba, + 0xb6, 0x6e, 0x06, 0x8e, 0xe5, 0x3a, 0xe0, 0x33, 0x1d, 0x7a, 0xe0, 0xb3, 0x88, 0x1e, 0x87, 0x16, + 0xc8, 0xb6, 0x16, 0x84, 0x94, 0x51, 0x32, 0x27, 0x4d, 0x29, 0xcb, 0x36, 0xa5, 0xb6, 0x0b, 0xdc, + 0x59, 0x37, 0x7d, 0x9f, 0x32, 0x93, 0x39, 0xd4, 0x8f, 0xe2, 0xa5, 0xca, 0xf7, 0xdd, 0x1f, 0x23, + 0xcd, 0xa1, 0xfc, 0xab, 0x67, 0x5a, 0x1d, 0xc7, 0x87, 0xf0, 0x54, 0x4f, 0xf6, 0x8a, 0x74, 0x0f, + 0x98, 0xa9, 0xf7, 0x9a, 0xba, 0x0d, 0x3e, 0x84, 0x26, 0x83, 0x76, 0xe2, 0xa5, 0xa6, 0x5e, 0xba, + 0x45, 0x43, 0x18, 0xb6, 0xe6, 0x77, 0xdb, 0x61, 0x9d, 0xe3, 0x43, 0xcd, 0xa2, 0x9e, 0x6e, 0x86, + 0x36, 0x0d, 0x42, 0x7a, 0x24, 0x8c, 0x46, 0x4c, 0x97, 0xee, 0x92, 0x8c, 0x7b, 0x4d, 0xd3, 0x0d, + 0x3a, 0xe6, 0x80, 0x92, 0xfa, 0x10, 0xe1, 0xea, 0x6f, 0x21, 0x98, 0x0c, 0xb6, 0xf9, 0xca, 0x7d, + 0x11, 0x97, 0x01, 0x37, 0x8e, 0x21, 0x62, 0x64, 0x19, 0x97, 0x7d, 0xd3, 0x83, 0x28, 0x30, 0x2d, + 0xa8, 0xa2, 0x55, 0x54, 0x2b, 0x1b, 0xe9, 0x04, 0xb1, 0x71, 0x9c, 0x8b, 0xd8, 0xa7, 0x5a, 0x5a, + 0x45, 0xb5, 0xb9, 0x8d, 0x6d, 0x2d, 0x45, 0xd3, 0xfa, 0x68, 0xc2, 0xb8, 0x16, 0xa3, 0x68, 0x41, + 0xd7, 0xd6, 0x38, 0x9a, 0x96, 0x8c, 0xfb, 0x68, 0x9a, 0x0c, 0x20, 0x2b, 0xab, 0xbb, 0x78, 0x71, + 0x07, 0xd8, 0x10, 0x3e, 0x82, 0xa7, 0x39, 0x4e, 0x82, 0x26, 0xec, 0x2c, 0x73, 
0x29, 0xc7, 0xac, + 0xde, 0x41, 0x78, 0xa9, 0xe5, 0x44, 0xb2, 0x58, 0x34, 0x5e, 0xb4, 0xfb, 0x78, 0xce, 0x75, 0x22, + 0xf6, 0x67, 0x20, 0x2a, 0x9c, 0x44, 0xdb, 0xd4, 0xe2, 0x62, 0x69, 0x72, 0x89, 0xd3, 0x08, 0x79, + 0x89, 0xb5, 0x5e, 0x53, 0x6b, 0xa5, 0x8e, 0x86, 0xac, 0xa2, 0x3e, 0x42, 0xb8, 0xba, 0x05, 0x2e, + 0x0c, 0xcd, 0xfe, 0x95, 0xa3, 0x23, 0xff, 0xe2, 0xf9, 0xb6, 0x50, 0xeb, 0x53, 0x4e, 0x09, 0xca, + 0xcd, 0xf1, 0x28, 0xb7, 0x64, 0x57, 0x23, 0xab, 0xa4, 0x3e, 0x43, 0xb8, 0xfa, 0x77, 0xd0, 0x36, + 0x27, 0x44, 0x9a, 0x3b, 0x3b, 0x53, 0x1f, 0xec, 0xec, 0xbc, 0x45, 0x78, 0x49, 0x2e, 0x76, 0x8b, + 0xda, 0x63, 0x16, 0xbc, 0x1f, 0x54, 0x49, 0x0a, 0xaa, 0x86, 0x3f, 0x95, 0xc4, 0x0f, 0x4e, 0x83, + 0x18, 0xbd, 0x6c, 0xe4, 0xa7, 0xb9, 0xb6, 0x98, 0xda, 0xe3, 0x12, 0xd3, 0xb1, 0xf6, 0xe5, 0x04, + 0xd7, 0xb6, 0x43, 0x08, 0xaa, 0x33, 0xb1, 0x36, 0xb7, 0xc9, 0x0e, 0x9e, 0x0f, 0x68, 0xbb, 0x45, + 0xed, 0x7e, 0xf1, 0x66, 0x45, 0x52, 0xd6, 0xa4, 0xe2, 0x69, 0xbc, 0x1f, 0xf0, 0x52, 0xfd, 0x25, + 0x2f, 0x34, 0xb2, 0x7e, 0xea, 0x79, 0x09, 0x7f, 0xdc, 0xa2, 0xf6, 0xb6, 0xcf, 0xc2, 0xd3, 0x11, + 0x31, 0x66, 0xe3, 0xd9, 0x4b, 0xc3, 0xcd, 0x4f, 0x4f, 0x2c, 0xf2, 0xcf, 0xf0, 0x8c, 0x0b, 0x3d, + 0x70, 0x93, 0xd0, 0xe3, 0x01, 0xf9, 0x05, 0x4f, 0x33, 0xc7, 0x83, 0x24, 0xe4, 0xfa, 0x78, 0xe7, + 0xf5, 0xc0, 0xf1, 0xc0, 0x10, 0x7e, 0x64, 0x01, 0x4f, 0x79, 0x91, 0x5d, 0xfd, 0x48, 0x68, 0x72, + 0x53, 0xbd, 0x8d, 0xf0, 0xa2, 0x54, 0xf7, 0x7f, 0x4c, 0x66, 0x75, 0xc4, 0x98, 0xe7, 0x9e, 0x71, + 0xfc, 0xe4, 0xb0, 0x72, 0x9b, 0xfc, 0x8f, 0x67, 0xe9, 0xe1, 0x11, 0x58, 0x6c, 0xb2, 0x5d, 0x2c, + 0x11, 0x55, 0x97, 0xb1, 0x22, 0x4d, 0xc7, 0xff, 0x59, 0xdb, 0x80, 0x28, 0xa0, 0x7e, 0x04, 0x1b, + 0x0f, 0xca, 0x98, 0x48, 0x9f, 0xf7, 0x21, 0xec, 0x39, 0x16, 0x90, 0xa7, 0x08, 0x57, 0x06, 0x3a, + 0x33, 0xf9, 0x52, 0x93, 0xaf, 0xa4, 0xa2, 0xce, 0xad, 0x4c, 0x26, 0x00, 0xf5, 0xbb, 0xf3, 0x57, + 0x6f, 0xee, 0x95, 0xbe, 0x52, 0xd7, 0xc4, 0x6d, 0xd4, 0x6b, 0xc6, 0xd7, 0x49, 0x23, 0xde, 0x3d, + 0xd2, 0x6f, 0x5e, 
0x9e, 0xa4, 0xb3, 0x9f, 0x50, 0x9d, 0x3c, 0x46, 0xf8, 0x93, 0x6c, 0xa3, 0x26, + 0x6a, 0x06, 0x77, 0x68, 0x17, 0x9f, 0x14, 0xeb, 0xba, 0x60, 0xad, 0x93, 0xda, 0x48, 0xd6, 0xd8, + 0x3e, 0x23, 0xf7, 0x11, 0xae, 0x0c, 0x74, 0xdf, 0x5c, 0x86, 0x8b, 0xba, 0xb3, 0xf2, 0x75, 0x66, + 0x59, 0x71, 0x79, 0xfb, 0x5c, 0xf5, 0xf1, 0xb9, 0x9e, 0x23, 0x5c, 0x19, 0xe8, 0xb5, 0x39, 0xae, + 0xa2, 0x5e, 0x3c, 0xa9, 0x6c, 0x6e, 0x0a, 0xea, 0x86, 0x32, 0x36, 0x35, 0x3f, 0x00, 0x4f, 0x10, + 0x5e, 0xc8, 0xdf, 0xae, 0xe4, 0x8b, 0x0c, 0x77, 0xc1, 0xe5, 0xab, 0xec, 0x4e, 0x04, 0x9b, 0xab, + 0xab, 0xdf, 0x08, 0xf4, 0xcf, 0xc9, 0xe8, 0x43, 0x4b, 0x6e, 0x21, 0xbc, 0x90, 0xbf, 0x1d, 0x72, + 0xc0, 0x05, 0x97, 0x87, 0xb2, 0x98, 0x0d, 0x2b, 0xe9, 0xb7, 0xea, 0x0f, 0x62, 0x73, 0x9d, 0x34, + 0xfa, 0x9b, 0x47, 0x2c, 0x04, 0xd3, 0x7b, 0x4f, 0xfa, 0x5c, 0x6a, 0x47, 0xeb, 0x88, 0xdc, 0x45, + 0xb8, 0x92, 0x76, 0xa9, 0xab, 0x25, 0x4f, 0x2d, 0x22, 0x4e, 0x05, 0x07, 0x7f, 0x8f, 0x51, 0x60, + 0xeb, 0xe8, 0xd7, 0x3f, 0x5e, 0x5c, 0xac, 0xa0, 0x97, 0x17, 0x2b, 0xe8, 0xf5, 0xc5, 0x0a, 0xfa, + 0xef, 0xe7, 0xc2, 0x47, 0xe7, 0x09, 0x0d, 0xbb, 0xd7, 0x5d, 0x7a, 0x72, 0xf9, 0xee, 0x1c, 0x7c, + 0x49, 0x1f, 0xce, 0x8a, 0xf7, 0xe6, 0xe6, 0xbb, 0x00, 0x00, 0x00, 0xff, 0xff, 0x4b, 0xb9, 0x7d, + 0x65, 0x6d, 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
diff --git a/pkg/apiclient/eventsource/eventsource.proto b/pkg/apiclient/eventsource/eventsource.proto index 932c6944cce6..4b3df7fde7a7 100644 --- a/pkg/apiclient/eventsource/eventsource.proto +++ b/pkg/apiclient/eventsource/eventsource.proto @@ -4,13 +4,13 @@ option go_package = "github.com/argoproj/argo-workflows/pkg/apiclient/eventsourc import "google/api/annotations.proto"; import "k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto"; import "k8s.io/api/core/v1/generated.proto"; -import "github.com/argoproj/argo-events/pkg/apis/eventsource/v1alpha1/generated.proto"; +import "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1/generated.proto"; package eventsource; message CreateEventSourceRequest { string namespace = 1; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 2; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource eventSource = 2; } message GetEventSourceRequest { @@ -32,7 +32,7 @@ message DeleteEventSourceRequest { message UpdateEventSourceRequest { string name = 1; string namespace = 2; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource eventSource = 3; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource eventSource = 3; } message EventSourcesLogsRequest { @@ -63,7 +63,7 @@ message LogEntry { message EventSourceWatchEvent { string type = 1; - github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource object = 2; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource object = 2; } message EventSourceDeletedResponse { @@ -71,14 +71,14 @@ message EventSourceDeletedResponse { service EventSourceService { - rpc CreateEventSource(CreateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + rpc CreateEventSource(CreateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource) { option (google.api.http) = { post : 
"/api/v1/event-sources/{namespace}" body : "*" }; } - rpc GetEventSource(GetEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + rpc GetEventSource(GetEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource) { option (google.api.http).get = "/api/v1/event-sources/{namespace}/{name}"; } @@ -86,14 +86,14 @@ service EventSourceService { option (google.api.http).delete = "/api/v1/event-sources/{namespace}/{name}"; } - rpc UpdateEventSource(UpdateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSource) { + rpc UpdateEventSource(UpdateEventSourceRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSource) { option (google.api.http) = { put : "/api/v1/event-sources/{namespace}/{name}" body : "*" }; } - rpc ListEventSources(ListEventSourcesRequest) returns (github.com.argoproj.argo_events.pkg.apis.eventsource.v1alpha1.EventSourceList) { + rpc ListEventSources(ListEventSourcesRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.EventSourceList) { option (google.api.http).get = "/api/v1/event-sources/{namespace}"; } rpc EventSourcesLogs(EventSourcesLogsRequest) returns (stream LogEntry) { diff --git a/pkg/apiclient/offline-client.go b/pkg/apiclient/offline-client.go index c6048d1dffe8..9f320c19af08 100644 --- a/pkg/apiclient/offline-client.go +++ b/pkg/apiclient/offline-client.go @@ -35,7 +35,7 @@ type offlineClient struct { namespacedWorkflowTemplateGetterMap offlineWorkflowTemplateGetterMap } -var OfflineErr = fmt.Errorf("not supported when you are in offline mode") +var ErrOffline = fmt.Errorf("not supported when you are in offline mode") var _ Client = &offlineClient{} @@ -129,11 +129,11 @@ func (c *offlineClient) NewClusterWorkflowTemplateServiceClient() (clusterworkfl } func (c *offlineClient) NewArchivedWorkflowServiceClient() (workflowarchivepkg.ArchivedWorkflowServiceClient, 
error) { - return nil, NoArgoServerErr + return nil, ErrNoArgoServer } func (c *offlineClient) NewInfoServiceClient() (infopkg.InfoServiceClient, error) { - return nil, NoArgoServerErr + return nil, ErrNoArgoServer } type offlineWorkflowTemplateNamespacedGetter struct { diff --git a/pkg/apiclient/offline-cluster-workflow-template-service-client.go b/pkg/apiclient/offline-cluster-workflow-template-service-client.go index 51795f5ed856..b96e4afd492a 100644 --- a/pkg/apiclient/offline-cluster-workflow-template-service-client.go +++ b/pkg/apiclient/offline-cluster-workflow-template-service-client.go @@ -19,23 +19,23 @@ type OfflineClusterWorkflowTemplateServiceClient struct { var _ clusterworkflowtmplpkg.ClusterWorkflowTemplateServiceClient = &OfflineClusterWorkflowTemplateServiceClient{} func (o OfflineClusterWorkflowTemplateServiceClient) CreateClusterWorkflowTemplate(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateCreateRequest, opts ...grpc.CallOption) (*v1alpha1.ClusterWorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineClusterWorkflowTemplateServiceClient) GetClusterWorkflowTemplate(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateGetRequest, opts ...grpc.CallOption) (*v1alpha1.ClusterWorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineClusterWorkflowTemplateServiceClient) ListClusterWorkflowTemplates(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateListRequest, opts ...grpc.CallOption) (*v1alpha1.ClusterWorkflowTemplateList, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineClusterWorkflowTemplateServiceClient) UpdateClusterWorkflowTemplate(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateUpdateRequest, opts ...grpc.CallOption) (*v1alpha1.ClusterWorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o 
OfflineClusterWorkflowTemplateServiceClient) DeleteClusterWorkflowTemplate(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateDeleteRequest, opts ...grpc.CallOption) (*clusterworkflowtmplpkg.ClusterWorkflowTemplateDeleteResponse, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineClusterWorkflowTemplateServiceClient) LintClusterWorkflowTemplate(ctx context.Context, req *clusterworkflowtmplpkg.ClusterWorkflowTemplateLintRequest, opts ...grpc.CallOption) (*v1alpha1.ClusterWorkflowTemplate, error) { diff --git a/pkg/apiclient/offline-cron-workflow-service-client.go b/pkg/apiclient/offline-cron-workflow-service-client.go index 6dbd1a7bd407..a09add307725 100644 --- a/pkg/apiclient/offline-cron-workflow-service-client.go +++ b/pkg/apiclient/offline-cron-workflow-service-client.go @@ -27,29 +27,29 @@ func (o OfflineCronWorkflowServiceClient) LintCronWorkflow(ctx context.Context, } func (o OfflineCronWorkflowServiceClient) CreateCronWorkflow(ctx context.Context, req *cronworkflow.CreateCronWorkflowRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) ListCronWorkflows(ctx context.Context, req *cronworkflow.ListCronWorkflowsRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflowList, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) GetCronWorkflow(ctx context.Context, req *cronworkflow.GetCronWorkflowRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) UpdateCronWorkflow(ctx context.Context, req *cronworkflow.UpdateCronWorkflowRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) DeleteCronWorkflow(ctx context.Context, req *cronworkflow.DeleteCronWorkflowRequest, _ 
...grpc.CallOption) (*cronworkflow.CronWorkflowDeletedResponse, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) ResumeCronWorkflow(ctx context.Context, req *cronworkflow.CronWorkflowResumeRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineCronWorkflowServiceClient) SuspendCronWorkflow(ctx context.Context, req *cronworkflow.CronWorkflowSuspendRequest, _ ...grpc.CallOption) (*v1alpha1.CronWorkflow, error) { - return nil, OfflineErr + return nil, ErrOffline } diff --git a/pkg/apiclient/offline-workflow-service-client.go b/pkg/apiclient/offline-workflow-service-client.go index 706db86518fb..fbc965b13dff 100644 --- a/pkg/apiclient/offline-workflow-service-client.go +++ b/pkg/apiclient/offline-workflow-service-client.go @@ -19,55 +19,55 @@ type OfflineWorkflowServiceClient struct { var _ workflowpkg.WorkflowServiceClient = &OfflineWorkflowServiceClient{} func (o OfflineWorkflowServiceClient) CreateWorkflow(context.Context, *workflowpkg.WorkflowCreateRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) GetWorkflow(context.Context, *workflowpkg.WorkflowGetRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) ListWorkflows(context.Context, *workflowpkg.WorkflowListRequest, ...grpc.CallOption) (*wfv1.WorkflowList, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) WatchWorkflows(context.Context, *workflowpkg.WatchWorkflowsRequest, ...grpc.CallOption) (workflowpkg.WorkflowService_WatchWorkflowsClient, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) WatchEvents(context.Context, *workflowpkg.WatchEventsRequest, ...grpc.CallOption) 
(workflowpkg.WorkflowService_WatchEventsClient, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) DeleteWorkflow(context.Context, *workflowpkg.WorkflowDeleteRequest, ...grpc.CallOption) (*workflowpkg.WorkflowDeleteResponse, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) RetryWorkflow(context.Context, *workflowpkg.WorkflowRetryRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) ResubmitWorkflow(context.Context, *workflowpkg.WorkflowResubmitRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) ResumeWorkflow(context.Context, *workflowpkg.WorkflowResumeRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) SuspendWorkflow(context.Context, *workflowpkg.WorkflowSuspendRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) TerminateWorkflow(context.Context, *workflowpkg.WorkflowTerminateRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) StopWorkflow(context.Context, *workflowpkg.WorkflowStopRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) SetWorkflow(context.Context, *workflowpkg.WorkflowSetRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) LintWorkflow(_ context.Context, req *workflowpkg.WorkflowLintRequest, _ ...grpc.CallOption) (*wfv1.Workflow, error) { @@ -79,13 +79,13 @@ func (o OfflineWorkflowServiceClient) LintWorkflow(_ 
context.Context, req *workf } func (o OfflineWorkflowServiceClient) PodLogs(context.Context, *workflowpkg.WorkflowLogRequest, ...grpc.CallOption) (workflowpkg.WorkflowService_PodLogsClient, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) WorkflowLogs(context.Context, *workflowpkg.WorkflowLogRequest, ...grpc.CallOption) (workflowpkg.WorkflowService_WorkflowLogsClient, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowServiceClient) SubmitWorkflow(context.Context, *workflowpkg.WorkflowSubmitRequest, ...grpc.CallOption) (*wfv1.Workflow, error) { - return nil, OfflineErr + return nil, ErrOffline } diff --git a/pkg/apiclient/offline-workflow-template-service-client.go b/pkg/apiclient/offline-workflow-template-service-client.go index 429c232d1094..7cc999a26c98 100644 --- a/pkg/apiclient/offline-workflow-template-service-client.go +++ b/pkg/apiclient/offline-workflow-template-service-client.go @@ -19,23 +19,23 @@ type OfflineWorkflowTemplateServiceClient struct { var _ workflowtemplatepkg.WorkflowTemplateServiceClient = &OfflineWorkflowTemplateServiceClient{} func (o OfflineWorkflowTemplateServiceClient) CreateWorkflowTemplate(ctx context.Context, req *workflowtemplatepkg.WorkflowTemplateCreateRequest, _ ...grpc.CallOption) (*v1alpha1.WorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowTemplateServiceClient) GetWorkflowTemplate(ctx context.Context, req *workflowtemplatepkg.WorkflowTemplateGetRequest, _ ...grpc.CallOption) (*v1alpha1.WorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowTemplateServiceClient) ListWorkflowTemplates(ctx context.Context, req *workflowtemplatepkg.WorkflowTemplateListRequest, _ ...grpc.CallOption) (*v1alpha1.WorkflowTemplateList, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowTemplateServiceClient) UpdateWorkflowTemplate(ctx 
context.Context, req *workflowtemplatepkg.WorkflowTemplateUpdateRequest, _ ...grpc.CallOption) (*v1alpha1.WorkflowTemplate, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowTemplateServiceClient) DeleteWorkflowTemplate(ctx context.Context, req *workflowtemplatepkg.WorkflowTemplateDeleteRequest, _ ...grpc.CallOption) (*workflowtemplatepkg.WorkflowTemplateDeleteResponse, error) { - return nil, OfflineErr + return nil, ErrOffline } func (o OfflineWorkflowTemplateServiceClient) LintWorkflowTemplate(ctx context.Context, req *workflowtemplatepkg.WorkflowTemplateLintRequest, _ ...grpc.CallOption) (*v1alpha1.WorkflowTemplate, error) { diff --git a/pkg/apiclient/sensor/sensor.pb.go b/pkg/apiclient/sensor/sensor.pb.go index bb7c6e25c440..82018af4098c 100644 --- a/pkg/apiclient/sensor/sensor.pb.go +++ b/pkg/apiclient/sensor/sensor.pb.go @@ -6,7 +6,7 @@ package sensor import ( context "context" fmt "fmt" - v1alpha1 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1" + v1alpha1 "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1" proto "github.com/gogo/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" @@ -635,63 +635,63 @@ func init() { func init() { proto.RegisterFile("pkg/apiclient/sensor/sensor.proto", fileDescriptor_78ba963e1c6b5b55) } var fileDescriptor_78ba963e1c6b5b55 = []byte{ - // 887 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xdd, 0x6e, 0x1b, 0x45, - 0x14, 0xc7, 0x35, 0x4e, 0x9a, 0xd6, 0xc7, 0x0e, 0x8a, 0x26, 0x15, 0xb2, 0xb6, 0x21, 0x4a, 0x07, - 0xd4, 0x96, 0x48, 0xdd, 0x8d, 0x5b, 0x24, 0x10, 0x17, 0x08, 0x91, 0x44, 0xe1, 0xc2, 0x2a, 0xd5, - 0x06, 0x04, 0xe5, 0x06, 0x6d, 0xd6, 0x87, 0xc9, 0x36, 0xbb, 0x3b, 0xcb, 0xcc, 0xc4, 0x25, 0x42, - 0x48, 0x88, 0x1b, 0x1e, 0xa0, 0xc0, 0x15, 0x42, 0x82, 0x77, 0x41, 0xe2, 0x0a, 0x21, 0xf1, 0x02, - 0x28, 0xe2, 0x82, 0xc7, 0xa8, 0x66, 0x76, 0xbd, 0x1f, 0xb6, 
0xdb, 0xba, 0x8d, 0xaf, 0x3c, 0x9e, - 0x99, 0x73, 0xce, 0xef, 0x7f, 0xce, 0xec, 0x9c, 0x81, 0xeb, 0xd9, 0x09, 0xf7, 0x82, 0x2c, 0x0a, - 0xe3, 0x08, 0x53, 0xed, 0x29, 0x4c, 0x95, 0x90, 0xc5, 0x8f, 0x9b, 0x49, 0xa1, 0x05, 0x5d, 0xc9, - 0xff, 0x39, 0x1b, 0x5c, 0x08, 0x1e, 0xa3, 0xd9, 0xed, 0x05, 0x69, 0x2a, 0x74, 0xa0, 0x23, 0x91, - 0xaa, 0x7c, 0x97, 0xf3, 0xd6, 0xc9, 0x3b, 0xca, 0x8d, 0x84, 0x59, 0x4d, 0x82, 0xf0, 0x38, 0x4a, - 0x51, 0x9e, 0x79, 0x85, 0x73, 0xe5, 0x25, 0xa8, 0x03, 0x6f, 0xd4, 0xf7, 0x38, 0xa6, 0x28, 0x03, - 0x8d, 0xc3, 0xc2, 0x8a, 0x55, 0x56, 0x5e, 0x28, 0x24, 0xce, 0xda, 0xf3, 0x21, 0x8f, 0xf4, 0xf1, - 0xe9, 0x91, 0x1b, 0x8a, 0xc4, 0x0b, 0x24, 0x17, 0x99, 0x14, 0x0f, 0xed, 0xe0, 0x36, 0x8e, 0x30, - 0xd5, 0xaa, 0x8a, 0x52, 0xd0, 0x8f, 0xfa, 0x41, 0x9c, 0x1d, 0x07, 0x53, 0x9e, 0xd8, 0x0f, 0x04, - 0xe8, 0x20, 0x52, 0xfa, 0xd0, 0xee, 0x53, 0x3e, 0x7e, 0x75, 0x8a, 0x4a, 0xd3, 0x0d, 0x68, 0xa7, - 0x41, 0x82, 0x2a, 0x0b, 0x42, 0xec, 0x91, 0x2d, 0x72, 0xab, 0xed, 0x57, 0x13, 0xf4, 0x10, 0x3a, - 0x71, 0xa4, 0xf4, 0x47, 0x99, 0x55, 0xdb, 0x6b, 0x6d, 0x91, 0x5b, 0x9d, 0x3b, 0x7d, 0x37, 0x07, - 0x77, 0xeb, 0x72, 0xdd, 0xec, 0x84, 0x9b, 0x09, 0xe5, 0x1a, 0xb9, 0xee, 0xa8, 0xef, 0x0e, 0x2a, - 0x43, 0xbf, 0xee, 0x85, 0xfd, 0x4f, 0x60, 0x7d, 0x57, 0x62, 0xa0, 0x31, 0x67, 0x99, 0x0f, 0xe5, - 0x33, 0x28, 0x6a, 0x51, 0x50, 0xbc, 0xef, 0x56, 0xa9, 0x71, 0xc7, 0xa9, 0xb1, 0x83, 0x2f, 0xf2, - 0xd4, 0x54, 0x44, 0x45, 0x45, 0xc7, 0xa9, 0x71, 0x8b, 0xb0, 0x85, 0x3f, 0xfa, 0x00, 0x56, 0x43, - 0x8b, 0x33, 0x96, 0xb9, 0x64, 0x03, 0xdc, 0x9d, 0x4f, 0xe6, 0x6e, 0xdd, 0xd4, 0x6f, 0x7a, 0x62, - 0x3f, 0x13, 0x58, 0x3b, 0x40, 0xdd, 0xd4, 0x49, 0x61, 0xd9, 0xc8, 0x2a, 0x24, 0xda, 0x71, 0x53, - 0x7b, 0x6b, 0x52, 0xfb, 0x7d, 0x00, 0x8e, 0xba, 0x89, 0xb7, 0x33, 0x1f, 0xde, 0x41, 0x69, 0xe7, - 0xd7, 0x7c, 0xb0, 0xdf, 0x08, 0xac, 0x7f, 0x92, 0x0d, 0x5f, 0xb0, 0x06, 0x63, 0xf2, 0x56, 0x8d, - 0xbc, 0xaa, 0xcb, 0xd2, 0x62, 0xeb, 0xc2, 0x7e, 0x27, 0xb0, 0xbe, 0x87, 0x31, 0x4e, 0x32, 0xbe, - 
0x78, 0xfe, 0x1e, 0xc0, 0xea, 0xd0, 0x3a, 0x7a, 0xa9, 0x0a, 0xef, 0xd5, 0x4d, 0xfd, 0xa6, 0x27, - 0xf6, 0x2a, 0x5c, 0x6d, 0x32, 0xaa, 0x4c, 0xa4, 0x0a, 0xd9, 0x1f, 0x04, 0x68, 0xf1, 0xa9, 0x0d, - 0x04, 0x57, 0x2f, 0x9f, 0xdf, 0x2d, 0xe8, 0x68, 0x19, 0x71, 0x8e, 0xf2, 0x9e, 0x59, 0x5a, 0xb2, - 0x4b, 0xf5, 0x29, 0x63, 0xc5, 0x25, 0x66, 0xbd, 0xe5, 0xdc, 0xca, 0x8c, 0xe9, 0x01, 0xac, 0x66, - 0x62, 0x38, 0x10, 0x7c, 0xac, 0xf8, 0x92, 0x55, 0x7c, 0xbd, 0xa6, 0xd8, 0x35, 0x77, 0x8e, 0xd1, - 0x77, 0xbf, 0xbe, 0xd1, 0x6f, 0xda, 0xb1, 0x5f, 0x5b, 0x70, 0x65, 0x20, 0xf8, 0x7e, 0xaa, 0xe5, - 0xd9, 0x73, 0xe8, 0x37, 0x01, 0xf2, 0xca, 0xdd, 0xab, 0x34, 0xd4, 0x66, 0xe6, 0x50, 0x72, 0x15, - 0x2e, 0xc5, 0x38, 0xc2, 0xd8, 0xd2, 0xb6, 0xfd, 0xfc, 0x0f, 0x7d, 0x0f, 0x96, 0x75, 0x94, 0x60, - 0x6f, 0xc5, 0x4a, 0xd8, 0x9e, 0xaf, 0x68, 0x1f, 0x47, 0x09, 0xfa, 0xd6, 0x8e, 0xae, 0xc1, 0x52, - 0xa2, 0x78, 0xef, 0xb2, 0xf5, 0x69, 0x86, 0xf4, 0x06, 0xbc, 0x32, 0xc4, 0x0c, 0xd3, 0x21, 0xa6, - 0xe1, 0x99, 0x85, 0xb9, 0x62, 0x17, 0x27, 0x66, 0x29, 0x83, 0xae, 0x3d, 0xb3, 0xbb, 0x22, 0xd5, - 0xf8, 0xb5, 0xee, 0xb5, 0xed, 0xae, 0xc6, 0x1c, 0xfb, 0x8e, 0xc0, 0x5a, 0x5e, 0xe8, 0x4f, 0x03, - 0x1d, 0x1e, 0xef, 0x9b, 0x35, 0x53, 0x12, 0x7d, 0x96, 0x95, 0x47, 0xd4, 0x8c, 0xcd, 0x87, 0x22, - 0x8e, 0x1e, 0x62, 0xa8, 0x17, 0x77, 0x81, 0xe5, 0xfe, 0xee, 0xfc, 0x75, 0x19, 0x56, 0xf3, 0xa9, - 0x43, 0x94, 0xa3, 0x28, 0x44, 0xfa, 0x23, 0x81, 0x4e, 0xed, 0xb2, 0xa7, 0xce, 0xd8, 0xc5, 0x74, - 0x07, 0x70, 0xf6, 0x2e, 0xca, 0x61, 0x7c, 0xb2, 0xd7, 0xbf, 0xff, 0xe7, 0xbf, 0xc7, 0xad, 0xd7, - 0xe8, 0x35, 0xdb, 0xce, 0x46, 0xfd, 0xa2, 0x1f, 0x29, 0xef, 0x9b, 0xf2, 0x80, 0x7c, 0x4b, 0x53, - 0xe8, 0xd4, 0xbe, 0x89, 0x8a, 0x6a, 0xfa, 0x43, 0x71, 0xd6, 0x4a, 0xe2, 0xe2, 0xf0, 0x31, 0xcf, - 0x46, 0x78, 0x93, 0xde, 0x2c, 0x23, 0x68, 0x89, 0x41, 0x32, 0x2b, 0x90, 0x17, 0x0b, 0xae, 0x76, - 0x08, 0x95, 0xd0, 0xb5, 0x45, 0x99, 0x27, 0x0d, 0xbd, 0x26, 0x4c, 0x55, 0x4c, 0xb6, 0x6d, 0x03, - 0xbf, 0x41, 0xd9, 0xf3, 0x03, 0xef, 
0x10, 0xfa, 0x13, 0x81, 0x6e, 0xbd, 0xbb, 0xd1, 0x6b, 0x63, - 0xc7, 0x33, 0x7a, 0x9e, 0x73, 0xe1, 0x43, 0xc0, 0x6e, 0x58, 0xba, 0x2d, 0xf6, 0xac, 0xc4, 0xbf, - 0x4b, 0xb6, 0xe9, 0x2f, 0x04, 0xba, 0xf5, 0x1b, 0xbf, 0xe2, 0x9a, 0xd1, 0x07, 0x16, 0xc0, 0x75, - 0xdb, 0x72, 0xdd, 0x74, 0xd8, 0x33, 0xb8, 0xf2, 0xb1, 0xc5, 0x3b, 0x85, 0x6e, 0xfd, 0x1e, 0xad, - 0xe8, 0x66, 0x74, 0x00, 0x67, 0x63, 0xf6, 0x62, 0x71, 0xf5, 0x16, 0xf5, 0xda, 0x9e, 0x23, 0x32, - 0x7d, 0x4c, 0xa0, 0x5d, 0x36, 0x68, 0x5a, 0x9e, 0x81, 0xc9, 0x9e, 0xbd, 0x80, 0x7c, 0x4c, 0x9f, - 0xa2, 0xa7, 0x52, 0x7d, 0xb0, 0xff, 0xe7, 0xf9, 0x26, 0xf9, 0xfb, 0x7c, 0x93, 0xfc, 0x7b, 0xbe, - 0x49, 0x3e, 0x7f, 0xfb, 0xa9, 0x6f, 0xc0, 0x47, 0x42, 0x9e, 0x7c, 0x19, 0x8b, 0x47, 0xe5, 0x33, - 0xb0, 0xf1, 0x92, 0x3d, 0x5a, 0xb1, 0x2f, 0xbf, 0xbb, 0x4f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x54, - 0xc1, 0x1a, 0x86, 0xe8, 0x0a, 0x00, 0x00, + // 886 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5f, 0x6f, 0x1c, 0x35, + 0x10, 0x97, 0x2f, 0x69, 0xda, 0x9b, 0xbb, 0xa0, 0xc8, 0xa9, 0xd0, 0xe9, 0x1a, 0xa2, 0xd4, 0xa0, + 0xb6, 0x44, 0xea, 0x6e, 0xae, 0x45, 0x02, 0xf1, 0x80, 0x10, 0x49, 0x14, 0x1e, 0x4e, 0xa5, 0xda, + 0x80, 0xa0, 0xbc, 0xa0, 0xcd, 0xde, 0xe0, 0x6c, 0xb3, 0xbb, 0x5e, 0x6c, 0xe7, 0x4a, 0x84, 0x90, + 0x10, 0x2f, 0x7c, 0x80, 0x02, 0x4f, 0x08, 0x09, 0xbe, 0x0b, 0x12, 0x4f, 0x08, 0x89, 0x2f, 0x80, + 0x22, 0x1e, 0xf8, 0x18, 0x95, 0xbd, 0xbe, 0xfd, 0x93, 0x5c, 0xdb, 0x6b, 0x73, 0x4f, 0x3b, 0x3b, + 0xf6, 0xcc, 0xfc, 0x7e, 0x33, 0x63, 0x8f, 0xe1, 0x7a, 0x7e, 0xc4, 0xfd, 0x30, 0x8f, 0xa3, 0x24, + 0xc6, 0x4c, 0xfb, 0x0a, 0x33, 0x25, 0xa4, 0xfb, 0x78, 0xb9, 0x14, 0x5a, 0xd0, 0xa5, 0xe2, 0xaf, + 0xbf, 0xc6, 0x85, 0xe0, 0x09, 0x9a, 0xdd, 0x7e, 0x98, 0x65, 0x42, 0x87, 0x3a, 0x16, 0x99, 0x2a, + 0x76, 0xf5, 0xdf, 0x3a, 0x7a, 0x47, 0x79, 0xb1, 0x30, 0xab, 0x69, 0x18, 0x1d, 0xc6, 0x19, 0xca, + 0x13, 0xdf, 0x39, 0x57, 0x7e, 0x8a, 0x3a, 0xf4, 0xc7, 0x03, 0x9f, 0x63, 0x86, 
0x32, 0xd4, 0x38, + 0x72, 0x56, 0xac, 0xb2, 0xf2, 0x23, 0x21, 0x71, 0xda, 0x9e, 0x0f, 0x79, 0xac, 0x0f, 0x8f, 0x0f, + 0xbc, 0x48, 0xa4, 0x7e, 0x28, 0xb9, 0xc8, 0xa5, 0x78, 0x68, 0x85, 0xdb, 0x38, 0xc6, 0x4c, 0xab, + 0x2a, 0x8a, 0xfb, 0x1f, 0x0f, 0xc2, 0x24, 0x3f, 0x0c, 0xcf, 0x79, 0x62, 0x3f, 0x10, 0xa0, 0xc3, + 0x58, 0xe9, 0x7d, 0x4b, 0x48, 0x05, 0xf8, 0xd5, 0x31, 0x2a, 0x4d, 0xd7, 0xa0, 0x9d, 0x85, 0x29, + 0xaa, 0x3c, 0x8c, 0xb0, 0x47, 0x36, 0xc8, 0xad, 0x76, 0x50, 0x29, 0xe8, 0x3e, 0x74, 0x92, 0x58, + 0xe9, 0x8f, 0x72, 0xcb, 0xb6, 0xd7, 0xda, 0x20, 0xb7, 0x3a, 0x77, 0x06, 0x5e, 0x01, 0xdc, 0xab, + 0xd3, 0xf5, 0xf2, 0x23, 0x6e, 0x14, 0xca, 0x33, 0x74, 0xbd, 0xf1, 0xc0, 0x1b, 0x56, 0x86, 0x41, + 0xdd, 0x0b, 0xfb, 0x9f, 0xc0, 0xea, 0xb6, 0xc4, 0x50, 0x63, 0x81, 0x65, 0x36, 0x28, 0x9f, 0x81, + 0xab, 0x85, 0x43, 0xf1, 0xbe, 0x57, 0xa5, 0xc6, 0x9b, 0xa4, 0xc6, 0x0a, 0x5f, 0x14, 0xa9, 0xa8, + 0x10, 0xb9, 0xff, 0x49, 0x6a, 0x3c, 0x17, 0xd6, 0xf9, 0xa3, 0x0f, 0x60, 0x39, 0xb2, 0x70, 0x26, + 0x34, 0x17, 0x6c, 0x80, 0xbb, 0xb3, 0xd1, 0xdc, 0xae, 0x9b, 0x06, 0x4d, 0x4f, 0xec, 0x67, 0x02, + 0x2b, 0x7b, 0xa8, 0x9b, 0x3c, 0x29, 0x2c, 0x1a, 0x5a, 0x8e, 0xa2, 0x95, 0x9b, 0xdc, 0x5b, 0x67, + 0xb9, 0xdf, 0x07, 0xe0, 0xa8, 0x9b, 0xf0, 0xb6, 0x66, 0x83, 0xb7, 0x57, 0xda, 0x05, 0x35, 0x1f, + 0xec, 0x37, 0x02, 0xab, 0x9f, 0xe4, 0xa3, 0x17, 0xac, 0xc1, 0x04, 0x79, 0xab, 0x86, 0xbc, 0xaa, + 0xcb, 0xc2, 0x7c, 0xeb, 0xc2, 0x7e, 0x27, 0xb0, 0xba, 0x83, 0x09, 0x9e, 0xc5, 0xf8, 0xe2, 0xf9, + 0x7b, 0x00, 0xcb, 0x23, 0xeb, 0xe8, 0xa5, 0x2a, 0xbc, 0x53, 0x37, 0x0d, 0x9a, 0x9e, 0xd8, 0xab, + 0x70, 0xb5, 0x89, 0x51, 0xe5, 0x22, 0x53, 0xc8, 0xfe, 0x20, 0x40, 0xdd, 0x51, 0x1b, 0x0a, 0xae, + 0x5e, 0x3e, 0xbf, 0x1b, 0xd0, 0xd1, 0x32, 0xe6, 0x1c, 0xe5, 0x3d, 0xb3, 0xb4, 0x60, 0x97, 0xea, + 0x2a, 0x63, 0xc5, 0x25, 0xe6, 0xbd, 0xc5, 0xc2, 0xca, 0xc8, 0x74, 0x0f, 0x96, 0x73, 0x31, 0x1a, + 0x0a, 0x3e, 0x61, 0x7c, 0xc9, 0x32, 0xbe, 0x5e, 0x63, 0xec, 0x99, 0x3b, 0xc7, 0xf0, 0xbb, 0x5f, + 0xdf, 0x18, 0x34, 
0xed, 0xd8, 0xaf, 0x2d, 0xb8, 0x32, 0x14, 0x7c, 0x37, 0xd3, 0xf2, 0xe4, 0x39, + 0xe8, 0xd7, 0x01, 0x8a, 0xca, 0xdd, 0xab, 0x38, 0xd4, 0x34, 0x33, 0x30, 0xb9, 0x0a, 0x97, 0x12, + 0x1c, 0x63, 0x62, 0xd1, 0xb6, 0x83, 0xe2, 0x87, 0xbe, 0x07, 0x8b, 0x3a, 0x4e, 0xb1, 0xb7, 0x64, + 0x29, 0x6c, 0xce, 0x56, 0xb4, 0x8f, 0xe3, 0x14, 0x03, 0x6b, 0x47, 0x57, 0x60, 0x21, 0x55, 0xbc, + 0x77, 0xd9, 0xfa, 0x34, 0x22, 0xbd, 0x01, 0xaf, 0x8c, 0x30, 0xc7, 0x6c, 0x84, 0x59, 0x74, 0x62, + 0xc1, 0x5c, 0xb1, 0x8b, 0x67, 0xb4, 0x94, 0x41, 0xd7, 0xf6, 0xe8, 0xb6, 0xc8, 0x34, 0x7e, 0xad, + 0x7b, 0x6d, 0xbb, 0xab, 0xa1, 0x63, 0xdf, 0x11, 0x58, 0x29, 0x0a, 0xfd, 0x69, 0xa8, 0xa3, 0xc3, + 0x5d, 0xb3, 0x66, 0x4a, 0xa2, 0x4f, 0xf2, 0xb2, 0x45, 0x8d, 0x6c, 0x0e, 0x8a, 0x38, 0x78, 0x88, + 0x91, 0x9e, 0xdf, 0x05, 0x56, 0xf8, 0xbb, 0xf3, 0xd7, 0x65, 0x58, 0x2e, 0x54, 0xfb, 0x28, 0xc7, + 0x71, 0x84, 0xf4, 0x47, 0x02, 0x9d, 0xda, 0x65, 0x4f, 0xfb, 0x9e, 0x9b, 0x6a, 0xe7, 0x27, 0x40, + 0x7f, 0xe7, 0xa2, 0x38, 0x8c, 0x4f, 0xf6, 0xfa, 0xf7, 0xff, 0xfc, 0xf7, 0xb8, 0xf5, 0x1a, 0xbd, + 0x66, 0xc7, 0xd9, 0x78, 0xe0, 0xc6, 0xa8, 0xf2, 0xbf, 0x29, 0x1b, 0xe4, 0x5b, 0x9a, 0x41, 0xa7, + 0x76, 0x26, 0x2a, 0x54, 0xe7, 0x0f, 0x4a, 0x7f, 0xa5, 0x44, 0xec, 0x9a, 0x8f, 0xf9, 0x36, 0xc2, + 0x9b, 0xf4, 0x66, 0x19, 0x41, 0x4b, 0x0c, 0xd3, 0x69, 0x81, 0xfc, 0x44, 0x70, 0xb5, 0x45, 0xa8, + 0x84, 0xae, 0x2d, 0xca, 0x2c, 0x69, 0xe8, 0x35, 0xc1, 0x54, 0xc5, 0x64, 0x9b, 0x36, 0xf0, 0x1b, + 0x94, 0x3d, 0x3f, 0xf0, 0x16, 0xa1, 0x3f, 0x11, 0xe8, 0xd6, 0xa7, 0x1b, 0xbd, 0x36, 0x71, 0x3c, + 0x65, 0xe6, 0xf5, 0x2f, 0xdc, 0x04, 0xec, 0x86, 0x45, 0xb7, 0xc1, 0x9e, 0x95, 0xf8, 0x77, 0xc9, + 0x26, 0xfd, 0x85, 0x40, 0xb7, 0x7e, 0xe3, 0x57, 0xb8, 0xa6, 0xcc, 0x81, 0x39, 0xe0, 0xba, 0x6d, + 0x71, 0xdd, 0xec, 0xb3, 0x67, 0xe0, 0x2a, 0x64, 0x0b, 0xef, 0x18, 0xba, 0xf5, 0x7b, 0xb4, 0x42, + 0x37, 0x65, 0x02, 0xf4, 0xd7, 0xa6, 0x2f, 0xba, 0xab, 0xd7, 0xd5, 0x6b, 0x73, 0x86, 0xc8, 0xf4, + 0x31, 0x81, 0x76, 0x39, 0xa0, 0x69, 0xd9, 0x03, 0x67, 
0x67, 0xf6, 0x1c, 0xf2, 0x71, 0xbe, 0x8b, + 0x9e, 0x8a, 0xea, 0x83, 0xdd, 0x3f, 0x4f, 0xd7, 0xc9, 0xdf, 0xa7, 0xeb, 0xe4, 0xdf, 0xd3, 0x75, + 0xf2, 0xf9, 0xdb, 0x4f, 0x7d, 0x03, 0x3e, 0x12, 0xf2, 0xe8, 0xcb, 0x44, 0x3c, 0x2a, 0x9f, 0x81, + 0x8d, 0x97, 0xec, 0xc1, 0x92, 0x7d, 0xf9, 0xdd, 0x7d, 0x12, 0x00, 0x00, 0xff, 0xff, 0xb5, 0x3b, + 0x3b, 0x34, 0xe8, 0x0a, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. diff --git a/pkg/apiclient/sensor/sensor.proto b/pkg/apiclient/sensor/sensor.proto index 07b9a595ad93..f081d351f623 100644 --- a/pkg/apiclient/sensor/sensor.proto +++ b/pkg/apiclient/sensor/sensor.proto @@ -4,7 +4,7 @@ option go_package = "github.com/argoproj/argo-workflows/pkg/apiclient/sensor"; import "google/api/annotations.proto"; import "k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto"; import "k8s.io/api/core/v1/generated.proto"; -import "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1/generated.proto"; +import "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1/generated.proto"; package sensor; @@ -15,7 +15,7 @@ message ListSensorsRequest { message CreateSensorRequest { string namespace = 1; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 2; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor sensor = 2; k8s.io.apimachinery.pkg.apis.meta.v1.CreateOptions createOptions = 3; } @@ -28,7 +28,7 @@ message GetSensorRequest { message UpdateSensorRequest { string namespace = 1; string name = 2; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor sensor = 3; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor sensor = 3; } message DeleteSensorRequest { @@ -68,11 +68,11 @@ message LogEntry { message SensorWatchEvent { string type = 1; - github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor object = 2; + github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor object = 2; } service SensorService { - rpc 
ListSensors(ListSensorsRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.SensorList) { + rpc ListSensors(ListSensorsRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.SensorList) { option (google.api.http).get = "/api/v1/sensors/{namespace}"; } rpc SensorsLogs(SensorsLogsRequest) returns (stream LogEntry) { @@ -81,13 +81,13 @@ service SensorService { rpc WatchSensors(ListSensorsRequest) returns (stream SensorWatchEvent) { option (google.api.http).get = "/api/v1/stream/sensors/{namespace}"; } - rpc CreateSensor(CreateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + rpc CreateSensor(CreateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor) { option (google.api.http) = { post : "/api/v1/sensors/{namespace}" body : "*" }; } - rpc UpdateSensor(UpdateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + rpc UpdateSensor(UpdateSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor) { option (google.api.http) = { put : "/api/v1/sensors/{namespace}/{name}" body : "*" @@ -96,7 +96,7 @@ service SensorService { rpc DeleteSensor(DeleteSensorRequest) returns (DeleteSensorResponse) { option (google.api.http).delete = "/api/v1/sensors/{namespace}/{name}"; } - rpc GetSensor(GetSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.sensor.v1alpha1.Sensor) { + rpc GetSensor(GetSensorRequest) returns (github.com.argoproj.argo_events.pkg.apis.events.v1alpha1.Sensor) { option (google.api.http).get = "/api/v1/sensors/{namespace}/{name}"; } } diff --git a/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go b/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go index 5e57781e0966..1ce9f44b0e90 100644 --- a/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go +++ b/pkg/apiclient/workflow/mocks/WorkflowServiceClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. 
DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks diff --git a/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go b/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go index c37e9c7e8afa..f77a6a60a788 100644 --- a/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go +++ b/pkg/apiclient/workflowtemplate/mocks/WorkflowTemplateServiceClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks diff --git a/pkg/apis/workflow/v1alpha1/anystring_test.go b/pkg/apis/workflow/v1alpha1/anystring_test.go index b2ef224fbef5..231ebeaea798 100644 --- a/pkg/apis/workflow/v1alpha1/anystring_test.go +++ b/pkg/apis/workflow/v1alpha1/anystring_test.go @@ -20,7 +20,7 @@ func TestAnyString(t *testing.T) { require.NoError(t, err) assert.Equal(t, AnyStringPtr(""), i) - assert.Equal(t, "", i.String(), "string value does not have quotes") + assert.Empty(t, i.String(), "string value does not have quotes") }) t.Run("String", func(t *testing.T) { x := AnyStringPtr("my-string") diff --git a/pkg/apis/workflow/v1alpha1/cluster_workflow_template_types.go b/pkg/apis/workflow/v1alpha1/cluster_workflow_template_types.go index a9c27f620e8d..a22d03414e39 100644 --- a/pkg/apis/workflow/v1alpha1/cluster_workflow_template_types.go +++ b/pkg/apis/workflow/v1alpha1/cluster_workflow_template_types.go @@ -25,7 +25,7 @@ func (w ClusterWorkflowTemplates) Len() int { } func (w ClusterWorkflowTemplates) Less(i, j int) bool { - return strings.Compare(w[j].ObjectMeta.Name, w[i].ObjectMeta.Name) > 0 + return strings.Compare(w[j].Name, w[i].Name) > 0 } func (w ClusterWorkflowTemplates) Swap(i, j int) { diff --git a/pkg/apis/workflow/v1alpha1/item_test.go b/pkg/apis/workflow/v1alpha1/item_test.go index b468426fc7b4..1e22dd4eeacc 100644 --- a/pkg/apis/workflow/v1alpha1/item_test.go +++ b/pkg/apis/workflow/v1alpha1/item_test.go @@ -34,7 +34,7 @@ func 
runItemTest(t *testing.T, data string, expectedType Type) { assert.Equal(t, expectedType, itm.GetType()) jsonBytes, err := json.Marshal(itm) require.NoError(t, err) - assert.Equal(t, data, string(jsonBytes), "marshalling is symmetric") + assert.JSONEq(t, data, string(jsonBytes), "marshalling is symmetric") if strings.HasPrefix(data, `"`) { assert.Equal(t, data, fmt.Sprintf("\"%v\"", itm)) assert.Equal(t, data, fmt.Sprintf("\"%s\"", itm)) diff --git a/pkg/apis/workflow/v1alpha1/marshall.go b/pkg/apis/workflow/v1alpha1/marshall.go index 2d35e5ba953a..2c2dd2b5aee1 100644 --- a/pkg/apis/workflow/v1alpha1/marshall.go +++ b/pkg/apis/workflow/v1alpha1/marshall.go @@ -20,18 +20,19 @@ func MustUnmarshal(text, v interface{}) { if len(x) == 0 { panic("no text to unmarshal") } - if x[0] == '@' { + switch x[0] { + case '@': filename := string(x[1:]) y, err := os.ReadFile(filepath.Clean(filename)) if err != nil { panic(fmt.Errorf("failed to read file %s: %w", filename, err)) } MustUnmarshal(y, v) - } else if x[0] == '{' { + case '{': if err := json.Unmarshal(x, v); err != nil { panic(fmt.Errorf("failed to unmarshal JSON %q: %w", string(x), err)) } - } else { + default: if err := yaml.UnmarshalStrict(x, v); err != nil { panic(fmt.Errorf("failed to unmarshal YAML %q: %w", string(x), err)) } diff --git a/pkg/apis/workflow/v1alpha1/plugin_types.go b/pkg/apis/workflow/v1alpha1/plugin_types.go index a505a2aa261c..10442badfe26 100644 --- a/pkg/apis/workflow/v1alpha1/plugin_types.go +++ b/pkg/apis/workflow/v1alpha1/plugin_types.go @@ -18,7 +18,7 @@ func (p *Plugin) UnmarshalJSON(value []byte) error { // by validating the structure in UnmarshallJSON, we prevent bad data entering the system at the point of // parsing, which means we do not need validate m := map[string]interface{}{} - if err := json.Unmarshal(p.Object.Value, &m); err != nil { + if err := json.Unmarshal(p.Value, &m); err != nil { return err } numKeys := len(m) diff --git 
a/pkg/apis/workflow/v1alpha1/workflow_template_types.go b/pkg/apis/workflow/v1alpha1/workflow_template_types.go index 1317fc18b2a5..737ac75a4e5b 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_template_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_template_types.go @@ -24,7 +24,7 @@ func (w WorkflowTemplates) Len() int { } func (w WorkflowTemplates) Less(i, j int) bool { - return strings.Compare(w[j].ObjectMeta.Name, w[i].ObjectMeta.Name) > 0 + return strings.Compare(w[j].Name, w[i].Name) > 0 } func (w WorkflowTemplates) Swap(i, j int) { diff --git a/pkg/apis/workflow/v1alpha1/workflow_types.go b/pkg/apis/workflow/v1alpha1/workflow_types.go index 7b3419455945..049f155e494c 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types.go @@ -162,9 +162,9 @@ type Workflows []Workflow func (w Workflows) Len() int { return len(w) } func (w Workflows) Swap(i, j int) { w[i], w[j] = w[j], w[i] } func (w Workflows) Less(i, j int) bool { - iStart := w[i].ObjectMeta.CreationTimestamp + iStart := w[i].CreationTimestamp iFinish := w[i].Status.FinishedAt - jStart := w[j].ObjectMeta.CreationTimestamp + jStart := w[j].CreationTimestamp jFinish := w[j].Status.FinishedAt if iFinish.IsZero() && jFinish.IsZero() { return !iStart.Before(&jStart) @@ -230,7 +230,7 @@ func (w *Workflow) GetArtifactGCStrategy(a *Artifact) ArtifactGCStrategy { var ( WorkflowCreatedAfter = func(t time.Time) WorkflowPredicate { return func(wf Workflow) bool { - return wf.ObjectMeta.CreationTimestamp.After(t) + return wf.CreationTimestamp.After(t) } } WorkflowFinishedBefore = func(t time.Time) WorkflowPredicate { @@ -240,7 +240,7 @@ var ( } WorkflowRanBetween = func(startTime time.Time, endTime time.Time) WorkflowPredicate { return func(wf Workflow) bool { - return wf.ObjectMeta.CreationTimestamp.After(startTime) && !wf.Status.FinishedAt.IsZero() && wf.Status.FinishedAt.Time.Before(endTime) + return wf.CreationTimestamp.After(startTime) && 
!wf.Status.FinishedAt.IsZero() && wf.Status.FinishedAt.Time.Before(endTime) } } ) @@ -501,7 +501,7 @@ func (wf *Workflow) GetSemaphoreKeys() []string { if wf.Spec.WorkflowTemplateRef == nil { templates = wf.Spec.Templates if wf.Spec.Synchronization != nil { - if configMapRef := wf.Spec.Synchronization.getSemaphoreConfigMapRef(); configMapRef != nil { + for _, configMapRef := range wf.Spec.Synchronization.getSemaphoreConfigMapRefs() { key := fmt.Sprintf("%s/%s", namespace, configMapRef.Name) keyMap[key] = true } @@ -509,7 +509,7 @@ func (wf *Workflow) GetSemaphoreKeys() []string { } else if wf.Status.StoredWorkflowSpec != nil { templates = wf.Status.StoredWorkflowSpec.Templates if wf.Status.StoredWorkflowSpec.Synchronization != nil { - if configMapRef := wf.Status.StoredWorkflowSpec.Synchronization.getSemaphoreConfigMapRef(); configMapRef != nil { + for _, configMapRef := range wf.Status.StoredWorkflowSpec.Synchronization.getSemaphoreConfigMapRefs() { key := fmt.Sprintf("%s/%s", namespace, configMapRef.Name) keyMap[key] = true } @@ -518,7 +518,7 @@ func (wf *Workflow) GetSemaphoreKeys() []string { for _, tmpl := range templates { if tmpl.Synchronization != nil { - if configMapRef := tmpl.Synchronization.getSemaphoreConfigMapRef(); configMapRef != nil { + for _, configMapRef := range tmpl.Synchronization.getSemaphoreConfigMapRefs() { key := fmt.Sprintf("%s/%s", namespace, configMapRef.Name) keyMap[key] = true } @@ -1662,11 +1662,18 @@ type Synchronization struct { Mutexes []*Mutex `json:"mutexes,omitempty" protobuf:"bytes,4,opt,name=mutexes"` } -func (s *Synchronization) getSemaphoreConfigMapRef() *apiv1.ConfigMapKeySelector { +func (s *Synchronization) getSemaphoreConfigMapRefs() []*apiv1.ConfigMapKeySelector { + selectors := make([]*apiv1.ConfigMapKeySelector, 0) if s.Semaphore != nil && s.Semaphore.ConfigMapKeyRef != nil { - return s.Semaphore.ConfigMapKeyRef + selectors = append(selectors, s.Semaphore.ConfigMapKeyRef) } - return nil + + for _, semaphore := range 
s.Semaphores { + if semaphore.ConfigMapKeyRef != nil { + selectors = append(selectors, semaphore.ConfigMapKeyRef) + } + } + return selectors } // Synchronization selector @@ -2389,7 +2396,7 @@ func (ws *WorkflowStatus) GetDuration() time.Duration { if ws.FinishedAt.IsZero() { return 0 } - return ws.FinishedAt.Time.Sub(ws.StartedAt.Time) + return ws.FinishedAt.Sub(ws.StartedAt.Time) } // Pending returns whether or not the node is in pending state @@ -3434,12 +3441,12 @@ func (wf *Workflow) GetWorkflowSpec() WorkflowSpec { // NodeID creates a deterministic node ID based on a node name func (wf *Workflow) NodeID(name string) string { - if name == wf.ObjectMeta.Name { - return wf.ObjectMeta.Name + if name == wf.Name { + return wf.Name } h := fnv.New32a() _, _ = h.Write([]byte(name)) - return fmt.Sprintf("%s-%v", wf.ObjectMeta.Name, h.Sum32()) + return fmt.Sprintf("%s-%v", wf.Name, h.Sum32()) } // GetStoredTemplate retrieves a template from stored templates of the workflow. diff --git a/pkg/apis/workflow/v1alpha1/workflow_types_test.go b/pkg/apis/workflow/v1alpha1/workflow_types_test.go index c932f97a8684..3a6ba0507d1f 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types_test.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types_test.go @@ -204,7 +204,7 @@ func TestArtifact_ValidatePath(t *testing.T) { a1 := Artifact{Name: "a1", Path: ""} err := a1.CleanPath() require.EqualError(t, err, "Artifact 'a1' did not specify a path") - assert.Equal(t, "", a1.Path) + assert.Empty(t, a1.Path) }) t.Run("directory traversal above safe base dir fails", func(t *testing.T) { @@ -607,7 +607,7 @@ func TestArtifactRepositoryRef_GetConfigMapOr(t *testing.T) { func TestArtifactRepositoryRef_GetKeyOr(t *testing.T) { var r *ArtifactRepositoryRef - assert.Equal(t, "", r.GetKeyOr("")) + assert.Empty(t, r.GetKeyOr("")) assert.Equal(t, "my-key", (&ArtifactRepositoryRef{}).GetKeyOr("my-key")) assert.Equal(t, "my-key", (&ArtifactRepositoryRef{Key: "my-key"}).GetKeyOr("")) } @@ -754,7 +754,7 @@ func 
TestNestedChildren(t *testing.T) { assert.False(t, ok, "got %s", child.Name) found[child.Name] = true } - assert.Equal(t, len(nodes), len(found)) + assert.Len(t, found, len(nodes)) }) } @@ -967,7 +967,7 @@ func TestWorkflow_SearchArtifacts(t *testing.T) { countArtifactName := func(ars ArtifactSearchResults, name string) int { count := 0 for _, ar := range ars { - if ar.Artifact.Name == name { + if ar.Name == name { count++ } } @@ -1022,7 +1022,7 @@ func TestWorkflow_SearchArtifacts(t *testing.T) { queriedArtifactSearchResults = wf.SearchArtifacts(query) assert.NotNil(t, queriedArtifactSearchResults) assert.Len(t, queriedArtifactSearchResults, 1) - assert.Equal(t, "artifact-foobar", queriedArtifactSearchResults[0].Artifact.Name) + assert.Equal(t, "artifact-foobar", queriedArtifactSearchResults[0].Name) assert.Equal(t, "node-bar", queriedArtifactSearchResults[0].NodeID) // artifact name @@ -1031,7 +1031,7 @@ func TestWorkflow_SearchArtifacts(t *testing.T) { queriedArtifactSearchResults = wf.SearchArtifacts(query) assert.NotNil(t, queriedArtifactSearchResults) assert.Len(t, queriedArtifactSearchResults, 1) - assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Artifact.Name) + assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Name) assert.Equal(t, "node-foo", queriedArtifactSearchResults[0].NodeID) // node id @@ -1058,7 +1058,7 @@ func TestWorkflow_SearchArtifacts(t *testing.T) { queriedArtifactSearchResults = wf.SearchArtifacts(query) assert.NotNil(t, queriedArtifactSearchResults) assert.Len(t, queriedArtifactSearchResults, 1) - assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Artifact.Name) + assert.Equal(t, "artifact-foo", queriedArtifactSearchResults[0].Name) assert.Equal(t, "node-foo", queriedArtifactSearchResults[0].NodeID) } @@ -1118,11 +1118,22 @@ func TestWorkflow_GetSemaphoreKeys(t *testing.T) { { Name: "t1", Synchronization: &Synchronization{ - Semaphore: &SemaphoreRef{ConfigMapKeyRef: &corev1.ConfigMapKeySelector{ - 
LocalObjectReference: corev1.LocalObjectReference{ - Name: "template", + Semaphores: []*SemaphoreRef{ + { + ConfigMapKeyRef: &corev1.ConfigMapKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: "template", + }, + }, }, - }}, + { + ConfigMapKeyRef: &corev1.ConfigMapKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: "template-b", + }, + }, + }, + }, }, }, { @@ -1147,9 +1158,10 @@ func TestWorkflow_GetSemaphoreKeys(t *testing.T) { }, } keys = wf.GetSemaphoreKeys() - assert.Len(keys, 3) + assert.Len(keys, 4) assert.Contains(keys, "test/test") assert.Contains(keys, "test/template") + assert.Contains(keys, "test/template-b") assert.Contains(keys, "test/template1") spec := wf.Spec.DeepCopy() @@ -1160,9 +1172,10 @@ func TestWorkflow_GetSemaphoreKeys(t *testing.T) { } wf.Status.StoredWorkflowSpec = spec keys = wf.GetSemaphoreKeys() - assert.Len(keys, 3) + assert.Len(keys, 4) assert.Contains(keys, "test/test") assert.Contains(keys, "test/template") + assert.Contains(keys, "test/template-b") assert.Contains(keys, "test/template1") } @@ -1380,7 +1393,7 @@ func TestDAGTask_GetExitTemplate(t *testing.T) { } task := DAGTask{ Hooks: map[LifecycleEvent]LifecycleHook{ - ExitLifecycleEvent: LifecycleHook{ + ExitLifecycleEvent: { Template: "test", Arguments: args, }, @@ -1408,7 +1421,7 @@ func TestStep_GetExitTemplate(t *testing.T) { } task := WorkflowStep{ Hooks: map[LifecycleEvent]LifecycleHook{ - ExitLifecycleEvent: LifecycleHook{ + ExitLifecycleEvent: { Template: "test", Arguments: args, }, @@ -1607,7 +1620,7 @@ func TestInlineStore(t *testing.T) { { Name: "step-template", Steps: []ParallelSteps{ - ParallelSteps{ + { []WorkflowStep{ { Name: "hello1", diff --git a/pkg/plugins/executor/swagger.yml b/pkg/plugins/executor/swagger.yml index 5d9226a62da0..19a601b50a75 100644 --- a/pkg/plugins/executor/swagger.yml +++ b/pkg/plugins/executor/swagger.yml @@ -328,6 +328,7 @@ definitions: Must be a filesystem type supported by the host operating 
system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. +optional + +default="ext4" type: string kind: $ref: '#/definitions/AzureDataDiskKind' @@ -336,6 +337,7 @@ definitions: readOnly Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. +optional + +default=false type: boolean title: AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. type: object @@ -563,7 +565,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -591,7 +592,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -633,7 +633,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -686,7 +685,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -1598,6 +1596,7 @@ definitions: +optional +default="" type: string + title: GRPCAction specifies an action involving a GRPC service. type: object Gauge: description: Gauge is a Gauge prometheus metric @@ -1951,6 +1950,7 @@ definitions: iscsiInterface is the interface Name that uses an iSCSI transport. Defaults to 'default' (tcp). +optional + +default="default" type: string lun: description: lun represents iSCSI Target Lun number. @@ -1980,6 +1980,22 @@ definitions: type: string title: Represents an ISCSI disk. type: object + ImageVolumeSource: + properties: + pullPolicy: + $ref: '#/definitions/PullPolicy' + reference: + description: |- + Required: Image or artifact reference to be used. + Behaves in the same way as pod.spec.containers[*].image. + Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. + More info: https://kubernetes.io/docs/concepts/containers/images + This field is optional to allow higher level config management to default or override + container images in workload controllers like Deployments and StatefulSets. + +optional + type: string + title: ImageVolumeSource represents a image volume resource. + type: object Inputs: description: Inputs are the mechanism for passing parameters, artifacts, volumes from one template to another properties: @@ -2143,8 +2159,16 @@ definitions: type: object LocalObjectReference: description: |- - LocalObjectReference contains enough information to let you locate the - referenced object inside the same namespace. + New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. + 1. Invalid usage help. It is impossible to add specific help for individual usage. 
In most embedded usages, there are particular + restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". + Those cannot be well described when embedded. + 2. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. + 3. We cannot easily change it. Because this type is embedded in many locations, updates to this type + will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. + + Instead of using this type, create a locally provided and used type that is well-focused on your reference. + For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 . +structType=atomic properties: name: @@ -2153,13 +2177,15 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" +kubebuilder:default="" TODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896. type: string + title: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. type: object ManagedFieldsEntry: description: |- @@ -2629,7 +2655,7 @@ definitions: set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ - (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled. 
+ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default). +featureGate=VolumeAttributesClass +optional type: string @@ -2922,7 +2948,8 @@ definitions: pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. - This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. + This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). + +listType=atomic +optional items: @@ -2938,7 +2965,8 @@ definitions: pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. - This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. + This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). + +listType=atomic +optional items: @@ -3006,6 +3034,9 @@ definitions: when volume is mounted. +enum type: string + PodSELinuxChangePolicy: + title: PodSELinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. + type: string PodSecurityContext: description: |- Some fields are also present in container.securityContext. Field values of @@ -3062,18 +3093,22 @@ definitions: +optional format: int64 type: integer + seLinuxChangePolicy: + $ref: '#/definitions/PodSELinuxChangePolicy' seLinuxOptions: $ref: '#/definitions/SELinuxOptions' seccompProfile: $ref: '#/definitions/SeccompProfile' supplementalGroups: description: |- - A list of groups applied to the first process run in each container, in addition - to the container's primary GID, the fsGroup (if specified), and group memberships - defined in the container image for the uid of the container process. 
If unspecified, - no additional groups are added to any container. Note that group memberships - defined in the container image for the uid of the container process are still effective, - even if they are not included in this list. + A list of groups applied to the first process run in each container, in + addition to the container's primary GID and fsGroup (if specified). If + the SupplementalGroupsPolicy feature is enabled, the + supplementalGroupsPolicy field determines whether these are in addition + to or instead of any group memberships defined in the container image. + If unspecified, no additional groups are added, though group memberships + defined in the container image may still be used, depending on the + supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows. +optional +listType=atomic @@ -3081,6 +3116,8 @@ definitions: format: int64 type: integer type: array + supplementalGroupsPolicy: + $ref: '#/definitions/SupplementalGroupsPolicy' sysctls: description: |- Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported @@ -3213,7 +3250,8 @@ definitions: type: integer sources: description: |- - sources is the list of volume projections + sources is the list of volume projections. Each entry in this list + handles one source. +optional +listType=atomic items: @@ -3386,6 +3424,7 @@ definitions: Default is /etc/ceph/keyring. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional + +default="/etc/ceph/keyring" type: string monitors: description: |- @@ -3401,6 +3440,7 @@ definitions: Default is rbd. More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional + +default="rbd" type: string readOnly: description: |- @@ -3417,6 +3457,7 @@ definitions: Default is admin. 
More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it +optional + +default="admin" type: string title: Represents a Rados Block Device mount that lasts the lifetime of a pod. type: object @@ -3438,6 +3479,14 @@ definitions: the Pod where this field is used. It makes that resource available inside a container. type: string + request: + description: |- + Request is the name chosen for a request in the referenced claim. + If empty, everything from the claim is made available, otherwise + only the result of this request. + + +optional + type: string title: ResourceClaim references one entry in PodSpec.ResourceClaims. type: object ResourceFieldSelector: @@ -3649,6 +3698,7 @@ definitions: Ex. "ext4", "xfs", "ntfs". Default is "xfs". +optional + +default="xfs" type: string gateway: description: gateway is the host address of the ScaleIO API Gateway. @@ -3676,6 +3726,7 @@ definitions: storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. Default is ThinProvisioned. +optional + +default="ThinProvisioned" type: string storagePool: description: |- @@ -3910,7 +3961,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -3938,7 +3988,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -3979,7 +4028,6 @@ definitions: This field is effectively required, but due to backwards compatibility is allowed to be empty. 
Instances of this type with an empty value here are almost certainly wrong. - TODO: Add other useful fields. apiVersion, kind, uid? More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names +optional +default="" @@ -4217,6 +4265,12 @@ definitions: type: string title: Represents a StorageOS persistent volume resource. type: object + SupplementalGroupsPolicy: + description: |- + SupplementalGroupsPolicy defines how supplemental groups + of the first container processes are calculated. + +enum + type: string SuppliedValueFrom: title: SuppliedValueFrom is a placeholder for a value to be filled in directly, either through the CLI, API, etc. type: object @@ -4508,8 +4562,19 @@ definitions: type: integer TypedLocalObjectReference: description: |- - TypedLocalObjectReference contains enough information to let you locate the - typed referenced object inside the same namespace. + New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. + 1. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular + restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". + Those cannot be well described when embedded. + 2. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. + 3. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity + during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple + and the version of the actual struct is irrelevant. + 4. We cannot easily change it. Because this type is embedded in many locations, updates to this type + will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. 
+ + Instead of using this type, create a locally provided and used type that is well-focused on your reference. + For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 . +structType=atomic properties: apiGroup: @@ -4525,8 +4590,12 @@ definitions: name: description: Name is the name of resource being referenced type: string + title: |- + TypedLocalObjectReference contains enough information to let you locate the + typed referenced object inside the same namespace. type: object TypedObjectReference: + description: TypedObjectReference contains enough information to let you locate the typed referenced object properties: apiGroup: description: |- @@ -4818,6 +4887,8 @@ definitions: $ref: '#/definitions/GlusterfsVolumeSource' hostPath: $ref: '#/definitions/HostPathVolumeSource' + image: + $ref: '#/definitions/ImageVolumeSource' iscsi: $ref: '#/definitions/ISCSIVolumeSource' name: @@ -4897,7 +4968,7 @@ definitions: title: VolumeMount describes a mounting of a Volume within a container. type: object VolumeProjection: - description: Projection that may be projected along with other supported volume types + description: Exactly one of these fields must be set. properties: clusterTrustBundle: $ref: '#/definitions/ClusterTrustBundleProjection' @@ -4909,6 +4980,7 @@ definitions: $ref: '#/definitions/SecretProjection' serviceAccountToken: $ref: '#/definitions/ServiceAccountTokenProjection' + title: Projection that may be projected along with other supported volume types. 
type: object VolumeResourceRequirements: properties: diff --git a/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md b/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md index 0c83c9ac659e..56c1534ff178 100644 --- a/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md +++ b/sdks/java/client/docs/ClusterWorkflowTemplateServiceApi.md @@ -82,7 +82,7 @@ Name | Type | Description | Notes # **clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate** -> Object clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate(name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate(name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential) @@ -115,8 +115,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
try { - Object result = apiInstance.clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate(name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate(name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ClusterWorkflowTemplateServiceApi#clusterWorkflowTemplateServiceDeleteClusterWorkflowTemplate"); @@ -140,6 +141,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
| [optional] ### Return type diff --git a/sdks/java/client/docs/CronWorkflowServiceApi.md b/sdks/java/client/docs/CronWorkflowServiceApi.md index 85eb1cfca171..1238c395a5aa 100644 --- a/sdks/java/client/docs/CronWorkflowServiceApi.md +++ b/sdks/java/client/docs/CronWorkflowServiceApi.md @@ -86,7 +86,7 @@ Name | Type | Description | Notes # **cronWorkflowServiceDeleteCronWorkflow** -> Object cronWorkflowServiceDeleteCronWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object cronWorkflowServiceDeleteCronWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential) @@ -120,8 +120,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. 
List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
try { - Object result = apiInstance.cronWorkflowServiceDeleteCronWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.cronWorkflowServiceDeleteCronWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling CronWorkflowServiceApi#cronWorkflowServiceDeleteCronWorkflow"); @@ -146,6 +147,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type diff --git a/sdks/java/client/docs/EventSourceServiceApi.md b/sdks/java/client/docs/EventSourceServiceApi.md index 73838d9edadb..39dbc37cc79e 100644 --- a/sdks/java/client/docs/EventSourceServiceApi.md +++ b/sdks/java/client/docs/EventSourceServiceApi.md @@ -15,7 +15,7 @@ Method | HTTP request | Description # **eventSourceServiceCreateEventSource** -> IoArgoprojEventsV1alpha1EventSource eventSourceServiceCreateEventSource(namespace, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource eventSourceServiceCreateEventSource(namespace, body) @@ -44,7 +44,7 @@ public class Example { String namespace = "namespace_example"; // String | EventsourceCreateEventSourceRequest body = new EventsourceCreateEventSourceRequest(); // EventsourceCreateEventSourceRequest | try { - IoArgoprojEventsV1alpha1EventSource result = apiInstance.eventSourceServiceCreateEventSource(namespace, body); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource result = apiInstance.eventSourceServiceCreateEventSource(namespace, body); 
System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceCreateEventSource"); @@ -66,7 +66,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization @@ -85,7 +85,7 @@ Name | Type | Description | Notes # **eventSourceServiceDeleteEventSource** -> Object eventSourceServiceDeleteEventSource(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object eventSourceServiceDeleteEventSource(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential) @@ -119,8 +119,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
try { - Object result = apiInstance.eventSourceServiceDeleteEventSource(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.eventSourceServiceDeleteEventSource(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceDeleteEventSource"); @@ -145,6 +146,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type @@ -167,7 +169,7 @@ Name | Type | Description | Notes # **eventSourceServiceEventSourcesLogs** -> StreamResultOfEventsourceLogEntry eventSourceServiceEventSourcesLogs(namespace, name, eventSourceType, eventName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend) +> StreamResultOfEventsourceLogEntry eventSourceServiceEventSourcesLogs(namespace, name, eventSourceType, eventName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend, podLogOptionsStream) @@ -205,11 +207,12 @@ public class Example { String podLogOptionsSinceTimeSeconds = "podLogOptionsSinceTimeSeconds_example"; // String | 
Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. Integer podLogOptionsSinceTimeNanos = 56; // Integer | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. Boolean podLogOptionsTimestamps = true; // Boolean | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. - String podLogOptionsTailLines = "podLogOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + String podLogOptionsTailLines = "podLogOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. String podLogOptionsLimitBytes = "podLogOptionsLimitBytes_example"; // String | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. Boolean podLogOptionsInsecureSkipTLSVerifyBackend = true; // Boolean | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. 
If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + String podLogOptionsStream = "podLogOptionsStream_example"; // String | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. try { - StreamResultOfEventsourceLogEntry result = apiInstance.eventSourceServiceEventSourcesLogs(namespace, name, eventSourceType, eventName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend); + StreamResultOfEventsourceLogEntry result = apiInstance.eventSourceServiceEventSourcesLogs(namespace, name, eventSourceType, eventName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend, podLogOptionsStream); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceEventSourcesLogs"); @@ -238,9 +241,10 @@ Name | Type | Description | Notes **podLogOptionsSinceTimeSeconds** | **String**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. 
| [optional] **podLogOptionsSinceTimeNanos** | **Integer**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **podLogOptionsTimestamps** | **Boolean**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **podLogOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **podLogOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **podLogOptionsLimitBytes** | **String**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **podLogOptionsInsecureSkipTLSVerifyBackend** | **Boolean**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). 
+optional. | [optional] + **podLogOptionsStream** | **String**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. | [optional] ### Return type @@ -263,7 +267,7 @@ Name | Type | Description | Notes # **eventSourceServiceGetEventSource** -> IoArgoprojEventsV1alpha1EventSource eventSourceServiceGetEventSource(namespace, name) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource eventSourceServiceGetEventSource(namespace, name) @@ -292,7 +296,7 @@ public class Example { String namespace = "namespace_example"; // String | String name = "name_example"; // String | try { - IoArgoprojEventsV1alpha1EventSource result = apiInstance.eventSourceServiceGetEventSource(namespace, name); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource result = apiInstance.eventSourceServiceGetEventSource(namespace, name); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceGetEventSource"); @@ -314,7 +318,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization @@ -333,7 +337,7 @@ Name | Type | Description | Notes # **eventSourceServiceListEventSources** -> IoArgoprojEventsV1alpha1EventSourceList eventSourceServiceListEventSources(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, 
listOptionsSendInitialEvents) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList eventSourceServiceListEventSources(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents) @@ -371,7 +375,7 @@ public class Example { String listOptionsContinue = "listOptionsContinue_example"; // String | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. Boolean listOptionsSendInitialEvents = true; // Boolean | `sendInitialEvents=true` may be set together with `watch=true`. In that case, the watch stream will begin with synthetic events to produce the current state of objects in the collection. 
Once all such events have been sent, a synthetic \"Bookmark\" event will be sent. The bookmark will report the ResourceVersion (RV) corresponding to the set of objects, and be marked with `\"io.k8s.initial-events-end\": \"true\"` annotation. Afterwards, the watch stream will proceed as usual, sending watch events corresponding to changes (subsequent to the RV) to objects watched. When `sendInitialEvents` option is set, we require `resourceVersionMatch` option to also be set. The semantic of the watch request is as following: - `resourceVersionMatch` = NotOlderThan is interpreted as \"data at least as new as the provided `resourceVersion`\" and the bookmark event is send when the state is synced to a `resourceVersion` at least as fresh as the one provided by the ListOptions. If `resourceVersion` is unset, this is interpreted as \"consistent read\" and the bookmark event is send when the state is synced at least to the moment when request started being processed. - `resourceVersionMatch` set to any other value or unset Invalid error is returned. Defaults to true if `resourceVersion=\"\"` or `resourceVersion=\"0\"` (for backward compatibility reasons) and to false otherwise. 
+optional try { - IoArgoprojEventsV1alpha1EventSourceList result = apiInstance.eventSourceServiceListEventSources(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList result = apiInstance.eventSourceServiceListEventSources(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceListEventSources"); @@ -402,7 +406,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSourceList**](IoArgoprojEventsV1alpha1EventSourceList.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md) ### Authorization @@ -421,7 +425,7 @@ Name | Type | Description | Notes # **eventSourceServiceUpdateEventSource** -> IoArgoprojEventsV1alpha1EventSource eventSourceServiceUpdateEventSource(namespace, name, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource eventSourceServiceUpdateEventSource(namespace, name, body) @@ -451,7 +455,7 @@ public class Example { String name = "name_example"; // String | EventsourceUpdateEventSourceRequest body = new EventsourceUpdateEventSourceRequest(); // EventsourceUpdateEventSourceRequest | try { - IoArgoprojEventsV1alpha1EventSource result = apiInstance.eventSourceServiceUpdateEventSource(namespace, name, body); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource result = 
apiInstance.eventSourceServiceUpdateEventSource(namespace, name, body); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling EventSourceServiceApi#eventSourceServiceUpdateEventSource"); @@ -474,7 +478,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization diff --git a/sdks/java/client/docs/EventsourceCreateEventSourceRequest.md b/sdks/java/client/docs/EventsourceCreateEventSourceRequest.md index 1e30caa1d2d1..70ca3392c142 100644 --- a/sdks/java/client/docs/EventsourceCreateEventSourceRequest.md +++ b/sdks/java/client/docs/EventsourceCreateEventSourceRequest.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**eventSource** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**eventSource** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **namespace** | **String** | | [optional] diff --git a/sdks/java/client/docs/EventsourceEventSourceWatchEvent.md b/sdks/java/client/docs/EventsourceEventSourceWatchEvent.md index 300295760aac..8f2fd97768dc 100644 --- a/sdks/java/client/docs/EventsourceEventSourceWatchEvent.md +++ b/sdks/java/client/docs/EventsourceEventSourceWatchEvent.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**_object** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**_object** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **type** | **String** | | [optional] 
diff --git a/sdks/java/client/docs/EventsourceUpdateEventSourceRequest.md b/sdks/java/client/docs/EventsourceUpdateEventSourceRequest.md index 2e4ae9dbb2f7..eed1067164ae 100644 --- a/sdks/java/client/docs/EventsourceUpdateEventSourceRequest.md +++ b/sdks/java/client/docs/EventsourceUpdateEventSourceRequest.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**eventSource** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**eventSource** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **name** | **String** | | [optional] **namespace** | **String** | | [optional] diff --git a/sdks/java/client/docs/GRPCAction.md b/sdks/java/client/docs/GRPCAction.md index caaeacd920dd..6e4df803cead 100644 --- a/sdks/java/client/docs/GRPCAction.md +++ b/sdks/java/client/docs/GRPCAction.md @@ -2,6 +2,7 @@ # GRPCAction +GRPCAction specifies an action involving a GRPC service. 
## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md similarity index 83% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md index 77fd8375e7c0..28e770618bdb 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AMQPConsumeConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md new file mode 100644 index 000000000000..accb41b3e5ae --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md @@ -0,0 +1,27 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**consume** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md) | | [optional] +**exchangeDeclare** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md) | | [optional] 
+**exchangeName** | **String** | | [optional] +**exchangeType** | **String** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**jsonBody** | **Boolean** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**queueBind** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md) | | [optional] +**queueDeclare** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md) | | [optional] +**routingKey** | **String** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **String** | | [optional] +**urlSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md similarity index 79% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md index e84684c3f1a2..3559ebb1e7d2 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md similarity index 69% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md index 6caa72a5cc32..ac13c8877c10 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AMQPQueueBindConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md similarity index 83% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md index bdf69169aaea..4840c7863670 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md similarity index 70% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md index a88e04482839..e7b73d7953b3 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md +++ 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AWSLambdaTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger ## Properties @@ -10,8 +10,8 @@ Name | Type | Description | Notes **accessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **functionName** | **String** | FunctionName refers to the name of the function to invoke. | [optional] **invocationType** | **String** | Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **region** | **String** | | [optional] **roleARN** | **String** | | [optional] **secretKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Amount.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md similarity index 77% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Amount.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md index f27919e26494..94ca6adcf57a 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Amount.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Amount +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount Amount represent a numeric amount. diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md new file mode 100644 index 000000000000..8047e5018852 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md @@ -0,0 +1,16 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**args** | **List<String>** | | [optional] +**operation** | **String** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md) | | [optional] + + + diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md new file mode 100644 index 000000000000..9bfe1aaabfe4 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md @@ -0,0 +1,19 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**configmap** | [**io.kubernetes.client.openapi.models.V1ConfigMapKeySelector**](io.kubernetes.client.openapi.models.V1ConfigMapKeySelector.md) | | [optional] +**file** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md) | | [optional] +**git** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md) | | [optional] +**inline** | **String** | | [optional] +**resource** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md) | | [optional] +**s3** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md) | | [optional] +**url** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md new file mode 100644 index 000000000000..a4790a19b187 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md @@ -0,0 +1,18 @@ + + +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**fqdn** | **String** | | [optional] +**hubName** | **String** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**sharedAccessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**sharedAccessKeyName** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md similarity index 70% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md index acd27c9579a8..f78e7f9bfd15 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1AzureEventsHubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**filter** | 
[**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **fqdn** | **String** | | [optional] **hubName** | **String** | | [optional] **metadata** | **Map<String, String>** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md similarity index 71% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md index 8a860ba82ecb..01e0fd4f53e9 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AzureQueueStorageEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource ## Properties @@ -10,7 +10,7 @@ Name | Type | Description | Notes **connectionString** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **decodeMessage** | **Boolean** | | [optional] **dlq** | **Boolean** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **queueName** | **String** | | [optional] diff --git 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md similarity index 59% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md index 8655c7a86c6a..7cd2d2181cc8 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1AzureServiceBusEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource ## Properties @@ -8,13 +8,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **connectionString** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **fullyQualifiedNamespace** | **String** | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **queueName** | **String** | | [optional] **subscriptionName** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **topicName** | **String** | | [optional] diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md new file mode 100644 index 000000000000..0555bb9d39ac --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md @@ -0,0 +1,19 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**connectionString** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] +**queueName** | **String** | | [optional] +**subscriptionName** | **String** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topicName** | **String** | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md new file mode 100644 index 000000000000..dac546360b5a --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md @@ -0,0 +1,16 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**duration** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md) | | [optional] +**factor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md) | | [optional] +**jitter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md) | | [optional] +**steps** | **Integer** | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitCreds.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md similarity index 87% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GitCreds.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md index 35c3900e561a..24dcabdc28fe 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitCreds.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GitCreds +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth ## 
Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md similarity index 53% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md index fd8ba3966149..5fe5418ccdaa 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1BitbucketAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**basic** | [**IoArgoprojEventsV1alpha1BitbucketBasicAuth**](IoArgoprojEventsV1alpha1BitbucketBasicAuth.md) | | [optional] +**basic** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md) | | [optional] **oauthToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md similarity index 86% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md index 05a0c3b3a87a..e8f75e99d38d 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1BasicAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth ## Properties diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md new file mode 100644 index 000000000000..61f6bd8ce64c --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md @@ -0,0 +1,22 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md) | | [optional] +**deleteHookOnFinish** | **Boolean** | | [optional] +**events** | **List<String>** | Events this webhook is subscribed to. | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**owner** | **String** | | [optional] +**projectKey** | **String** | | [optional] +**repositories** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md) | | [optional] +**repositorySlug** | **String** | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md index 34e4b7398b25..e2142195ab17 
100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1BitbucketRepository +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md new file mode 100644 index 000000000000..e42d1b278b18 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md @@ -0,0 +1,28 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**accessToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**bitbucketserverBaseURL** | **String** | BitbucketServerBaseURL is the base URL for API requests to a custom endpoint. 
| [optional] +**checkInterval** | **String** | | [optional] +**deleteHookOnFinish** | **Boolean** | | [optional] +**events** | **List<String>** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**oneEventPerChange** | **Boolean** | | [optional] +**projectKey** | **String** | | [optional] +**projects** | **List<String>** | | [optional] +**repositories** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md) | | [optional] +**repositorySlug** | **String** | | [optional] +**skipBranchRefsChangedOnOpenPR** | **Boolean** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] +**webhookSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md new file mode 100644 index 000000000000..67fed54d5387 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**projectKey** | **String** | ProjectKey is the key of project for which 
integration needs to set up. | [optional] +**repositorySlug** | **String** | RepositorySlug is the slug of the repository for which integration needs to set up. | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md similarity index 51% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md index a9dfe5461f1c..35fa3bebcf24 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1CalendarEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource ## Properties @@ -8,10 +8,10 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **exclusionDates** | **List<String>** | ExclusionDates defines the list of DATE-TIME exceptions for recurring events. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **interval** | **String** | | [optional] **metadata** | **Map<String, String>** | | [optional] -**persistence** | [**IoArgoprojEventsV1alpha1EventPersistence**](IoArgoprojEventsV1alpha1EventPersistence.md) | | [optional] +**persistence** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md) | | [optional] **schedule** | **String** | | [optional] **timezone** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md index 3b24ff4b4602..d0585f103626 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1CatchupConfiguration +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Condition.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md similarity index 85% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Condition.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md index a82cf464d305..a9dbcc3f7807 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Condition.md +++ 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Condition +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md similarity index 73% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md index 3648728278cf..9c3fc5b0929e 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1ConditionsResetByTime +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md new file mode 100644 index 000000000000..c277be9d4a22 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md @@ -0,0 +1,13 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**byTime** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md similarity index 74% rename from 
sdks/java/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md index 1d8e3ea46cbd..bc1608bb8092 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1ConfigMapPersistence +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md new file mode 100644 index 000000000000..e59269f7e554 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md @@ -0,0 +1,18 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**env** | [**List<io.kubernetes.client.openapi.models.V1EnvVar>**](io.kubernetes.client.openapi.models.V1EnvVar.md) | | [optional] +**envFrom** | [**List<io.kubernetes.client.openapi.models.V1EnvFromSource>**](io.kubernetes.client.openapi.models.V1EnvFromSource.md) | | [optional] +**imagePullPolicy** | **String** | | [optional] +**resources** | [**io.kubernetes.client.openapi.models.V1ResourceRequirements**](io.kubernetes.client.openapi.models.V1ResourceRequirements.md) | | [optional] +**securityContext** | [**io.kubernetes.client.openapi.models.V1SecurityContext**](io.kubernetes.client.openapi.models.V1SecurityContext.md) | | [optional] +**volumeMounts** | [**List<io.kubernetes.client.openapi.models.V1VolumeMount>**](io.kubernetes.client.openapi.models.V1VolumeMount.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md similarity index 53% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md index cee9ce1e2eac..4d2e70fe7052 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1CustomTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger CustomTrigger refers to the specification of the custom trigger. @@ -9,8 +9,8 @@ CustomTrigger refers to the specification of the custom trigger. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **certSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **secure** | **Boolean** | | [optional] **serverNameOverride** | **String** | ServerNameOverride for the secure connection between sensor and custom trigger gRPC server. | [optional] **serverURL** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1DataFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md similarity index 94% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1DataFilter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md index a5ca5f6b88c7..8d265e7627f7 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1DataFilter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1DataFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md similarity index 73% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md index 2583af5b263c..a9533f962344 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EmailTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger EmailTrigger refers to the specification of the email notification trigger. @@ -11,7 +11,7 @@ Name | Type | Description | Notes **body** | **String** | | [optional] **from** | **String** | | [optional] **host** | **String** | Host refers to the smtp host url to which email is send. 
| [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] **port** | **Integer** | | [optional] **smtpPassword** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **subject** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md similarity index 55% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md index 73601d137341..6875a516bb09 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EmitterEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource ## Properties @@ -10,12 +10,12 @@ Name | Type | Description | Notes **broker** | **String** | Broker URI to connect to. 
| [optional] **channelKey** | **String** | | [optional] **channelName** | **String** | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **password** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventContext.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md similarity index 92% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventContext.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md index f4a17371ca56..828c907075c3 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventContext.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EventContext +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext 
## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependency.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md similarity index 52% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependency.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md index 14ae8da2da02..e08ff7512b5b 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependency.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EventDependency +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency ## Properties @@ -9,10 +9,10 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **eventName** | **String** | | [optional] **eventSourceName** | **String** | | [optional] -**filters** | [**IoArgoprojEventsV1alpha1EventDependencyFilter**](IoArgoprojEventsV1alpha1EventDependencyFilter.md) | | [optional] +**filters** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md) | | [optional] **filtersLogicalOperator** | **String** | FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). 
| [optional] **name** | **String** | | [optional] -**transform** | [**IoArgoprojEventsV1alpha1EventDependencyTransformer**](IoArgoprojEventsV1alpha1EventDependencyTransformer.md) | | [optional] +**transform** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md similarity index 51% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md index cde905e83a0a..a34515d001d8 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EventDependencyFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1. 
@@ -8,13 +8,13 @@ EventDependencyFilter defines filters and constraints for a io.argoproj.workflow Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**context** | [**IoArgoprojEventsV1alpha1EventContext**](IoArgoprojEventsV1alpha1EventContext.md) | | [optional] -**data** | [**List<IoArgoprojEventsV1alpha1DataFilter>**](IoArgoprojEventsV1alpha1DataFilter.md) | | [optional] +**context** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md) | | [optional] +**data** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md) | | [optional] **dataLogicalOperator** | **String** | DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] **exprLogicalOperator** | **String** | ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] -**exprs** | [**List<IoArgoprojEventsV1alpha1ExprFilter>**](IoArgoprojEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] +**exprs** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] **script** | **String** | Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1. 
| [optional] -**time** | [**IoArgoprojEventsV1alpha1TimeFilter**](IoArgoprojEventsV1alpha1TimeFilter.md) | | [optional] +**time** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md similarity index 71% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md index 7d26d594caa1..7569f62bc1ac 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EventDependencyTransformer +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md new file mode 100644 index 000000000000..de59a156fafe --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**catchup** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md) | | [optional] +**configMap** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md) | | [optional] 
+ + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md new file mode 100644 index 000000000000..b77c01f4bd23 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md @@ -0,0 +1,15 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] +**spec** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md) | | [optional] +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md similarity index 70% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md index cdadd5180200..d3912eb969e4 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1EventSourceFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorList.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md similarity index 51% rename from 
sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorList.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md index 742b26394f4d..ed841f62d2d0 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorList.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1SensorList +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**List<IoArgoprojEventsV1alpha1Sensor>**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**items** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **metadata** | [**io.kubernetes.client.openapi.models.V1ListMeta**](io.kubernetes.client.openapi.models.V1ListMeta.md) | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md new file mode 100644 index 000000000000..e66805c8cde9 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md @@ -0,0 +1,47 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**amqp** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md) | | [optional] +**azureEventsHub** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] +**azureQueueStorage** | [**Map<String, 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md) | | [optional] +**azureServiceBus** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] +**bitbucket** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md) | | [optional] +**bitbucketserver** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md) | | [optional] +**calendar** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md) | | [optional] +**emitter** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md) | | [optional] +**eventBusName** | **String** | | [optional] +**file** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md) | | [optional] +**generic** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md) | | [optional] +**gerrit** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md) | | [optional] +**github** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md) | | [optional] +**gitlab** | [**Map<String, 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md) | | [optional] +**hdfs** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md) | | [optional] +**kafka** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md) | | [optional] +**minio** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md) | | [optional] +**mqtt** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md) | | [optional] +**nats** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md) | | [optional] +**nsq** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md) | | [optional] +**pubSub** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md) | | [optional] +**pulsar** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md) | | [optional] +**redis** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md) | | [optional] +**redisStream** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md) | | [optional] +**replicas** | **Integer** | | 
[optional] +**resource** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md) | | [optional] +**service** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md) | | [optional] +**sftp** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md) | | [optional] +**slack** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md) | | [optional] +**sns** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md) | | [optional] +**sqs** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md) | | [optional] +**storageGrid** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md) | | [optional] +**stripe** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md) | | [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md) | | [optional] +**webhook** | [**Map<String, GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md new file 
mode 100644 index 000000000000..d55ddb79880c --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md @@ -0,0 +1,13 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md new file mode 100644 index 000000000000..ce80d2adf7ba --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**expr** | **String** | Expr refers to the expression that determines the outcome of the filter. | [optional] +**fields** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. 
| [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md similarity index 71% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md index 171a7a117b48..90fd8e7efcaa 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1FileArtifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md new file mode 100644 index 000000000000..0d8451bccf0f --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md @@ -0,0 +1,18 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource + +FileEventSource describes an event-source for file related events. 
+ +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**eventType** | **String** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**polling** | **Boolean** | | [optional] +**watchPathConfig** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md index 4e5ba2527495..a9b536b3e77e 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GenericEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource GenericEventSource refers to a generic event source. It can be used to implement a custom event source. 
@@ -10,7 +10,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **authSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **config** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **insecure** | **Boolean** | Insecure determines the type of connection. | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md new file mode 100644 index 000000000000..fb4756de23dd --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md @@ -0,0 +1,22 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**deleteHookOnFinish** | **Boolean** | | [optional] +**events** | **List<String>** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**gerritBaseURL** | **String** | | [optional] +**hookName** | **String** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**projects** | **List<String>** | List of project namespace paths like 
\"whynowy/test\". | [optional] +**sslVerify** | **Boolean** | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md similarity index 66% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md index 6c0681d8996b..14c9af62b365 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GitArtifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact ## Properties @@ -9,11 +9,11 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **branch** | **String** | | [optional] **cloneDirectory** | **String** | Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client. 
| [optional] -**creds** | [**IoArgoprojEventsV1alpha1GitCreds**](IoArgoprojEventsV1alpha1GitCreds.md) | | [optional] +**creds** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md) | | [optional] **filePath** | **String** | | [optional] **insecureIgnoreHostKey** | **Boolean** | | [optional] **ref** | **String** | | [optional] -**remote** | [**IoArgoprojEventsV1alpha1GitRemoteConfig**](IoArgoprojEventsV1alpha1GitRemoteConfig.md) | | [optional] +**remote** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md) | | [optional] **sshKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **tag** | **String** | | [optional] **url** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md similarity index 87% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md index afec20f6f624..5ec2d637e540 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1BitbucketBasicAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md similarity index 84% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md index 
4f869a1924a9..76aea69b7ffa 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GitRemoteConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md similarity index 84% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md index 285322c4216c..bebb0fe46d03 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GithubAppCreds +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md similarity index 55% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md index 2dfadecf39dc..185b5cf6fb15 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GithubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource ## Properties @@ -12,8 +12,8 @@ Name | Type | Description | Notes **contentType** | **String** | | [optional] **deleteHookOnFinish** | **Boolean** | | [optional] **events** | **List<String>** | | [optional] -**filter** | 
[**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**githubApp** | [**IoArgoprojEventsV1alpha1GithubAppCreds**](IoArgoprojEventsV1alpha1GithubAppCreds.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**githubApp** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md) | | [optional] **githubBaseURL** | **String** | | [optional] **githubUploadURL** | **String** | | [optional] **id** | **String** | | [optional] @@ -21,9 +21,9 @@ Name | Type | Description | Notes **metadata** | **Map<String, String>** | | [optional] **organizations** | **List<String>** | Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set. | [optional] **owner** | **String** | | [optional] -**repositories** | [**List<IoArgoprojEventsV1alpha1OwnedRepositories>**](IoArgoprojEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. | [optional] +**repositories** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. 
| [optional] **repository** | **String** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **webhookSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md similarity index 71% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md index 7334ea806012..72e7bd4fd555 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1GitlabEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource ## Properties @@ -11,14 +11,14 @@ Name | Type | Description | Notes **deleteHookOnFinish** | **Boolean** | | [optional] **enableSSLVerification** | **Boolean** | | [optional] **events** | **List<String>** | Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **gitlabBaseURL** | **String** | | [optional] **groups** | **List<String>** | | [optional] **metadata** | **Map<String, String>** | | [optional] **projectID** | **String** | | [optional] **projects** | **List<String>** | | [optional] **secretToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md similarity index 77% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md index fca8d059f514..e0fdf1495a1e 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1HDFSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource ## Properties @@ -9,7 +9,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **addresses** | **List<String>** | | [optional] **checkInterval** | **String** | | [optional] -**filter** | 
[**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **hdfsUser** | **String** | HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used. | [optional] **krbCCacheSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **krbConfigConfigMap** | [**io.kubernetes.client.openapi.models.V1ConfigMapKeySelector**](io.kubernetes.client.openapi.models.V1ConfigMapKeySelector.md) | | [optional] @@ -19,7 +19,7 @@ Name | Type | Description | Notes **krbUsername** | **String** | KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used. | [optional] **metadata** | **Map<String, String>** | | [optional] **type** | **String** | | [optional] -**watchPathConfig** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] +**watchPathConfig** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md new file mode 100644 index 000000000000..8f4cb2b04975 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md @@ -0,0 +1,21 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**basicAuth** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**headers** | **Map<String, String>** | | [optional] +**method** | **String** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**secureHeaders** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md) | | [optional] +**timeout** | **String** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **String** | URL refers to the URL to send HTTP request to. 
| [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md similarity index 78% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md index 333f2c803a0f..fa699a1ad7d5 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Int64OrString +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Resource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md similarity index 58% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Resource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md index 84d8b0f6979e..78ce8d41d1b8 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Resource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md @@ -1,8 +1,8 @@ -# IoArgoprojEventsV1alpha1Resource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource -Resource represent arbitrary structured data. +K8SResource represent arbitrary structured data. 
## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md similarity index 51% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md index 6b85fc4f5986..f6e617a5c05c 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1K8SResourcePolicy +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] +**backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] **errorOnBackoffTimeout** | **Boolean** | | [optional] **labels** | **Map<String, String>** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md similarity index 78% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md index 2563981b8d7a..1f2c0e64e30d 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1KafkaConsumerGroup +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup ## Properties diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md new file mode 100644 index 000000000000..f384e436a194 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md @@ -0,0 +1,25 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | **String** | Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**consumerGroup** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**jsonBody** | **Boolean** | | [optional] +**limitEventsPerSecond** | **String** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**partition** | **String** | | [optional] +**sasl** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **String** | | [optional] +**url** | **String** | | [optional] +**version** | **String** | | [optional] + + + diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md new file mode 100644 index 000000000000..16626af78779 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md @@ -0,0 +1,28 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger + +KafkaTrigger refers to the specification of the Kafka trigger. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**compress** | **Boolean** | | [optional] +**flushFrequency** | **Integer** | | [optional] +**headers** | **Map<String, String>** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**partition** | **Integer** | | [optional] +**partitioningKey** | **String** | The partitioning key for the messages put on the Kafka topic. +optional. | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**requiredAcks** | **Integer** | RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional. 
| [optional] +**sasl** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md) | | [optional] +**schemaRegistry** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md) | | [optional] +**secureHeaders** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **String** | | [optional] +**url** | **String** | URL of the Kafka broker, multiple URLs separated by comma. | [optional] +**version** | **String** | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md similarity index 73% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md index e790352e523b..b2edbd66629b 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1LogTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md new file mode 100644 index 000000000000..90ef5413cdbc --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md @@ -0,0 +1,21 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource + + +## Properties + 
+Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**clientId** | **String** | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**jsonBody** | **Boolean** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **String** | | [optional] +**url** | **String** | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Metadata.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md similarity index 80% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Metadata.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md index 8d8fd3b19dbb..3920eec2bdf7 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Metadata.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Metadata +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md similarity index 72% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md index 24562a8e8be9..0e6c4212faf7 100644 --- 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1NATSAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**basic** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] +**basic** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] **credential** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **nkey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **token** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md new file mode 100644 index 000000000000..a67bc5df152c --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md @@ -0,0 +1,21 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md) | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**jsonBody** | **Boolean** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**queue** | **String** | | [optional] +**subject** | **String** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **String** | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md new file mode 100644 index 000000000000..12f268a048f5 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md @@ -0,0 +1,19 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger + +NATSTrigger refers to the specification of the NATS trigger. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md) | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**subject** | **String** | Name of the subject to put message on. | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **String** | URL of the NATS cluster. 
| [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md new file mode 100644 index 000000000000..deefdfef5c5c --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md @@ -0,0 +1,20 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**channel** | **String** | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**hostAddress** | **String** | | [optional] +**jsonBody** | **Boolean** | | [optional] +**metadata** | **Map<String, String>** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **String** | Topic to subscribe to. 
| [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md similarity index 54% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md index be3bb050123f..10fab3463195 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1OpenWhiskTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger OpenWhiskTrigger refers to the specification of the OpenWhisk trigger. @@ -12,8 +12,8 @@ Name | Type | Description | Notes **authToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **host** | **String** | Host URL of the OpenWhisk. | [optional] **namespace** | **String** | Namespace for the action. Defaults to \"_\". +optional. | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **version** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md similarity index 75% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md index df507126977d..b42c1822605d 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1OwnedRepositories +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PayloadField.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md similarity index 91% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1PayloadField.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md index 3fd6594e6328..c8290fd67744 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PayloadField.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1PayloadField +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField PayloadField binds a value at path within the event payload against a name. 
diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md index cf83dd76c533..f9fdd5a362e8 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1PubSubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource PubSubEventSource refers to event-source for GCP PubSub related events. @@ -10,7 +10,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **credentialSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **deleteSubscriptionOnFinish** | **Boolean** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **projectID** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md similarity index 64% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md index b83cce930730..d8a4de219ac5 100644 --- 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1PulsarEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource ## Properties @@ -10,11 +10,11 @@ Name | Type | Description | Notes **authAthenzParams** | **Map<String, String>** | | [optional] **authAthenzSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **authTokenSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **tlsAllowInsecureConnection** | **Boolean** | | [optional] **tlsTrustCertsSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **tlsValidateHostname** | **Boolean** | | [optional] diff --git 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md similarity index 50% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md index f14a58bc27b7..e223940c99ad 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1PulsarTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger PulsarTrigger refers to the specification of the Pulsar trigger. @@ -11,10 +11,10 @@ Name | Type | Description | Notes **authAthenzParams** | **Map<String, String>** | | [optional] **authAthenzSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **authTokenSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**connectionBackoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**payload** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **tlsAllowInsecureConnection** | **Boolean** | | [optional] **tlsTrustCertsSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **tlsValidateHostname** | **Boolean** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RateLimit.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md similarity index 77% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1RateLimit.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md index 14a4fc03b1fe..c80025e1a1e0 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RateLimit.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1RateLimit +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md similarity index 60% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md index 6ecf5745bd46..23889b722a3a 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1RedisEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource ## Properties @@ -9,13 +9,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **channels** | **List<String>** | | [optional] **db** | **Integer** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **hostAddress** | **String** | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **namespace** | **String** | | [optional] **password** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md similarity index 64% rename from 
sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md index bf318d50ceaf..87921c738fb1 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1RedisStreamEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource ## Properties @@ -9,13 +9,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **consumerGroup** | **String** | | [optional] **db** | **Integer** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **hostAddress** | **String** | | [optional] **maxMsgCountPerRead** | **Integer** | | [optional] **metadata** | **Map<String, String>** | | [optional] **password** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **streams** | **List<String>** | Streams to look for entries. XREADGROUP is used on all streams using a single consumer group. 
| [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md similarity index 69% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md index bcad0c15ffff..f382d3d61661 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1ResourceEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource ResourceEventSource refers to a event-source for K8s resource related events. @@ -9,7 +9,7 @@ ResourceEventSource refers to a event-source for K8s resource related events. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **eventTypes** | **List<String>** | EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1ResourceFilter**](IoArgoprojEventsV1alpha1ResourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md) | | [optional] **groupVersionResource** | [**GroupVersionResource**](GroupVersionResource.md) | | [optional] **metadata** | **Map<String, String>** | | [optional] **namespace** | **String** | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md new file mode 100644 index 000000000000..aeaeea6090e3 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md @@ -0,0 +1,17 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**afterStart** | **Boolean** | | [optional] +**createdBy** | **java.time.Instant** | | [optional] +**fields** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md) | | [optional] +**labels** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md) | | [optional] +**prefix** | **String** | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md similarity index 69% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md index e2ba0c1154cc..550f23bf8484 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md +++ 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1S3Artifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact ## Properties @@ -8,11 +8,11 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **accessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**bucket** | [**IoArgoprojEventsV1alpha1S3Bucket**](IoArgoprojEventsV1alpha1S3Bucket.md) | | [optional] +**bucket** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md) | | [optional] **caCertificate** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **endpoint** | **String** | | [optional] **events** | **List<String>** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1S3Filter**](IoArgoprojEventsV1alpha1S3Filter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md) | | [optional] **insecure** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **region** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md similarity index 76% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md index 9a617bc7ea0c..87cbb3b69ae4 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1S3Bucket +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Filter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md similarity index 77% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Filter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md index 7ba08dcb932d..57c45e8ffe25 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1S3Filter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1S3Filter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md similarity index 88% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md index 64759096753b..c1fc6be997ea 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1SASLConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md similarity index 69% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md index 5cd179fa68f2..d9cf4d363ced 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md @@ -1,6 +1,6 @@ -# 
IoArgoprojEventsV1alpha1SFTPEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource SFTPEventSource describes an event-source for sftp related events. @@ -10,13 +10,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **address** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **eventType** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **Map<String, String>** | | [optional] **password** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **pollIntervalDuration** | **String** | | [optional] **sshKeySecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **username** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**watchPathConfig** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] +**watchPathConfig** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md similarity index 63% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md rename to 
sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md index 85252a3db5f3..5cebb5d6a5ca 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1SNSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource ## Properties @@ -9,14 +9,14 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **accessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **endpoint** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **Map<String, String>** | | [optional] **region** | **String** | | [optional] **roleARN** | **String** | | [optional] **secretKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **topicArn** | **String** | | [optional] **validateSignature** | **Boolean** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md similarity index 82% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md rename to 
sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md index 78fab09a205f..5064ef62b962 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1SQSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource ## Properties @@ -10,7 +10,7 @@ Name | Type | Description | Notes **accessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **dlq** | **Boolean** | | [optional] **endpoint** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **jsonBody** | **Boolean** | | [optional] **metadata** | **Map<String, String>** | | [optional] **queue** | **String** | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md new file mode 100644 index 000000000000..45d1a2181874 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md @@ -0,0 +1,15 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**schemaId** | **Integer** | | [optional] +**url** | **String** | Schema Registry URL. 
| [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md new file mode 100644 index 000000000000..1f90f1b67bc1 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **String** | | [optional] +**valueFrom** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Selector.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md similarity index 83% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Selector.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md index 3e2d1067d4e2..9bfbf98ee05b 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Selector.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Selector +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector Selector represents conditional operation to select K8s objects. 
diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md new file mode 100644 index 000000000000..49eb2ada1541 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md @@ -0,0 +1,15 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] +**spec** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md) | | [optional] +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md similarity index 53% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md index 1821062bdfc9..c38d28ce6dd4 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1EventSourceList +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**List<IoArgoprojEventsV1alpha1EventSource>**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**items** | 
[**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **metadata** | [**io.kubernetes.client.openapi.models.V1ListMeta**](io.kubernetes.client.openapi.models.V1ListMeta.md) | | [optional] diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md new file mode 100644 index 000000000000..3b02215f971a --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md @@ -0,0 +1,20 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**dependencies** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. | [optional] +**errorOnFailedRound** | **Boolean** | ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed. | [optional] +**eventBusName** | **String** | | [optional] +**loggingFields** | **Map<String, String>** | | [optional] +**replicas** | **Integer** | | [optional] +**revisionHistoryLimit** | **Integer** | | [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md) | | [optional] +**triggers** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. 
| [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md new file mode 100644 index 000000000000..411ec27bd7b4 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus + +SensorStatus contains information about the status of a sensor. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Service.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md similarity index 52% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Service.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md index 6db7d8cbdf18..7ce6afe764f5 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Service.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Service +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service ## Properties @@ -8,6 +8,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **clusterIP** | **String** | | [optional] +**metadata** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md) | | [optional] **ports** | [**List<ServicePort>**](ServicePort.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md similarity index 
54% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md index 95485709a2e5..7f1353603e4c 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md @@ -1,17 +1,17 @@ -# IoArgoprojEventsV1alpha1SlackEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **Map<String, String>** | | [optional] **signingSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **token** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackSender.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md similarity index 76% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackSender.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md index db8b6c4ae8b0..0d871ed29dc4 100644 --- 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackSender.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1SlackSender +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackThread.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md similarity index 79% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackThread.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md index 0c93483cef58..96cb0a2febda 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackThread.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1SlackThread +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md new file mode 100644 index 000000000000..47e8a57f2cde --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md @@ -0,0 +1,21 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger + +SlackTrigger refers to the specification of the slack notification trigger. 
+ +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**attachments** | **String** | | [optional] +**blocks** | **String** | | [optional] +**channel** | **String** | | [optional] +**message** | **String** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**sender** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md) | | [optional] +**slackToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] +**thread** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md new file mode 100644 index 000000000000..c710d803bb66 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md @@ -0,0 +1,17 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**liveObject** | **Boolean** | | [optional] +**operation** | **String** | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. 
| [optional] +**patchStrategy** | **String** | | [optional] +**source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md) | | [optional] + + + diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md new file mode 100644 index 000000000000..2b71753b77e3 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status + +Status is a common structure which can be used for Status field. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**conditions** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md similarity index 72% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md index a9e7a7e531e1..44eb4d27430c 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1StatusPolicy +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md similarity index 60% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md rename to 
sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md index d5a89a94ef21..c31a749ad3d5 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1StorageGridEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource ## Properties @@ -11,11 +11,11 @@ Name | Type | Description | Notes **authToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] **bucket** | **String** | Name of the bucket to register notifications for. | [optional] **events** | **List<String>** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1StorageGridFilter**](IoArgoprojEventsV1alpha1StorageGridFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md) | | [optional] **metadata** | **Map<String, String>** | | [optional] **region** | **String** | | [optional] **topicArn** | **String** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md index 27b965ba8c42..586e1492ed4c 100644 --- 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1StorageGridFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md similarity index 65% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md index 5ab3ee91cf2b..b273485ffb65 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1StripeEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource ## Properties @@ -11,7 +11,7 @@ Name | Type | Description | Notes **createWebhook** | **Boolean** | | [optional] **eventFilter** | **List<String>** | | [optional] **metadata** | **Map<String, String>** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md similarity index 92% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md index fc81bcb099b5..185a716afb06 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md +++ 
b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1TLSConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig TLSConfig refers to TLS configuration for a client. diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Template.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md similarity index 74% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1Template.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md index 3a9db545b4a8..c4d937996cfe 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Template.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Template +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template ## Properties @@ -8,9 +8,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **affinity** | [**io.kubernetes.client.openapi.models.V1Affinity**](io.kubernetes.client.openapi.models.V1Affinity.md) | | [optional] -**container** | [**io.kubernetes.client.openapi.models.V1Container**](io.kubernetes.client.openapi.models.V1Container.md) | | [optional] +**container** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md) | | [optional] **imagePullSecrets** | [**List<io.kubernetes.client.openapi.models.V1LocalObjectReference>**](io.kubernetes.client.openapi.models.V1LocalObjectReference.md) | | [optional] -**metadata** | [**IoArgoprojEventsV1alpha1Metadata**](IoArgoprojEventsV1alpha1Metadata.md) | | [optional] +**metadata** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md) | | [optional] **nodeSelector** | **Map<String, String>** | | [optional] **priority** | **Integer** | | [optional] 
**priorityClassName** | **String** | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md similarity index 92% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md index 2583f0cc0633..f029f41f5b5f 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1TimeFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter TimeFilter describes a window in time. It filters out events that occur outside the time limits. In other words, only events that occur after Start and before Stop will pass this filter. diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md new file mode 100644 index 000000000000..0b0774574974 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md @@ -0,0 +1,19 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**atLeastOnce** | **Boolean** | | [optional] +**dlqTrigger** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md) | | [optional] +**parameters** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**policy** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md) | | [optional] +**rateLimit** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md) | | [optional] +**retryStrategy** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md similarity index 71% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md index ec51f552f0f8..bbbea200030a 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1TriggerParameter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter ## Properties @@ -9,7 +9,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **dest** | **String** | Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used. | [optional] **operation** | **String** | Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it. 
| [optional] -**src** | [**IoArgoprojEventsV1alpha1TriggerParameterSource**](IoArgoprojEventsV1alpha1TriggerParameterSource.md) | | [optional] +**src** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md) | | [optional] diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md similarity index 95% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md index 354031067697..94a70bb84e12 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1TriggerParameterSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md new file mode 100644 index 000000000000..c14478075829 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**k8s** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md) | | [optional] +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md) | | [optional] + + + diff --git 
a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md new file mode 100644 index 000000000000..bc16f479cc73 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md @@ -0,0 +1,30 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate + +TriggerTemplate is the template that describes trigger specification. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**argoWorkflow** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md) | | [optional] +**awsLambda** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md) | | [optional] +**azureEventHubs** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md) | | [optional] +**azureServiceBus** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md) | | [optional] +**conditions** | **String** | | [optional] +**conditionsReset** | [**List<GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria>**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md) | | [optional] +**custom** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md) | | [optional] +**email** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md) | | [optional] +**http** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md) | | [optional] +**k8s** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md) | | [optional] +**kafka** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md) | | [optional] +**log** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md) | | [optional] +**name** | **String** | Name is a unique name of the action to take. | [optional] +**nats** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md) | | [optional] +**openWhisk** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md) | | [optional] +**pulsar** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md) | | [optional] +**slack** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md) | | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md similarity index 81% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md index c97891aaaf57..a8fd570f7de5 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1URLArtifact +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact URLArtifact contains information about an artifact at an http endpoint. diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md similarity index 87% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md index d902b38a331c..458b7033b05d 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1ValueFromSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md similarity index 78% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md index 62685a2a5766..78b95401162a 100644 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1WatchPathConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig ## Properties diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md similarity index 93% rename from sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md rename to sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md index e16d6f842b88..606ccd7aa9fc 100644 --- 
a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1WebhookContext +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext ## Properties diff --git a/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md new file mode 100644 index 000000000000..76b306e7e867 --- /dev/null +++ b/sdks/java/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md @@ -0,0 +1,14 @@ + + +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**webhookContext** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] + + + diff --git a/sdks/java/client/docs/ImageVolumeSource.md b/sdks/java/client/docs/ImageVolumeSource.md new file mode 100644 index 000000000000..38700ffc2087 --- /dev/null +++ b/sdks/java/client/docs/ImageVolumeSource.md @@ -0,0 +1,15 @@ + + +# ImageVolumeSource + +ImageVolumeSource represents a image volume resource. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pullPolicy** | **String** | Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. 
IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. | [optional] +**reference** | **String** | Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. | [optional] + + + diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md deleted file mode 100644 index 4e3867740686..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md +++ /dev/null @@ -1,27 +0,0 @@ - - -# IoArgoprojEventsV1alpha1AMQPEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**consume** | [**IoArgoprojEventsV1alpha1AMQPConsumeConfig**](IoArgoprojEventsV1alpha1AMQPConsumeConfig.md) | | [optional] -**exchangeDeclare** | [**IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig**](IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md) | | [optional] -**exchangeName** | **String** | | [optional] -**exchangeType** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**jsonBody** | **Boolean** | 
| [optional] -**metadata** | **Map<String, String>** | | [optional] -**queueBind** | [**IoArgoprojEventsV1alpha1AMQPQueueBindConfig**](IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md) | | [optional] -**queueDeclare** | [**IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig**](IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md) | | [optional] -**routingKey** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **String** | | [optional] -**urlSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md deleted file mode 100644 index 82903b1c6223..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**args** | **List<String>** | | [optional] -**operation** | **String** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md deleted file mode 100644 index 8dc05249c58e..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md +++ /dev/null @@ -1,19 +0,0 @@ - - -# IoArgoprojEventsV1alpha1ArtifactLocation - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- 
-**configmap** | [**io.kubernetes.client.openapi.models.V1ConfigMapKeySelector**](io.kubernetes.client.openapi.models.V1ConfigMapKeySelector.md) | | [optional] -**file** | [**IoArgoprojEventsV1alpha1FileArtifact**](IoArgoprojEventsV1alpha1FileArtifact.md) | | [optional] -**git** | [**IoArgoprojEventsV1alpha1GitArtifact**](IoArgoprojEventsV1alpha1GitArtifact.md) | | [optional] -**inline** | **String** | | [optional] -**resource** | [**IoArgoprojEventsV1alpha1Resource**](IoArgoprojEventsV1alpha1Resource.md) | | [optional] -**s3** | [**IoArgoprojEventsV1alpha1S3Artifact**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] -**url** | [**IoArgoprojEventsV1alpha1URLArtifact**](IoArgoprojEventsV1alpha1URLArtifact.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md deleted file mode 100644 index 0ce8d54efb07..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# IoArgoprojEventsV1alpha1AzureEventHubsTrigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**fqdn** | **String** | | [optional] -**hubName** | **String** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] -**sharedAccessKey** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**sharedAccessKeyName** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md deleted file mode 100644 index 399ed34df3ce..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md +++ /dev/null @@ -1,19 +0,0 @@ - - -# IoArgoprojEventsV1alpha1AzureServiceBusTrigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**connectionString** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] -**queueName** | **String** | | [optional] -**subscriptionName** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topicName** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Backoff.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1Backoff.md deleted file mode 100644 index cad1c41a1a82..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Backoff.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# IoArgoprojEventsV1alpha1Backoff - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**duration** | [**IoArgoprojEventsV1alpha1Int64OrString**](IoArgoprojEventsV1alpha1Int64OrString.md) | | [optional] -**factor** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] -**jitter** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] -**steps** | **Integer** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md deleted file mode 100644 index 64625bbdc684..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md +++ /dev/null @@ -1,22 +0,0 @@ - - -# IoArgoprojEventsV1alpha1BitbucketEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BitbucketAuth**](IoArgoprojEventsV1alpha1BitbucketAuth.md) | | [optional] -**deleteHookOnFinish** | **Boolean** | | [optional] -**events** | **List<String>** | Events this webhook is subscribed to. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**owner** | **String** | | [optional] -**projectKey** | **String** | | [optional] -**repositories** | [**List<IoArgoprojEventsV1alpha1BitbucketRepository>**](IoArgoprojEventsV1alpha1BitbucketRepository.md) | | [optional] -**repositorySlug** | **String** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md deleted file mode 100644 index bfa0828acc0b..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md +++ /dev/null @@ -1,24 +0,0 @@ - - -# IoArgoprojEventsV1alpha1BitbucketServerEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**accessToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**bitbucketserverBaseURL** | **String** | | [optional] -**deleteHookOnFinish** | **Boolean** | | [optional] -**events** | **List<String>** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**projectKey** | **String** | | [optional] -**repositories** | [**List<IoArgoprojEventsV1alpha1BitbucketServerRepository>**](IoArgoprojEventsV1alpha1BitbucketServerRepository.md) | | [optional] -**repositorySlug** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**webhook** | 
[**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**webhookSecret** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md deleted file mode 100644 index cc5597a0cdce..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1BitbucketServerRepository - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**projectKey** | **String** | | [optional] -**repositorySlug** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md deleted file mode 100644 index 763f48320b4b..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# IoArgoprojEventsV1alpha1ConditionsResetCriteria - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**byTime** | [**IoArgoprojEventsV1alpha1ConditionsResetByTime**](IoArgoprojEventsV1alpha1ConditionsResetByTime.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md deleted file mode 100644 index 5d3338716352..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1EventPersistence - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- 
-**catchup** | [**IoArgoprojEventsV1alpha1CatchupConfiguration**](IoArgoprojEventsV1alpha1CatchupConfiguration.md) | | [optional] -**configMap** | [**IoArgoprojEventsV1alpha1ConfigMapPersistence**](IoArgoprojEventsV1alpha1ConfigMapPersistence.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSource.md deleted file mode 100644 index 31c2033234fa..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSource.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# IoArgoprojEventsV1alpha1EventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] -**spec** | [**IoArgoprojEventsV1alpha1EventSourceSpec**](IoArgoprojEventsV1alpha1EventSourceSpec.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1EventSourceStatus**](IoArgoprojEventsV1alpha1EventSourceStatus.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md deleted file mode 100644 index b8e069e89bdf..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md +++ /dev/null @@ -1,47 +0,0 @@ - - -# IoArgoprojEventsV1alpha1EventSourceSpec - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**amqp** | [**Map<String, IoArgoprojEventsV1alpha1AMQPEventSource>**](IoArgoprojEventsV1alpha1AMQPEventSource.md) | | [optional] -**azureEventsHub** | [**Map<String, IoArgoprojEventsV1alpha1AzureEventsHubEventSource>**](IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] -**azureQueueStorage** | [**Map<String, 
IoArgoprojEventsV1alpha1AzureQueueStorageEventSource>**](IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md) | | [optional] -**azureServiceBus** | [**Map<String, IoArgoprojEventsV1alpha1AzureServiceBusEventSource>**](IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] -**bitbucket** | [**Map<String, IoArgoprojEventsV1alpha1BitbucketEventSource>**](IoArgoprojEventsV1alpha1BitbucketEventSource.md) | | [optional] -**bitbucketserver** | [**Map<String, IoArgoprojEventsV1alpha1BitbucketServerEventSource>**](IoArgoprojEventsV1alpha1BitbucketServerEventSource.md) | | [optional] -**calendar** | [**Map<String, IoArgoprojEventsV1alpha1CalendarEventSource>**](IoArgoprojEventsV1alpha1CalendarEventSource.md) | | [optional] -**emitter** | [**Map<String, IoArgoprojEventsV1alpha1EmitterEventSource>**](IoArgoprojEventsV1alpha1EmitterEventSource.md) | | [optional] -**eventBusName** | **String** | | [optional] -**file** | [**Map<String, IoArgoprojEventsV1alpha1FileEventSource>**](IoArgoprojEventsV1alpha1FileEventSource.md) | | [optional] -**generic** | [**Map<String, IoArgoprojEventsV1alpha1GenericEventSource>**](IoArgoprojEventsV1alpha1GenericEventSource.md) | | [optional] -**gerrit** | [**Map<String, IoArgoprojEventsV1alpha1GerritEventSource>**](IoArgoprojEventsV1alpha1GerritEventSource.md) | | [optional] -**github** | [**Map<String, IoArgoprojEventsV1alpha1GithubEventSource>**](IoArgoprojEventsV1alpha1GithubEventSource.md) | | [optional] -**gitlab** | [**Map<String, IoArgoprojEventsV1alpha1GitlabEventSource>**](IoArgoprojEventsV1alpha1GitlabEventSource.md) | | [optional] -**hdfs** | [**Map<String, IoArgoprojEventsV1alpha1HDFSEventSource>**](IoArgoprojEventsV1alpha1HDFSEventSource.md) | | [optional] -**kafka** | [**Map<String, IoArgoprojEventsV1alpha1KafkaEventSource>**](IoArgoprojEventsV1alpha1KafkaEventSource.md) | | [optional] -**minio** | [**Map<String, IoArgoprojEventsV1alpha1S3Artifact>**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] -**mqtt** | 
[**Map<String, IoArgoprojEventsV1alpha1MQTTEventSource>**](IoArgoprojEventsV1alpha1MQTTEventSource.md) | | [optional] -**nats** | [**Map<String, IoArgoprojEventsV1alpha1NATSEventsSource>**](IoArgoprojEventsV1alpha1NATSEventsSource.md) | | [optional] -**nsq** | [**Map<String, IoArgoprojEventsV1alpha1NSQEventSource>**](IoArgoprojEventsV1alpha1NSQEventSource.md) | | [optional] -**pubSub** | [**Map<String, IoArgoprojEventsV1alpha1PubSubEventSource>**](IoArgoprojEventsV1alpha1PubSubEventSource.md) | | [optional] -**pulsar** | [**Map<String, IoArgoprojEventsV1alpha1PulsarEventSource>**](IoArgoprojEventsV1alpha1PulsarEventSource.md) | | [optional] -**redis** | [**Map<String, IoArgoprojEventsV1alpha1RedisEventSource>**](IoArgoprojEventsV1alpha1RedisEventSource.md) | | [optional] -**redisStream** | [**Map<String, IoArgoprojEventsV1alpha1RedisStreamEventSource>**](IoArgoprojEventsV1alpha1RedisStreamEventSource.md) | | [optional] -**replicas** | **Integer** | | [optional] -**resource** | [**Map<String, IoArgoprojEventsV1alpha1ResourceEventSource>**](IoArgoprojEventsV1alpha1ResourceEventSource.md) | | [optional] -**service** | [**IoArgoprojEventsV1alpha1Service**](IoArgoprojEventsV1alpha1Service.md) | | [optional] -**sftp** | [**Map<String, IoArgoprojEventsV1alpha1SFTPEventSource>**](IoArgoprojEventsV1alpha1SFTPEventSource.md) | | [optional] -**slack** | [**Map<String, IoArgoprojEventsV1alpha1SlackEventSource>**](IoArgoprojEventsV1alpha1SlackEventSource.md) | | [optional] -**sns** | [**Map<String, IoArgoprojEventsV1alpha1SNSEventSource>**](IoArgoprojEventsV1alpha1SNSEventSource.md) | | [optional] -**sqs** | [**Map<String, IoArgoprojEventsV1alpha1SQSEventSource>**](IoArgoprojEventsV1alpha1SQSEventSource.md) | | [optional] -**storageGrid** | [**Map<String, IoArgoprojEventsV1alpha1StorageGridEventSource>**](IoArgoprojEventsV1alpha1StorageGridEventSource.md) | | [optional] -**stripe** | [**Map<String, 
IoArgoprojEventsV1alpha1StripeEventSource>**](IoArgoprojEventsV1alpha1StripeEventSource.md) | | [optional] -**template** | [**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**webhook** | [**Map<String, IoArgoprojEventsV1alpha1WebhookEventSource>**](IoArgoprojEventsV1alpha1WebhookEventSource.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md deleted file mode 100644 index 0f4b465f6735..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md +++ /dev/null @@ -1,13 +0,0 @@ - - -# IoArgoprojEventsV1alpha1EventSourceStatus - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md deleted file mode 100644 index e3e353fd1def..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1ExprFilter - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**expr** | **String** | Expr refers to the expression that determines the outcome of the filter. | [optional] -**fields** | [**List<IoArgoprojEventsV1alpha1PayloadField>**](IoArgoprojEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. 
| [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md deleted file mode 100644 index 23c0ac16358b..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# IoArgoprojEventsV1alpha1FileEventSource - -FileEventSource describes an event-source for file related events. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**eventType** | **String** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**polling** | **Boolean** | | [optional] -**watchPathConfig** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md deleted file mode 100644 index 12432356eded..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md +++ /dev/null @@ -1,22 +0,0 @@ - - -# IoArgoprojEventsV1alpha1GerritEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**deleteHookOnFinish** | **Boolean** | | [optional] -**events** | **List<String>** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**gerritBaseURL** | **String** | | [optional] -**hookName** | **String** | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**projects** | **List<String>** | List of project namespace paths like \"whynowy/test\". 
| [optional] -**sslVerify** | **Boolean** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md deleted file mode 100644 index 30fd1d67fd89..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md +++ /dev/null @@ -1,21 +0,0 @@ - - -# IoArgoprojEventsV1alpha1HTTPTrigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**basicAuth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**headers** | **Map<String, String>** | | [optional] -**method** | **String** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**secureHeaders** | [**List<IoArgoprojEventsV1alpha1SecureHeader>**](IoArgoprojEventsV1alpha1SecureHeader.md) | | [optional] -**timeout** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **String** | URL refers to the URL to send HTTP request to. 
| [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md deleted file mode 100644 index 2ef2540dd210..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md +++ /dev/null @@ -1,25 +0,0 @@ - - -# IoArgoprojEventsV1alpha1KafkaEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**config** | **String** | Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**consumerGroup** | [**IoArgoprojEventsV1alpha1KafkaConsumerGroup**](IoArgoprojEventsV1alpha1KafkaConsumerGroup.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**jsonBody** | **Boolean** | | [optional] -**limitEventsPerSecond** | **String** | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**partition** | **String** | | [optional] -**sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **String** | | [optional] -**url** | **String** | | [optional] -**version** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md deleted file mode 100644 index 19e6e6b794ed..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md +++ /dev/null @@ -1,26 +0,0 @@ - - -# IoArgoprojEventsV1alpha1KafkaTrigger - -KafkaTrigger refers 
to the specification of the Kafka trigger. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**compress** | **Boolean** | | [optional] -**flushFrequency** | **Integer** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] -**partition** | **Integer** | | [optional] -**partitioningKey** | **String** | The partitioning key for the messages put on the Kafka topic. +optional. | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] -**requiredAcks** | **Integer** | RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional. | [optional] -**sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] -**schemaRegistry** | [**IoArgoprojEventsV1alpha1SchemaRegistryConfig**](IoArgoprojEventsV1alpha1SchemaRegistryConfig.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **String** | | [optional] -**url** | **String** | URL of the Kafka broker, multiple URLs separated by comma. 
| [optional] -**version** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md deleted file mode 100644 index 32a4de27ed76..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md +++ /dev/null @@ -1,21 +0,0 @@ - - -# IoArgoprojEventsV1alpha1MQTTEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**clientId** | **String** | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**jsonBody** | **Boolean** | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **String** | | [optional] -**url** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md deleted file mode 100644 index f6504a7dfef9..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md +++ /dev/null @@ -1,20 +0,0 @@ - - -# IoArgoprojEventsV1alpha1NATSEventsSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1NATSAuth**](IoArgoprojEventsV1alpha1NATSAuth.md) | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | 
| [optional] -**jsonBody** | **Boolean** | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**subject** | **String** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md deleted file mode 100644 index 011b57a07722..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# IoArgoprojEventsV1alpha1NATSTrigger - -NATSTrigger refers to the specification of the NATS trigger. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**subject** | **String** | Name of the subject to put message on. | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **String** | URL of the NATS cluster. 
| [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md deleted file mode 100644 index 03d6ed100cb4..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md +++ /dev/null @@ -1,20 +0,0 @@ - - -# IoArgoprojEventsV1alpha1NSQEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**channel** | **String** | | [optional] -**connectionBackoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**hostAddress** | **String** | | [optional] -**jsonBody** | **Boolean** | | [optional] -**metadata** | **Map<String, String>** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **String** | Topic to subscribe to. 
| [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md deleted file mode 100644 index f658aecded26..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# IoArgoprojEventsV1alpha1ResourceFilter - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**afterStart** | **Boolean** | | [optional] -**createdBy** | **java.time.Instant** | | [optional] -**fields** | [**List<IoArgoprojEventsV1alpha1Selector>**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] -**labels** | [**List<IoArgoprojEventsV1alpha1Selector>**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] -**prefix** | **String** | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md deleted file mode 100644 index 040e52d6e3c3..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# IoArgoprojEventsV1alpha1SchemaRegistryConfig - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**schemaId** | **Integer** | | [optional] -**url** | **String** | Schema Registry URL. 
| [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md deleted file mode 100644 index 50bad3dbd9d0..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1SecureHeader - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**name** | **String** | | [optional] -**valueFrom** | [**IoArgoprojEventsV1alpha1ValueFromSource**](IoArgoprojEventsV1alpha1ValueFromSource.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Sensor.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1Sensor.md deleted file mode 100644 index 4aca796013fb..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Sensor.md +++ /dev/null @@ -1,15 +0,0 @@ - - -# IoArgoprojEventsV1alpha1Sensor - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**metadata** | [**io.kubernetes.client.openapi.models.V1ObjectMeta**](io.kubernetes.client.openapi.models.V1ObjectMeta.md) | | [optional] -**spec** | [**IoArgoprojEventsV1alpha1SensorSpec**](IoArgoprojEventsV1alpha1SensorSpec.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1SensorStatus**](IoArgoprojEventsV1alpha1SensorStatus.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md deleted file mode 100644 index e790e2cab872..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md +++ /dev/null @@ -1,20 +0,0 @@ - - -# IoArgoprojEventsV1alpha1SensorSpec - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**dependencies** | 
[**List<IoArgoprojEventsV1alpha1EventDependency>**](IoArgoprojEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. | [optional] -**errorOnFailedRound** | **Boolean** | ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed. | [optional] -**eventBusName** | **String** | | [optional] -**loggingFields** | **Map<String, String>** | | [optional] -**replicas** | **Integer** | | [optional] -**revisionHistoryLimit** | **Integer** | | [optional] -**template** | [**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**triggers** | [**List<IoArgoprojEventsV1alpha1Trigger>**](IoArgoprojEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md deleted file mode 100644 index 03c171d8f763..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1SensorStatus - -SensorStatus contains information about the status of a sensor. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md deleted file mode 100644 index 37c09c65bb05..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md +++ /dev/null @@ -1,21 +0,0 @@ - - -# IoArgoprojEventsV1alpha1SlackTrigger - -SlackTrigger refers to the specification of the slack notification trigger. 
- -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**attachments** | **String** | | [optional] -**blocks** | **String** | | [optional] -**channel** | **String** | | [optional] -**message** | **String** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**sender** | [**IoArgoprojEventsV1alpha1SlackSender**](IoArgoprojEventsV1alpha1SlackSender.md) | | [optional] -**slackToken** | [**io.kubernetes.client.openapi.models.V1SecretKeySelector**](io.kubernetes.client.openapi.models.V1SecretKeySelector.md) | | [optional] -**thread** | [**IoArgoprojEventsV1alpha1SlackThread**](IoArgoprojEventsV1alpha1SlackThread.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md deleted file mode 100644 index 87d610b40423..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# IoArgoprojEventsV1alpha1StandardK8STrigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**liveObject** | **Boolean** | | [optional] -**operation** | **String** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. 
| [optional] -**patchStrategy** | **String** | | [optional] -**source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Status.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1Status.md deleted file mode 100644 index 5bb95654b5d0..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Status.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1Status - -Status is a common structure which can be used for Status field. - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**conditions** | [**List<IoArgoprojEventsV1alpha1Condition>**](IoArgoprojEventsV1alpha1Condition.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Trigger.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1Trigger.md deleted file mode 100644 index d135e3698175..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1Trigger.md +++ /dev/null @@ -1,18 +0,0 @@ - - -# IoArgoprojEventsV1alpha1Trigger - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**atLeastOnce** | **Boolean** | | [optional] -**parameters** | [**List<IoArgoprojEventsV1alpha1TriggerParameter>**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**policy** | [**IoArgoprojEventsV1alpha1TriggerPolicy**](IoArgoprojEventsV1alpha1TriggerPolicy.md) | | [optional] -**rateLimit** | [**IoArgoprojEventsV1alpha1RateLimit**](IoArgoprojEventsV1alpha1RateLimit.md) | | [optional] -**retryStrategy** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**template** | [**IoArgoprojEventsV1alpha1TriggerTemplate**](IoArgoprojEventsV1alpha1TriggerTemplate.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md 
b/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md deleted file mode 100644 index 685223b472bc..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1TriggerPolicy - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**k8s** | [**IoArgoprojEventsV1alpha1K8SResourcePolicy**](IoArgoprojEventsV1alpha1K8SResourcePolicy.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1StatusPolicy**](IoArgoprojEventsV1alpha1StatusPolicy.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md deleted file mode 100644 index 8c6d3263b49e..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md +++ /dev/null @@ -1,30 +0,0 @@ - - -# IoArgoprojEventsV1alpha1TriggerTemplate - -TriggerTemplate is the template that describes trigger specification. 
- -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**argoWorkflow** | [**IoArgoprojEventsV1alpha1ArgoWorkflowTrigger**](IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md) | | [optional] -**awsLambda** | [**IoArgoprojEventsV1alpha1AWSLambdaTrigger**](IoArgoprojEventsV1alpha1AWSLambdaTrigger.md) | | [optional] -**azureEventHubs** | [**IoArgoprojEventsV1alpha1AzureEventHubsTrigger**](IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md) | | [optional] -**azureServiceBus** | [**IoArgoprojEventsV1alpha1AzureServiceBusTrigger**](IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md) | | [optional] -**conditions** | **String** | | [optional] -**conditionsReset** | [**List<IoArgoprojEventsV1alpha1ConditionsResetCriteria>**](IoArgoprojEventsV1alpha1ConditionsResetCriteria.md) | | [optional] -**custom** | [**IoArgoprojEventsV1alpha1CustomTrigger**](IoArgoprojEventsV1alpha1CustomTrigger.md) | | [optional] -**email** | [**IoArgoprojEventsV1alpha1EmailTrigger**](IoArgoprojEventsV1alpha1EmailTrigger.md) | | [optional] -**http** | [**IoArgoprojEventsV1alpha1HTTPTrigger**](IoArgoprojEventsV1alpha1HTTPTrigger.md) | | [optional] -**k8s** | [**IoArgoprojEventsV1alpha1StandardK8STrigger**](IoArgoprojEventsV1alpha1StandardK8STrigger.md) | | [optional] -**kafka** | [**IoArgoprojEventsV1alpha1KafkaTrigger**](IoArgoprojEventsV1alpha1KafkaTrigger.md) | | [optional] -**log** | [**IoArgoprojEventsV1alpha1LogTrigger**](IoArgoprojEventsV1alpha1LogTrigger.md) | | [optional] -**name** | **String** | Name is a unique name of the action to take. 
| [optional] -**nats** | [**IoArgoprojEventsV1alpha1NATSTrigger**](IoArgoprojEventsV1alpha1NATSTrigger.md) | | [optional] -**openWhisk** | [**IoArgoprojEventsV1alpha1OpenWhiskTrigger**](IoArgoprojEventsV1alpha1OpenWhiskTrigger.md) | | [optional] -**pulsar** | [**IoArgoprojEventsV1alpha1PulsarTrigger**](IoArgoprojEventsV1alpha1PulsarTrigger.md) | | [optional] -**slack** | [**IoArgoprojEventsV1alpha1SlackTrigger**](IoArgoprojEventsV1alpha1SlackTrigger.md) | | [optional] - - - diff --git a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md b/sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md deleted file mode 100644 index 144988ba1721..000000000000 --- a/sdks/java/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IoArgoprojEventsV1alpha1WebhookEventSource - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**webhookContext** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] - - - diff --git a/sdks/java/client/docs/PersistentVolumeClaimCondition.md b/sdks/java/client/docs/PersistentVolumeClaimCondition.md index 636f7abf0171..3e702ca3e801 100644 --- a/sdks/java/client/docs/PersistentVolumeClaimCondition.md +++ b/sdks/java/client/docs/PersistentVolumeClaimCondition.md @@ -12,8 +12,8 @@ Name | Type | Description | Notes **lastTransitionTime** | **java.time.Instant** | | [optional] **message** | **String** | message is the human-readable message indicating details about last transition. | [optional] **reason** | **String** | reason is a unique, this should be a short, machine understandable string that gives the reason for condition's last transition. If it reports \"Resizing\" that means the underlying persistent volume is being resized. 
| [optional] -**status** | **String** | | -**type** | **String** | | +**status** | **String** | Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required | +**type** | **String** | Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about | diff --git a/sdks/java/client/docs/PersistentVolumeClaimSpec.md b/sdks/java/client/docs/PersistentVolumeClaimSpec.md index 5832b622d773..c9cb0fcb3575 100644 --- a/sdks/java/client/docs/PersistentVolumeClaimSpec.md +++ b/sdks/java/client/docs/PersistentVolumeClaimSpec.md @@ -14,7 +14,7 @@ Name | Type | Description | Notes **resources** | [**VolumeResourceRequirements**](VolumeResourceRequirements.md) | | [optional] **selector** | [**LabelSelector**](LabelSelector.md) | | [optional] **storageClassName** | **String** | storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 | [optional] -**volumeAttributesClassName** | **String** | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. 
If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled. | [optional] +**volumeAttributesClassName** | **String** | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default). | [optional] **volumeMode** | **String** | volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec. | [optional] **volumeName** | **String** | volumeName is the binding reference to the PersistentVolume backing this claim. 
| [optional] diff --git a/sdks/java/client/docs/PersistentVolumeClaimStatus.md b/sdks/java/client/docs/PersistentVolumeClaimStatus.md index a53e1a085396..ed0f621ff089 100644 --- a/sdks/java/client/docs/PersistentVolumeClaimStatus.md +++ b/sdks/java/client/docs/PersistentVolumeClaimStatus.md @@ -13,7 +13,7 @@ Name | Type | Description | Notes **allocatedResources** | **Map<String, String>** | allocatedResources tracks the resources allocated to a PVC including its capacity. Key names follow standard Kubernetes label syntax. Valid values are either: * Un-prefixed keys: - storage - the capacity of the volume. * Custom resources must use implementation-defined prefixed names such as \"example.com/my-custom-resource\" Apart from above values - keys that are unprefixed or have kubernetes.io prefix are considered reserved and hence may not be used. Capacity reported here may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. A controller that receives PVC update with previously unknown resourceName should ignore the update for the purpose it was designed. For example - a controller that only is responsible for resizing capacity of the volume, should ignore PVC updates that change other valid resources associated with PVC. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature. | [optional] **capacity** | **Map<String, String>** | capacity represents the actual resources of the underlying volume. 
| [optional] **conditions** | [**List<PersistentVolumeClaimCondition>**](PersistentVolumeClaimCondition.md) | conditions is the current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'Resizing'. | [optional] -**currentVolumeAttributesClassName** | **String** | currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature. | [optional] +**currentVolumeAttributesClassName** | **String** | currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default). | [optional] **modifyVolumeStatus** | [**ModifyVolumeStatus**](ModifyVolumeStatus.md) | | [optional] **phase** | **String** | phase represents the current phase of PersistentVolumeClaim. | [optional] diff --git a/sdks/java/client/docs/PodAffinityTerm.md b/sdks/java/client/docs/PodAffinityTerm.md index bf9d7fd256d2..1cd68ca721d5 100644 --- a/sdks/java/client/docs/PodAffinityTerm.md +++ b/sdks/java/client/docs/PodAffinityTerm.md @@ -9,8 +9,8 @@ Defines a set of pods (namely those matching the labelSelector relative to the g Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **labelSelector** | [**LabelSelector**](LabelSelector.md) | | [optional] -**matchLabelKeys** | **List<String>** | MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. | [optional] -**mismatchLabelKeys** | **List<String>** | MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. | [optional] +**matchLabelKeys** | **List<String>** | MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. 
This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). | [optional] +**mismatchLabelKeys** | **List<String>** | MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). | [optional] **namespaceSelector** | [**LabelSelector**](LabelSelector.md) | | [optional] **namespaces** | **List<String>** | namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\". | [optional] **topologyKey** | **String** | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. | diff --git a/sdks/java/client/docs/PodDNSConfigOption.md b/sdks/java/client/docs/PodDNSConfigOption.md index 98799fc20e95..8d54a13230ad 100644 --- a/sdks/java/client/docs/PodDNSConfigOption.md +++ b/sdks/java/client/docs/PodDNSConfigOption.md @@ -8,8 +8,8 @@ PodDNSConfigOption defines DNS resolver options of a pod. 
Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **String** | Required. | [optional] -**value** | **String** | | [optional] +**name** | **String** | Name is this DNS resolver option's name. Required. | [optional] +**value** | **String** | Value is this DNS resolver option's value. | [optional] diff --git a/sdks/java/client/docs/ProjectedVolumeSource.md b/sdks/java/client/docs/ProjectedVolumeSource.md index 842bac85a0ea..6db522ebfec5 100644 --- a/sdks/java/client/docs/ProjectedVolumeSource.md +++ b/sdks/java/client/docs/ProjectedVolumeSource.md @@ -9,7 +9,7 @@ Represents a projected volume source Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **defaultMode** | **Integer** | defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**sources** | [**List<VolumeProjection>**](VolumeProjection.md) | sources is the list of volume projections | [optional] +**sources** | [**List<VolumeProjection>**](VolumeProjection.md) | sources is the list of volume projections. Each entry in this list handles one source. | [optional] diff --git a/sdks/java/client/docs/ResourceClaim.md b/sdks/java/client/docs/ResourceClaim.md index 60050a483e2b..6fd8873a4cb1 100644 --- a/sdks/java/client/docs/ResourceClaim.md +++ b/sdks/java/client/docs/ResourceClaim.md @@ -9,6 +9,7 @@ ResourceClaim references one entry in PodSpec.ResourceClaims. 
Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **String** | Name must match the name of one entry in pod.spec.resourceClaims of the Pod where this field is used. It makes that resource available inside a container. | +**request** | **String** | Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request. | [optional] diff --git a/sdks/java/client/docs/SensorCreateSensorRequest.md b/sdks/java/client/docs/SensorCreateSensorRequest.md index 36542c83316f..a18fbb4812d3 100644 --- a/sdks/java/client/docs/SensorCreateSensorRequest.md +++ b/sdks/java/client/docs/SensorCreateSensorRequest.md @@ -9,7 +9,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **createOptions** | [**CreateOptions**](CreateOptions.md) | | [optional] **namespace** | **String** | | [optional] -**sensor** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**sensor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] diff --git a/sdks/java/client/docs/SensorSensorWatchEvent.md b/sdks/java/client/docs/SensorSensorWatchEvent.md index e082f68005f8..8d4bdd0e8486 100644 --- a/sdks/java/client/docs/SensorSensorWatchEvent.md +++ b/sdks/java/client/docs/SensorSensorWatchEvent.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**_object** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**_object** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **type** | **String** | | [optional] diff --git a/sdks/java/client/docs/SensorServiceApi.md b/sdks/java/client/docs/SensorServiceApi.md index 
71dc45b67d5c..e62b2ff89220 100644 --- a/sdks/java/client/docs/SensorServiceApi.md +++ b/sdks/java/client/docs/SensorServiceApi.md @@ -15,7 +15,7 @@ Method | HTTP request | Description # **sensorServiceCreateSensor** -> IoArgoprojEventsV1alpha1Sensor sensorServiceCreateSensor(namespace, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor sensorServiceCreateSensor(namespace, body) @@ -44,7 +44,7 @@ public class Example { String namespace = "namespace_example"; // String | SensorCreateSensorRequest body = new SensorCreateSensorRequest(); // SensorCreateSensorRequest | try { - IoArgoprojEventsV1alpha1Sensor result = apiInstance.sensorServiceCreateSensor(namespace, body); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor result = apiInstance.sensorServiceCreateSensor(namespace, body); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceCreateSensor"); @@ -66,7 +66,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization @@ -85,7 +85,7 @@ Name | Type | Description | Notes # **sensorServiceDeleteSensor** -> Object sensorServiceDeleteSensor(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object sensorServiceDeleteSensor(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential) @@ -119,8 +119,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the 
PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. 
The default value is false, and the user must opt in to enable it +optional. try { - Object result = apiInstance.sensorServiceDeleteSensor(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.sensorServiceDeleteSensor(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceDeleteSensor"); @@ -145,6 +146,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type @@ -167,7 +169,7 @@ Name | Type | Description | Notes # **sensorServiceGetSensor** -> IoArgoprojEventsV1alpha1Sensor sensorServiceGetSensor(namespace, name, getOptionsResourceVersion) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor sensorServiceGetSensor(namespace, name, getOptionsResourceVersion) @@ -197,7 +199,7 @@ public class Example { String name = "name_example"; // String | String getOptionsResourceVersion = "getOptionsResourceVersion_example"; // String | resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. 
Defaults to unset +optional try { - IoArgoprojEventsV1alpha1Sensor result = apiInstance.sensorServiceGetSensor(namespace, name, getOptionsResourceVersion); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor result = apiInstance.sensorServiceGetSensor(namespace, name, getOptionsResourceVersion); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceGetSensor"); @@ -220,7 +222,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization @@ -239,7 +241,7 @@ Name | Type | Description | Notes # **sensorServiceListSensors** -> IoArgoprojEventsV1alpha1SensorList sensorServiceListSensors(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList sensorServiceListSensors(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents) @@ -277,7 +279,7 @@ public class Example { String listOptionsContinue = "listOptionsContinue_example"; // String | The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. Boolean listOptionsSendInitialEvents = true; // Boolean | `sendInitialEvents=true` may be set together with `watch=true`. In that case, the watch stream will begin with synthetic events to produce the current state of objects in the collection. Once all such events have been sent, a synthetic \"Bookmark\" event will be sent. The bookmark will report the ResourceVersion (RV) corresponding to the set of objects, and be marked with `\"io.k8s.initial-events-end\": \"true\"` annotation. Afterwards, the watch stream will proceed as usual, sending watch events corresponding to changes (subsequent to the RV) to objects watched. When `sendInitialEvents` option is set, we require `resourceVersionMatch` option to also be set. The semantic of the watch request is as following: - `resourceVersionMatch` = NotOlderThan is interpreted as \"data at least as new as the provided `resourceVersion`\" and the bookmark event is send when the state is synced to a `resourceVersion` at least as fresh as the one provided by the ListOptions. 
If `resourceVersion` is unset, this is interpreted as \"consistent read\" and the bookmark event is send when the state is synced at least to the moment when request started being processed. - `resourceVersionMatch` set to any other value or unset Invalid error is returned. Defaults to true if `resourceVersion=\"\"` or `resourceVersion=\"0\"` (for backward compatibility reasons) and to false otherwise. +optional try { - IoArgoprojEventsV1alpha1SensorList result = apiInstance.sensorServiceListSensors(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList result = apiInstance.sensorServiceListSensors(namespace, listOptionsLabelSelector, listOptionsFieldSelector, listOptionsWatch, listOptionsAllowWatchBookmarks, listOptionsResourceVersion, listOptionsResourceVersionMatch, listOptionsTimeoutSeconds, listOptionsLimit, listOptionsContinue, listOptionsSendInitialEvents); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceListSensors"); @@ -308,7 +310,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1SensorList**](IoArgoprojEventsV1alpha1SensorList.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md) ### Authorization @@ -327,7 +329,7 @@ Name | Type | Description | Notes # **sensorServiceSensorsLogs** -> StreamResultOfSensorLogEntry sensorServiceSensorsLogs(namespace, name, triggerName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, 
podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend) +> StreamResultOfSensorLogEntry sensorServiceSensorsLogs(namespace, name, triggerName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend, podLogOptionsStream) @@ -364,11 +366,12 @@ public class Example { String podLogOptionsSinceTimeSeconds = "podLogOptionsSinceTimeSeconds_example"; // String | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. Integer podLogOptionsSinceTimeNanos = 56; // Integer | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. Boolean podLogOptionsTimestamps = true; // Boolean | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. - String podLogOptionsTailLines = "podLogOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + String podLogOptionsTailLines = "podLogOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. String podLogOptionsLimitBytes = "podLogOptionsLimitBytes_example"; // String | If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. Boolean podLogOptionsInsecureSkipTLSVerifyBackend = true; // Boolean | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + String podLogOptionsStream = "podLogOptionsStream_example"; // String | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
try { - StreamResultOfSensorLogEntry result = apiInstance.sensorServiceSensorsLogs(namespace, name, triggerName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend); + StreamResultOfSensorLogEntry result = apiInstance.sensorServiceSensorsLogs(namespace, name, triggerName, grep, podLogOptionsContainer, podLogOptionsFollow, podLogOptionsPrevious, podLogOptionsSinceSeconds, podLogOptionsSinceTimeSeconds, podLogOptionsSinceTimeNanos, podLogOptionsTimestamps, podLogOptionsTailLines, podLogOptionsLimitBytes, podLogOptionsInsecureSkipTLSVerifyBackend, podLogOptionsStream); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceSensorsLogs"); @@ -396,9 +399,10 @@ Name | Type | Description | Notes **podLogOptionsSinceTimeSeconds** | **String**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **podLogOptionsSinceTimeNanos** | **Integer**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **podLogOptionsTimestamps** | **Boolean**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **podLogOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. 
| [optional] + **podLogOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **podLogOptionsLimitBytes** | **String**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **podLogOptionsInsecureSkipTLSVerifyBackend** | **Boolean**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **podLogOptionsStream** | **String**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
| [optional] ### Return type @@ -421,7 +425,7 @@ Name | Type | Description | Notes # **sensorServiceUpdateSensor** -> IoArgoprojEventsV1alpha1Sensor sensorServiceUpdateSensor(namespace, name, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor sensorServiceUpdateSensor(namespace, name, body) @@ -451,7 +455,7 @@ public class Example { String name = "name_example"; // String | SensorUpdateSensorRequest body = new SensorUpdateSensorRequest(); // SensorUpdateSensorRequest | try { - IoArgoprojEventsV1alpha1Sensor result = apiInstance.sensorServiceUpdateSensor(namespace, name, body); + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor result = apiInstance.sensorServiceUpdateSensor(namespace, name, body); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling SensorServiceApi#sensorServiceUpdateSensor"); @@ -474,7 +478,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization diff --git a/sdks/java/client/docs/SensorUpdateSensorRequest.md b/sdks/java/client/docs/SensorUpdateSensorRequest.md index f54eaf191aad..07199ae7f443 100644 --- a/sdks/java/client/docs/SensorUpdateSensorRequest.md +++ b/sdks/java/client/docs/SensorUpdateSensorRequest.md @@ -9,7 +9,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **String** | | [optional] **namespace** | **String** | | [optional] -**sensor** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**sensor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] diff --git a/sdks/java/client/docs/TypedObjectReference.md b/sdks/java/client/docs/TypedObjectReference.md index 
9230e47d5268..1f2f89aa0423 100644 --- a/sdks/java/client/docs/TypedObjectReference.md +++ b/sdks/java/client/docs/TypedObjectReference.md @@ -2,6 +2,7 @@ # TypedObjectReference +TypedObjectReference contains enough information to let you locate the typed referenced object ## Properties diff --git a/sdks/java/client/docs/VolumeProjection.md b/sdks/java/client/docs/VolumeProjection.md index 9f0fefea1505..761d8e6cf6b0 100644 --- a/sdks/java/client/docs/VolumeProjection.md +++ b/sdks/java/client/docs/VolumeProjection.md @@ -2,7 +2,7 @@ # VolumeProjection -Projection that may be projected along with other supported volume types +Projection that may be projected along with other supported volume types. Exactly one of these fields must be set. ## Properties diff --git a/sdks/java/client/docs/WorkflowServiceApi.md b/sdks/java/client/docs/WorkflowServiceApi.md index d99bc3d91623..b7d541397e1b 100644 --- a/sdks/java/client/docs/WorkflowServiceApi.md +++ b/sdks/java/client/docs/WorkflowServiceApi.md @@ -95,7 +95,7 @@ Name | Type | Description | Notes # **workflowServiceDeleteWorkflow** -> Object workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, force) +> Object workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential, force) @@ -129,9 +129,10 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. 
Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
Boolean force = true; // Boolean | try { - Object result = apiInstance.workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, force); + Object result = apiInstance.workflowServiceDeleteWorkflow(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential, force); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling WorkflowServiceApi#workflowServiceDeleteWorkflow"); @@ -156,6 +157,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] **force** | **Boolean**| | [optional] ### Return type @@ -415,7 +417,7 @@ Name | Type | Description | Notes # **workflowServicePodLogs** -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflowServicePodLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, grep, selector) +> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflowServicePodLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, logOptionsStream, grep, selector) DEPRECATED: Cannot work via HTTP if podName is an empty string. 
Use WorkflowLogs. @@ -451,13 +453,14 @@ public class Example { String logOptionsSinceTimeSeconds = "logOptionsSinceTimeSeconds_example"; // String | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. Integer logOptionsSinceTimeNanos = 56; // Integer | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. Boolean logOptionsTimestamps = true; // Boolean | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. - String logOptionsTailLines = "logOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + String logOptionsTailLines = "logOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. String logOptionsLimitBytes = "logOptionsLimitBytes_example"; // String | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. Boolean logOptionsInsecureSkipTLSVerifyBackend = true; // Boolean | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. 
This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + String logOptionsStream = "logOptionsStream_example"; // String | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. String grep = "grep_example"; // String | String selector = "selector_example"; // String | try { - StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry result = apiInstance.workflowServicePodLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, grep, selector); + StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry result = apiInstance.workflowServicePodLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, logOptionsStream, grep, selector); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling WorkflowServiceApi#workflowServicePodLogs"); @@ -484,9 +487,10 @@ Name | Type | Description | Notes **logOptionsSinceTimeSeconds** | **String**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **logOptionsSinceTimeNanos** | **Integer**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **logOptionsTimestamps** | **Boolean**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **logOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **logOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **logOptionsLimitBytes** | **String**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **logOptionsInsecureSkipTLSVerifyBackend** | **Boolean**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. 
an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **logOptionsStream** | **String**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. | [optional] **grep** | **String**| | [optional] **selector** | **String**| | [optional] @@ -1263,7 +1267,7 @@ Name | Type | Description | Notes # **workflowServiceWorkflowLogs** -> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflowServiceWorkflowLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, grep, selector) +> StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry workflowServiceWorkflowLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, logOptionsStream, grep, selector) @@ -1299,13 +1303,14 @@ public class Example { String logOptionsSinceTimeSeconds = "logOptionsSinceTimeSeconds_example"; // String | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. Integer logOptionsSinceTimeNanos = 56; // Integer | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. 
This field may be limited in precision depending on context. Boolean logOptionsTimestamps = true; // Boolean | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. - String logOptionsTailLines = "logOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. + String logOptionsTailLines = "logOptionsTailLines_example"; // String | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. String logOptionsLimitBytes = "logOptionsLimitBytes_example"; // String | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. Boolean logOptionsInsecureSkipTLSVerifyBackend = true; // Boolean | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. + String logOptionsStream = "logOptionsStream_example"; // String | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". 
If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. String grep = "grep_example"; // String | String selector = "selector_example"; // String | try { - StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry result = apiInstance.workflowServiceWorkflowLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, grep, selector); + StreamResultOfIoArgoprojWorkflowV1alpha1LogEntry result = apiInstance.workflowServiceWorkflowLogs(namespace, name, podName, logOptionsContainer, logOptionsFollow, logOptionsPrevious, logOptionsSinceSeconds, logOptionsSinceTimeSeconds, logOptionsSinceTimeNanos, logOptionsTimestamps, logOptionsTailLines, logOptionsLimitBytes, logOptionsInsecureSkipTLSVerifyBackend, logOptionsStream, grep, selector); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling WorkflowServiceApi#workflowServiceWorkflowLogs"); @@ -1332,9 +1337,10 @@ Name | Type | Description | Notes **logOptionsSinceTimeSeconds** | **String**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **logOptionsSinceTimeNanos** | **Integer**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **logOptionsTimestamps** | **Boolean**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. 
+optional. | [optional] - **logOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **logOptionsTailLines** | **String**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **logOptionsLimitBytes** | **String**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **logOptionsInsecureSkipTLSVerifyBackend** | **Boolean**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **logOptionsStream** | **String**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
| [optional] **grep** | **String**| | [optional] **selector** | **String**| | [optional] diff --git a/sdks/java/client/docs/WorkflowTemplateServiceApi.md b/sdks/java/client/docs/WorkflowTemplateServiceApi.md index bfa34c84e4c1..bf552a53a9d8 100644 --- a/sdks/java/client/docs/WorkflowTemplateServiceApi.md +++ b/sdks/java/client/docs/WorkflowTemplateServiceApi.md @@ -84,7 +84,7 @@ Name | Type | Description | Notes # **workflowTemplateServiceDeleteWorkflowTemplate** -> Object workflowTemplateServiceDeleteWorkflowTemplate(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun) +> Object workflowTemplateServiceDeleteWorkflowTemplate(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential) @@ -118,8 +118,9 @@ public class Example { Boolean deleteOptionsOrphanDependents = true; // Boolean | Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. String deleteOptionsPropagationPolicy = "deleteOptionsPropagationPolicy_example"; // String | Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. List deleteOptionsDryRun = Arrays.asList(); // List | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. + Boolean deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential = true; // Boolean | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
try { - Object result = apiInstance.workflowTemplateServiceDeleteWorkflowTemplate(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun); + Object result = apiInstance.workflowTemplateServiceDeleteWorkflowTemplate(namespace, name, deleteOptionsGracePeriodSeconds, deleteOptionsPreconditionsUid, deleteOptionsPreconditionsResourceVersion, deleteOptionsOrphanDependents, deleteOptionsPropagationPolicy, deleteOptionsDryRun, deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling WorkflowTemplateServiceApi#workflowTemplateServiceDeleteWorkflowTemplate"); @@ -144,6 +145,7 @@ Name | Type | Description | Notes **deleteOptionsOrphanDependents** | **Boolean**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **deleteOptionsPropagationPolicy** | **String**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **deleteOptionsDryRun** | [**List<String>**](String.md)| When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **deleteOptionsIgnoreStoreReadErrorWithClusterBreakingPotential** | **Boolean**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
| [optional] ### Return type diff --git a/sdks/python/Makefile b/sdks/python/Makefile index a9b71d2df457..e29efda9e2b7 100755 --- a/sdks/python/Makefile +++ b/sdks/python/Makefile @@ -11,7 +11,7 @@ DOCKER = docker run --rm --user $(shell id -u):$(shell id -g) -v $(WD):/wd --wor CHOWN = chown -R $(shell id -u):$(shell id -g) publish: generate - pip install setuptools twine build + pip install -U packaging setuptools twine build python -m build --sdist --wheel --outdir client/dist/ client twine check client/dist/* twine upload client/dist/* -u __token__ -p ${PYPI_API_TOKEN} @@ -44,7 +44,7 @@ else --model-name-suffix '' \ --artifact-id argo-python-client \ --global-property modelTests=false \ - --global-property packageName=argo_workflows \ + --global-property packageName=argo_workflows \ --generate-alias-as-model # https://vsupalov.com/docker-shared-permissions/#set-the-docker-user-when-running-your-container $(CHOWN) $(WD) || sudo $(CHOWN) $(WD) diff --git a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py index 32e4f961da96..489389fddb44 100644 --- a/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/cluster_workflow_template_service_api.py @@ -112,6 +112,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential', ], 'required': [ 'name', @@ -143,6 +144,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), }, 'attribute_map': { 'name': 'name', @@ -152,6 +155,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 
'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', }, 'location_map': { 'name': 'path', @@ -161,6 +165,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -535,6 +540,7 @@ def delete_cluster_workflow_template( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. 
[optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object diff --git a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py index 411c83e309e6..d133c4fd9c41 100644 --- a/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/cron_workflow_service_api.py @@ -121,6 +121,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential', ], 'required': [ 'namespace', @@ -155,6 +156,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), }, 'attribute_map': { 'namespace': 'namespace', @@ -165,6 +168,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 
'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', }, 'location_map': { 'namespace': 'path', @@ -175,6 +179,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -708,6 +713,7 @@ def delete_cron_workflow( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. 
[optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object diff --git a/sdks/python/client/argo_workflows/api/event_source_service_api.py b/sdks/python/client/argo_workflows/api/event_source_service_api.py index 8ae41de842ae..21f6af203a37 100644 --- a/sdks/python/client/argo_workflows/api/event_source_service_api.py +++ b/sdks/python/client/argo_workflows/api/event_source_service_api.py @@ -23,9 +23,9 @@ ) from argo_workflows.model.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest from argo_workflows.model.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList from argo_workflows.model.grpc_gateway_runtime_error import 
GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList from argo_workflows.model.stream_result_of_eventsource_event_source_watch_event import StreamResultOfEventsourceEventSourceWatchEvent from argo_workflows.model.stream_result_of_eventsource_log_entry import StreamResultOfEventsourceLogEntry @@ -43,7 +43,7 @@ def __init__(self, api_client=None): self.api_client = api_client self.create_event_source_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), 'auth': [ 'BearerToken' ], @@ -120,6 +120,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential', ], 'required': [ 'namespace', @@ -154,6 +155,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), }, 'attribute_map': { 'namespace': 'namespace', @@ -164,6 +167,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', }, 'location_map': { 'namespace': 'path', @@ -174,6 +178,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 
'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -215,6 +220,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines', 'pod_log_options_limit_bytes', 'pod_log_options_insecure_skip_tls_verify_backend', + 'pod_log_options_stream', ], 'required': [ 'namespace', @@ -262,6 +268,8 @@ def __init__(self, api_client=None): (str,), 'pod_log_options_insecure_skip_tls_verify_backend': (bool,), + 'pod_log_options_stream': + (str,), }, 'attribute_map': { 'namespace': 'namespace', @@ -279,6 +287,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines': 'podLogOptions.tailLines', 'pod_log_options_limit_bytes': 'podLogOptions.limitBytes', 'pod_log_options_insecure_skip_tls_verify_backend': 'podLogOptions.insecureSkipTLSVerifyBackend', + 'pod_log_options_stream': 'podLogOptions.stream', }, 'location_map': { 'namespace': 'path', @@ -296,6 +305,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines': 'query', 'pod_log_options_limit_bytes': 'query', 'pod_log_options_insecure_skip_tls_verify_backend': 'query', + 'pod_log_options_stream': 'query', }, 'collection_format_map': { } @@ -310,7 +320,7 @@ def __init__(self, api_client=None): ) self.get_event_source_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), 'auth': [ 'BearerToken' ], @@ -367,7 +377,7 @@ def __init__(self, api_client=None): ) self.list_event_sources_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSourceList,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList,), 'auth': [ 'BearerToken' ], @@ -468,7 +478,7 @@ def __init__(self, api_client=None): ) self.update_event_source_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1EventSource,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), 'auth': [ 
'BearerToken' ], @@ -679,7 +689,7 @@ def create_event_source( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1EventSource + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource If the method is called asynchronously, returns the request thread. """ @@ -738,6 +748,7 @@ def delete_event_source( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. [optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. 
NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object @@ -828,9 +839,10 @@ def event_sources_logs( pod_log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] pod_log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] pod_log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] + pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional.. [optional] pod_log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] pod_log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] + pod_log_options_stream (str): Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object @@ -938,7 +950,7 @@ def get_event_source( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1EventSource + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource If the method is called asynchronously, returns the request thread. """ @@ -1027,7 +1039,7 @@ def list_event_sources( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1EventSourceList + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList If the method is called asynchronously, returns the request thread. 
""" @@ -1108,7 +1120,7 @@ def update_event_source( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1EventSource + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource If the method is called asynchronously, returns the request thread. """ diff --git a/sdks/python/client/argo_workflows/api/sensor_service_api.py b/sdks/python/client/argo_workflows/api/sensor_service_api.py index 698b773e7826..46d0c7515a1d 100644 --- a/sdks/python/client/argo_workflows/api/sensor_service_api.py +++ b/sdks/python/client/argo_workflows/api/sensor_service_api.py @@ -21,9 +21,9 @@ none_type, validate_and_convert_types ) +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList from argo_workflows.model.sensor_create_sensor_request import SensorCreateSensorRequest from argo_workflows.model.sensor_update_sensor_request import SensorUpdateSensorRequest from argo_workflows.model.stream_result_of_sensor_log_entry import StreamResultOfSensorLogEntry @@ -43,7 +43,7 @@ def __init__(self, api_client=None): self.api_client = api_client self.create_sensor_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), 'auth': [ 'BearerToken' ], @@ -120,6 +120,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 
'delete_options_ignore_store_read_error_with_cluster_breaking_potential', ], 'required': [ 'namespace', @@ -154,6 +155,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), }, 'attribute_map': { 'namespace': 'namespace', @@ -164,6 +167,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', }, 'location_map': { 'namespace': 'path', @@ -174,6 +178,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -189,7 +194,7 @@ def __init__(self, api_client=None): ) self.get_sensor_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), 'auth': [ 'BearerToken' ], @@ -251,7 +256,7 @@ def __init__(self, api_client=None): ) self.list_sensors_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1SensorList,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList,), 'auth': [ 'BearerToken' ], @@ -377,6 +382,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines', 'pod_log_options_limit_bytes', 'pod_log_options_insecure_skip_tls_verify_backend', + 'pod_log_options_stream', ], 'required': [ 'namespace', @@ -422,6 +428,8 @@ def __init__(self, api_client=None): (str,), 'pod_log_options_insecure_skip_tls_verify_backend': 
(bool,), + 'pod_log_options_stream': + (str,), }, 'attribute_map': { 'namespace': 'namespace', @@ -438,6 +446,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines': 'podLogOptions.tailLines', 'pod_log_options_limit_bytes': 'podLogOptions.limitBytes', 'pod_log_options_insecure_skip_tls_verify_backend': 'podLogOptions.insecureSkipTLSVerifyBackend', + 'pod_log_options_stream': 'podLogOptions.stream', }, 'location_map': { 'namespace': 'path', @@ -454,6 +463,7 @@ def __init__(self, api_client=None): 'pod_log_options_tail_lines': 'query', 'pod_log_options_limit_bytes': 'query', 'pod_log_options_insecure_skip_tls_verify_backend': 'query', + 'pod_log_options_stream': 'query', }, 'collection_format_map': { } @@ -468,7 +478,7 @@ def __init__(self, api_client=None): ) self.update_sensor_endpoint = _Endpoint( settings={ - 'response_type': (IoArgoprojEventsV1alpha1Sensor,), + 'response_type': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), 'auth': [ 'BearerToken' ], @@ -679,7 +689,7 @@ def create_sensor( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1Sensor + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor If the method is called asynchronously, returns the request thread. """ @@ -738,6 +748,7 @@ def delete_sensor( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. [optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object @@ -848,7 +859,7 @@ def get_sensor( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1Sensor + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor If the method is called asynchronously, returns the request thread. 
""" @@ -937,7 +948,7 @@ def list_sensors( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1SensorList + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList If the method is called asynchronously, returns the request thread. """ @@ -996,9 +1007,10 @@ def sensors_logs( pod_log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] pod_log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] pod_log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] + pod_log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional.. [optional] pod_log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. 
[optional] pod_log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] + pod_log_options_stream (str): Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object @@ -1108,7 +1120,7 @@ def update_sensor( async_req (bool): execute request asynchronously Returns: - IoArgoprojEventsV1alpha1Sensor + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor If the method is called asynchronously, returns the request thread. 
""" diff --git a/sdks/python/client/argo_workflows/api/workflow_service_api.py b/sdks/python/client/argo_workflows/api/workflow_service_api.py index 29d73f0bac40..f898d0521c5a 100644 --- a/sdks/python/client/argo_workflows/api/workflow_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_service_api.py @@ -129,6 +129,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential', 'force', ], 'required': [ @@ -164,6 +165,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), 'force': (bool,), }, @@ -176,6 +179,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', 'force': 'force', }, 'location_map': { @@ -187,6 +191,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'query', 'force': 'query', }, 'collection_format_map': { @@ -463,6 +468,7 @@ def __init__(self, api_client=None): 'log_options_tail_lines', 'log_options_limit_bytes', 'log_options_insecure_skip_tls_verify_backend', + 'log_options_stream', 'grep', 'selector', ], @@ -510,6 +516,8 @@ def __init__(self, api_client=None): (str,), 'log_options_insecure_skip_tls_verify_backend': (bool,), + 'log_options_stream': + (str,), 'grep': (str,), 'selector': @@ -529,6 +537,7 @@ def __init__(self, api_client=None): 
'log_options_tail_lines': 'logOptions.tailLines', 'log_options_limit_bytes': 'logOptions.limitBytes', 'log_options_insecure_skip_tls_verify_backend': 'logOptions.insecureSkipTLSVerifyBackend', + 'log_options_stream': 'logOptions.stream', 'grep': 'grep', 'selector': 'selector', }, @@ -546,6 +555,7 @@ def __init__(self, api_client=None): 'log_options_tail_lines': 'query', 'log_options_limit_bytes': 'query', 'log_options_insecure_skip_tls_verify_backend': 'query', + 'log_options_stream': 'query', 'grep': 'query', 'selector': 'query', }, @@ -1299,6 +1309,7 @@ def __init__(self, api_client=None): 'log_options_tail_lines', 'log_options_limit_bytes', 'log_options_insecure_skip_tls_verify_backend', + 'log_options_stream', 'grep', 'selector', ], @@ -1345,6 +1356,8 @@ def __init__(self, api_client=None): (str,), 'log_options_insecure_skip_tls_verify_backend': (bool,), + 'log_options_stream': + (str,), 'grep': (str,), 'selector': @@ -1364,6 +1377,7 @@ def __init__(self, api_client=None): 'log_options_tail_lines': 'logOptions.tailLines', 'log_options_limit_bytes': 'logOptions.limitBytes', 'log_options_insecure_skip_tls_verify_backend': 'logOptions.insecureSkipTLSVerifyBackend', + 'log_options_stream': 'logOptions.stream', 'grep': 'grep', 'selector': 'selector', }, @@ -1381,6 +1395,7 @@ def __init__(self, api_client=None): 'log_options_tail_lines': 'query', 'log_options_limit_bytes': 'query', 'log_options_insecure_skip_tls_verify_backend': 'query', + 'log_options_stream': 'query', 'grep': 'query', 'selector': 'query', }, @@ -1502,6 +1517,7 @@ def delete_workflow( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. 
[optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. [optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] force (bool): [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. @@ -1846,9 +1862,10 @@ def pod_logs( log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] + log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional.. [optional] log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). 
+optional.. [optional] + log_options_stream (str): Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional.. [optional] grep (str): [optional] selector (str): [optional] _return_http_data_only (bool): response data without head status @@ -2793,9 +2810,10 @@ def workflow_logs( log_options_since_time_seconds (str): Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.. [optional] log_options_since_time_nanos (int): Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context.. [optional] log_options_timestamps (bool): If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional.. [optional] - log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional.. [optional] + log_options_tail_lines (str): If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional.. [optional] log_options_limit_bytes (str): If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional.. [optional] log_options_insecure_skip_tls_verify_backend (bool): insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional.. [optional] + log_options_stream (str): Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional.. 
[optional] grep (str): [optional] selector (str): [optional] _return_http_data_only (bool): response data without head status diff --git a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py index b68d7bc92408..4ce97a53e4e4 100644 --- a/sdks/python/client/argo_workflows/api/workflow_template_service_api.py +++ b/sdks/python/client/argo_workflows/api/workflow_template_service_api.py @@ -119,6 +119,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential', ], 'required': [ 'namespace', @@ -153,6 +154,8 @@ def __init__(self, api_client=None): (str,), 'delete_options_dry_run': ([str],), + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': + (bool,), }, 'attribute_map': { 'namespace': 'namespace', @@ -163,6 +166,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'deleteOptions.orphanDependents', 'delete_options_propagation_policy': 'deleteOptions.propagationPolicy', 'delete_options_dry_run': 'deleteOptions.dryRun', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'deleteOptions.ignoreStoreReadErrorWithClusterBreakingPotential', }, 'location_map': { 'namespace': 'path', @@ -173,6 +177,7 @@ def __init__(self, api_client=None): 'delete_options_orphan_dependents': 'query', 'delete_options_propagation_policy': 'query', 'delete_options_dry_run': 'query', + 'delete_options_ignore_store_read_error_with_cluster_breaking_potential': 'query', }, 'collection_format_map': { 'delete_options_dry_run': 'multi', @@ -583,6 +588,7 @@ def delete_workflow_template( delete_options_orphan_dependents (bool): Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. 
If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional.. [optional] delete_options_propagation_policy (str): Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional.. [optional] delete_options_dry_run ([str]): When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic.. [optional] + delete_options_ignore_store_read_error_with_cluster_breaking_potential (bool): if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object diff --git a/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py b/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py index 83af421a7a84..1ea409b4a656 100644 --- a/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py +++ b/sdks/python/client/argo_workflows/model/eventsource_create_event_source_request.py @@ -30,8 +30,8 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource class EventsourceCreateEventSourceRequest(ModelNormal): @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'event_source': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 + 'event_source': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), # noqa: E501 'namespace': (str,), # noqa: E501 } @@ -142,7 +142,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + event_source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 """ @@ -225,7 +225,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - event_source 
(IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + event_source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py b/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py index e0bb4c50191b..49be73597056 100644 --- a/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py +++ b/sdks/python/client/argo_workflows/model/eventsource_event_source_watch_event.py @@ -30,8 +30,8 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource class EventsourceEventSourceWatchEvent(ModelNormal): @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'object': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 + 'object': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), # noqa: E501 'type': (str,), # noqa: E501 } @@ -142,7 +142,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + object (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 type (str): [optional] # noqa: E501 """ @@ -225,7 +225,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator 
because we passed in _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + object (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 type (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py b/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py index 1f00975b5e79..3ebb8d3af453 100644 --- a/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py +++ b/sdks/python/client/argo_workflows/model/eventsource_update_event_source_request.py @@ -30,8 +30,8 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource class EventsourceUpdateEventSourceRequest(ModelNormal): @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'event_source': (IoArgoprojEventsV1alpha1EventSource,), # noqa: E501 + 'event_source': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource,), # noqa: E501 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 } @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + event_source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): 
[optional] # noqa: E501 """ @@ -228,7 +228,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - event_source (IoArgoprojEventsV1alpha1EventSource): [optional] # noqa: E501 + event_source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource): [optional] # noqa: E501 name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amount.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amount.py index 6cb7bbe57499..33be52734fe5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amount.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amount.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Amount(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -106,7 +106,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Amount - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -188,7 +188,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Amount - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_consume_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_consume_config.py index eaa7dd8acc3f..e02936601229 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_consume_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_consume_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1AMQPConsumeConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_event_source.py similarity index 69% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_event_source.py index c48740be319f..0f17ff786d7c 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_event_source.py @@ -30,27 +30,27 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_bind_config import 
IoArgoprojEventsV1alpha1AMQPQueueBindConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_consume_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_exchange_declare_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_bind_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_declare_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1AMQPConsumeConfig'] = IoArgoprojEventsV1alpha1AMQPConsumeConfig - globals()['IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig'] = IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - globals()['IoArgoprojEventsV1alpha1AMQPQueueBindConfig'] = IoArgoprojEventsV1alpha1AMQPQueueBindConfig - globals()['IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig'] = IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AMQPEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,19 +103,19 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'consume': (IoArgoprojEventsV1alpha1AMQPConsumeConfig,), # noqa: E501 - 'exchange_declare': (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'consume': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig,), # noqa: E501 + 'exchange_declare': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig,), # noqa: E501 'exchange_name': (str,), # noqa: E501 'exchange_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'queue_bind': (IoArgoprojEventsV1alpha1AMQPQueueBindConfig,), # noqa: E501 - 'queue_declare': (IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig,), # noqa: E501 + 'queue_bind': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig,), # noqa: E501 + 'queue_declare': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig,), # noqa: E501 'routing_key': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'url': (str,), # noqa: E501 'url_secret': (SecretKeySelector,), # noqa: E501 } @@ -151,7 +151,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -184,19 +184,19 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consume (IoArgoprojEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 - exchange_declare (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + consume (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 + exchange_declare (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 exchange_name (str): [optional] # noqa: E501 exchange_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - queue_bind (IoArgoprojEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 - queue_declare 
(IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 + queue_bind (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 + queue_declare (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 routing_key (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): [optional] # noqa: E501 url_secret (SecretKeySelector): [optional] # noqa: E501 """ @@ -247,7 +247,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -280,19 +280,19 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consume (IoArgoprojEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 - exchange_declare (IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + consume (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig): [optional] # noqa: E501 + exchange_declare (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig): [optional] # noqa: E501 exchange_name (str): [optional] # noqa: E501 
exchange_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - queue_bind (IoArgoprojEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 - queue_declare (IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 + queue_bind (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig): [optional] # noqa: E501 + queue_declare (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig): [optional] # noqa: E501 routing_key (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): [optional] # noqa: E501 url_secret (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_exchange_declare_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_exchange_declare_config.py index 78e154206e11..3b55aa1cfc5f 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_exchange_declare_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_exchange_declare_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig(ModelNormal): """NOTE: 
This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_bind_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_bind_config.py index 23675a1c82ed..a6ccb6817c73 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_bind_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_bind_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1AMQPQueueBindConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_declare_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_declare_config.py index 589529111ca7..1f3c73e5c746 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_amqp_queue_declare_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_declare_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -198,7 +198,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_argo_workflow_trigger.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_argo_workflow_trigger.py index 3658a3e4e57c..ceeb77278865 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_argo_workflow_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_argo_workflow_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1ArtifactLocation'] = 
IoArgoprojEventsV1alpha1ArtifactLocation - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter -class IoArgoprojEventsV1alpha1ArgoWorkflowTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -91,8 +91,8 @@ def openapi_types(): return { 'args': ([str],), # noqa: E501 'operation': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'source': (IoArgoprojEventsV1alpha1ArtifactLocation,), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'source': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation,), # noqa: E501 } @cached_property @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -150,8 +150,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 
_visited_composed_classes = (Animal,) args ([str]): [optional] # noqa: E501 operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -200,7 +200,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -235,8 +235,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) args ([str]): [optional] # noqa: E501 operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location.py similarity index 77% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py rename to 
sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location.py index 1f2c2e946883..2ca773031ccc 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_artifact_location.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location.py @@ -31,20 +31,20 @@ def lazy_import(): from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact - from argo_workflows.model.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact - from argo_workflows.model.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact - from argo_workflows.model.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_url_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['IoArgoprojEventsV1alpha1FileArtifact'] = 
IoArgoprojEventsV1alpha1FileArtifact - globals()['IoArgoprojEventsV1alpha1GitArtifact'] = IoArgoprojEventsV1alpha1GitArtifact - globals()['IoArgoprojEventsV1alpha1Resource'] = IoArgoprojEventsV1alpha1Resource - globals()['IoArgoprojEventsV1alpha1S3Artifact'] = IoArgoprojEventsV1alpha1S3Artifact - globals()['IoArgoprojEventsV1alpha1URLArtifact'] = IoArgoprojEventsV1alpha1URLArtifact + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact -class IoArgoprojEventsV1alpha1ArtifactLocation(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -98,12 +98,12 @@ def openapi_types(): lazy_import() return { 'configmap': (ConfigMapKeySelector,), # noqa: E501 - 'file': (IoArgoprojEventsV1alpha1FileArtifact,), # noqa: E501 - 'git': (IoArgoprojEventsV1alpha1GitArtifact,), # noqa: E501 + 'file': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact,), # noqa: E501 + 'git': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact,), # noqa: E501 'inline': (str,), # noqa: E501 - 'resource': (IoArgoprojEventsV1alpha1Resource,), # noqa: E501 - 's3': (IoArgoprojEventsV1alpha1S3Artifact,), # noqa: E501 - 'url': (IoArgoprojEventsV1alpha1URLArtifact,), # noqa: E501 + 'resource': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource,), # noqa: E501 + 's3': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact,), # noqa: E501 + 'url': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact,), # noqa: E501 } @cached_property @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArtifactLocation - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -163,12 +163,12 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) configmap (ConfigMapKeySelector): [optional] # noqa: E501 - file (IoArgoprojEventsV1alpha1FileArtifact): [optional] # noqa: E501 - git (IoArgoprojEventsV1alpha1GitArtifact): [optional] # noqa: E501 + file (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact): [optional] # noqa: E501 + git (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact): [optional] # noqa: E501 inline (str): [optional] # noqa: E501 - resource (IoArgoprojEventsV1alpha1Resource): [optional] # 
noqa: E501 - s3 (IoArgoprojEventsV1alpha1S3Artifact): [optional] # noqa: E501 - url (IoArgoprojEventsV1alpha1URLArtifact): [optional] # noqa: E501 + resource (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource): [optional] # noqa: E501 + s3 (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact): [optional] # noqa: E501 + url (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -217,7 +217,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ArtifactLocation - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,12 +251,12 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) configmap (ConfigMapKeySelector): [optional] # noqa: E501 - file (IoArgoprojEventsV1alpha1FileArtifact): [optional] # noqa: E501 - git (IoArgoprojEventsV1alpha1GitArtifact): [optional] # noqa: E501 + file (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact): [optional] # noqa: E501 + git (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact): [optional] # noqa: E501 inline (str): [optional] # noqa: E501 - resource (IoArgoprojEventsV1alpha1Resource): [optional] # noqa: E501 - s3 (IoArgoprojEventsV1alpha1S3Artifact): [optional] # noqa: E501 - url (IoArgoprojEventsV1alpha1URLArtifact): [optional] # noqa: E501 + resource (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource): [optional] # noqa: E501 + s3 (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact): [optional] # noqa: E501 + url (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact): [optional] # noqa: E501 """ _check_type = 
kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_aws_lambda_trigger.py similarity index 90% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_aws_lambda_trigger.py index 9c3805876c99..80233461e77e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_aws_lambda_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_aws_lambda_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AWSLambdaTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -92,8 +92,8 @@ def openapi_types(): 'access_key': (SecretKeySelector,), # noqa: E501 'function_name': (str,), # noqa: E501 'invocation_type': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'region': (str,), # noqa: E501 'role_arn': (str,), # noqa: E501 'secret_key': (SecretKeySelector,), # noqa: E501 @@ -123,7 +123,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -159,8 +159,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 access_key (SecretKeySelector): [optional] # noqa: E501 function_name (str): FunctionName refers to the name of the function to invoke.. [optional] # noqa: E501 invocation_type (str): Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 region (str): [optional] # noqa: E501 role_arn (str): [optional] # noqa: E501 secret_key (SecretKeySelector): [optional] # noqa: E501 @@ -212,7 +212,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -248,8 +248,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 access_key (SecretKeySelector): [optional] # noqa: E501 function_name (str): FunctionName refers to the name of the function to invoke.. [optional] # noqa: E501 invocation_type (str): Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 region (str): [optional] # noqa: E501 role_arn (str): [optional] # noqa: E501 secret_key (SecretKeySelector): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_event_hubs_trigger.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_event_hubs_trigger.py index 31711cb45c67..ddc455b752a1 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_event_hubs_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_event_hubs_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = 
IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AzureEventHubsTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -91,8 +91,8 @@ def openapi_types(): return { 'fqdn': (str,), # noqa: E501 'hub_name': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'shared_access_key': (SecretKeySelector,), # noqa: E501 'shared_access_key_name': (SecretKeySelector,), # noqa: E501 } @@ -119,7 +119,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -154,8 +154,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) fqdn (str): [optional] # noqa: E501 hub_name (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 shared_access_key (SecretKeySelector): [optional] # noqa: E501 shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 """ @@ -206,7 +206,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -241,8 +241,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) fqdn (str): [optional] # noqa: E501 hub_name (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 shared_access_key (SecretKeySelector): [optional] # noqa: E501 shared_access_key_name (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_events_hub_event_source.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_events_hub_event_source.py index a4c54b585749..7c37313fd7eb 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_events_hub_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_events_hub_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AzureEventsHubEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): """ lazy_import() return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'fqdn': (str,), # noqa: E501 'hub_name': (str,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 @@ -119,7 +119,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -152,7 +152,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 fqdn (str): [optional] # noqa: E501 hub_name (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 @@ -206,7 +206,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -239,7 +239,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): 
[optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 fqdn (str): [optional] # noqa: E501 hub_name (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_queue_storage_event_source.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_queue_storage_event_source.py index ee10fbdf3c58..c9bdd41c9af6 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_queue_storage_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_queue_storage_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AzureQueueStorageEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource(ModelNormal): """NOTE: This class is auto generated by 
OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,7 +92,7 @@ def openapi_types(): 'connection_string': (SecretKeySelector,), # noqa: E501 'decode_message': (bool,), # noqa: E501 'dlq': (bool,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'queue_name': (str,), # noqa: E501 @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -161,7 +161,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 connection_string (SecretKeySelector): [optional] # noqa: E501 decode_message (bool): [optional] # noqa: E501 dlq (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 @@ -215,7 +215,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,7 +251,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 connection_string 
(SecretKeySelector): [optional] # noqa: E501 decode_message (bool): [optional] # noqa: E501 dlq (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_event_source.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_event_source.py index ca4a16b9b92c..05faaf138870 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = 
IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1AzureServiceBusEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,13 +92,13 @@ def openapi_types(): lazy_import() return { 'connection_string': (SecretKeySelector,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'fully_qualified_namespace': (str,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'queue_name': (str,), # noqa: E501 'subscription_name': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic_name': (str,), # noqa: E501 } @@ -127,7 +127,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -161,13 +161,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) 
connection_string (SecretKeySelector): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 fully_qualified_namespace (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic_name (str): [optional] # noqa: E501 """ @@ -217,7 +217,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,13 +251,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) connection_string (SecretKeySelector): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 fully_qualified_namespace (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic_name (str): [optional] # noqa: E501 """ diff --git 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_trigger.py similarity index 85% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_trigger.py index 19b67af614dd..e34551df6317 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_azure_service_bus_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_trigger.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class 
IoArgoprojEventsV1alpha1AzureServiceBusTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,11 +92,11 @@ def openapi_types(): lazy_import() return { 'connection_string': (SecretKeySelector,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'queue_name': (str,), # noqa: E501 'subscription_name': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic_name': (str,), # noqa: E501 } @@ -123,7 +123,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -157,11 +157,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) connection_string (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic_name (str): [optional] # noqa: E501 """ @@ -211,7 +211,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -245,11 +245,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) connection_string (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 queue_name (str): [optional] # noqa: E501 subscription_name (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic_name (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff.py similarity index 86% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff.py index 347afcc7221d..8553c179c44d 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_backoff.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount - from argo_workflows.model.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString - globals()['IoArgoprojEventsV1alpha1Amount'] = IoArgoprojEventsV1alpha1Amount - globals()['IoArgoprojEventsV1alpha1Int64OrString'] = IoArgoprojEventsV1alpha1Int64OrString + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amount import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_int64_or_string import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString -class IoArgoprojEventsV1alpha1Backoff(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,9 +89,9 @@ def openapi_types(): """ lazy_import() return { - 'duration': (IoArgoprojEventsV1alpha1Int64OrString,), # noqa: E501 - 'factor': (IoArgoprojEventsV1alpha1Amount,), # noqa: E501 - 'jitter': (IoArgoprojEventsV1alpha1Amount,), # noqa: E501 + 'duration': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString,), # noqa: E501 + 'factor': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount,), # noqa: E501 + 'jitter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount,), # noqa: E501 'steps': (int,), # noqa: E501 } @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Backoff - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -148,9 +148,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - duration (IoArgoprojEventsV1alpha1Int64OrString): [optional] # noqa: E501 - factor (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - jitter (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 + duration (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString): [optional] # noqa: E501 + factor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount): [optional] # noqa: E501 + jitter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount): [optional] # noqa: E501 steps (int): [optional] # noqa: E501 """ @@ -200,7 +200,7 @@ def 
_from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Backoff - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -233,9 +233,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - duration (IoArgoprojEventsV1alpha1Int64OrString): [optional] # noqa: E501 - factor (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 - jitter (IoArgoprojEventsV1alpha1Amount): [optional] # noqa: E501 + duration (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString): [optional] # noqa: E501 + factor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount): [optional] # noqa: E501 + jitter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount): [optional] # noqa: E501 steps (int): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth.py index f92b06a3619e..7a58c1c3d982 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_basic_auth.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1BasicAuth(ModelNormal): +class 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BasicAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BasicAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_auth.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_auth.py index 04f5b1e53f80..10dc4e2a8d45 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_auth.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_auth.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_basic_auth import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BitbucketBasicAuth'] = IoArgoprojEventsV1alpha1BitbucketBasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1BitbucketAuth(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): """ lazy_import() return { - 'basic': (IoArgoprojEventsV1alpha1BitbucketBasicAuth,), # noqa: E501 + 'basic': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth,), # noqa: E501 'oauth_token': (SecretKeySelector,), # noqa: E501 } @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 + basic (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 oauth_token (SecretKeySelector): [optional] # noqa: E501 """ @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - 
"""IoArgoprojEventsV1alpha1BitbucketAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,7 +227,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 + basic (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth): [optional] # noqa: E501 oauth_token (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_basic_auth.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_basic_auth.py index 916f7366d9c5..1654ceaed19f 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_basic_auth.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_basic_auth.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1BitbucketBasicAuth(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_event_source.py similarity index 80% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_event_source.py index 3939065eec16..e1eaa1fd3983 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_event_source.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import 
IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1BitbucketAuth'] = IoArgoprojEventsV1alpha1BitbucketAuth - globals()['IoArgoprojEventsV1alpha1BitbucketRepository'] = IoArgoprojEventsV1alpha1BitbucketRepository - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_repository import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext -class IoArgoprojEventsV1alpha1BitbucketEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource(ModelNormal): 
"""NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -93,16 +93,16 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1BitbucketAuth,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth,), # noqa: E501 'delete_hook_on_finish': (bool,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'owner': (str,), # noqa: E501 'project_key': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1BitbucketRepository],), # noqa: E501 + 'repositories': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository],), # noqa: E501 'repository_slug': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -131,7 +131,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,16 +164,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): Events this webhook is subscribed to.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 owner (str): [optional] # noqa: E501 project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 repository_slug (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -222,7 +222,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -255,16 +255,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): Events this webhook is subscribed to.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 owner (str): [optional] # noqa: E501 project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository]): [optional] # noqa: E501 repository_slug (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_repository.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_repository.py index 9a41746db1a8..57b8fc089604 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_repository.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_repository.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1BitbucketRepository(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketRepository - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketRepository - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_event_source.py similarity index 75% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_event_source.py index f036bb0e8806..e442bcda024d 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_event_source.py @@ -30,19 +30,19 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from 
argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_repository import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BitbucketServerRepository'] = IoArgoprojEventsV1alpha1BitbucketServerRepository - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext 
globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1BitbucketServerEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -97,15 +97,19 @@ def openapi_types(): return { 'access_token': (SecretKeySelector,), # noqa: E501 'bitbucketserver_base_url': (str,), # noqa: E501 + 'check_interval': (str,), # noqa: E501 'delete_hook_on_finish': (bool,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 + 'one_event_per_change': (bool,), # noqa: E501 'project_key': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1BitbucketServerRepository],), # noqa: E501 + 'projects': ([str],), # noqa: E501 + 'repositories': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository],), # noqa: E501 'repository_slug': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'skip_branch_refs_changed_on_open_pr': (bool,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 'webhook_secret': (SecretKeySelector,), # noqa: E501 } @@ -117,13 +121,17 @@ def discriminator(): attribute_map = { 'access_token': 'accessToken', # noqa: E501 'bitbucketserver_base_url': 'bitbucketserverBaseURL', # noqa: E501 + 'check_interval': 'checkInterval', # noqa: E501 'delete_hook_on_finish': 'deleteHookOnFinish', # noqa: E501 'events': 'events', # noqa: E501 'filter': 'filter', # noqa: E501 'metadata': 'metadata', # noqa: E501 + 'one_event_per_change': 
'oneEventPerChange', # noqa: E501 'project_key': 'projectKey', # noqa: E501 + 'projects': 'projects', # noqa: E501 'repositories': 'repositories', # noqa: E501 'repository_slug': 'repositorySlug', # noqa: E501 + 'skip_branch_refs_changed_on_open_pr': 'skipBranchRefsChangedOnOpenPR', # noqa: E501 'tls': 'tls', # noqa: E501 'webhook': 'webhook', # noqa: E501 'webhook_secret': 'webhookSecret', # noqa: E501 @@ -137,7 +145,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -171,16 +179,20 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) access_token (SecretKeySelector): [optional] # noqa: E501 - bitbucketserver_base_url (str): [optional] # noqa: E501 + bitbucketserver_base_url (str): BitbucketServerBaseURL is the base URL for API requests to a custom endpoint.. 
[optional] # noqa: E501 + check_interval (str): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 + one_event_per_change (bool): [optional] # noqa: E501 project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 + projects ([str]): [optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 repository_slug (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + skip_branch_refs_changed_on_open_pr (bool): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 webhook_secret (SecretKeySelector): [optional] # noqa: E501 """ @@ -230,7 +242,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -264,16 +276,20 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) access_token (SecretKeySelector): [optional] # noqa: E501 - bitbucketserver_base_url (str): [optional] # noqa: E501 + 
bitbucketserver_base_url (str): BitbucketServerBaseURL is the base URL for API requests to a custom endpoint.. [optional] # noqa: E501 + check_interval (str): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 + one_event_per_change (bool): [optional] # noqa: E501 project_key (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 + projects ([str]): [optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository]): [optional] # noqa: E501 repository_slug (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + skip_branch_refs_changed_on_open_pr (bool): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 webhook_secret (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_repository.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_repository.py index a913f26341d1..7e9a639f97ec 100644 --- 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_bitbucket_server_repository.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_repository.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1BitbucketServerRepository(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -136,8 +136,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - project_key (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 + project_key (str): ProjectKey is the key of project for which integration needs to set up.. [optional] # noqa: E501 + repository_slug (str): RepositorySlug is the slug of the repository for which integration needs to set up.. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -219,8 +219,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - project_key (str): [optional] # noqa: E501 - repository_slug (str): [optional] # noqa: E501 + project_key (str): ProjectKey is the key of project for which integration needs to set up.. [optional] # noqa: E501 + repository_slug (str): RepositorySlug is the slug of the repository for which integration needs to set up.. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_calendar_event_source.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_calendar_event_source.py index d62e991558ec..2abdc0d9e6cd 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_calendar_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_calendar_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1EventPersistence'] = IoArgoprojEventsV1alpha1EventPersistence - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_persistence import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter -class 
IoArgoprojEventsV1alpha1CalendarEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -90,10 +90,10 @@ def openapi_types(): lazy_import() return { 'exclusion_dates': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'interval': (str,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'persistence': (IoArgoprojEventsV1alpha1EventPersistence,), # noqa: E501 + 'persistence': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence,), # noqa: E501 'schedule': (str,), # noqa: E501 'timezone': (str,), # noqa: E501 } @@ -121,7 +121,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CalendarEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -155,10 +155,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) exclusion_dates ([str]): ExclusionDates defines the list of DATE-TIME exceptions for recurring events.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 interval (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - persistence (IoArgoprojEventsV1alpha1EventPersistence): [optional] # noqa: E501 + persistence (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence): [optional] # noqa: E501 schedule (str): [optional] # noqa: E501 timezone (str): [optional] # noqa: E501 """ @@ -209,7 +209,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CalendarEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -243,10 +243,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) exclusion_dates ([str]): ExclusionDates defines the list of DATE-TIME exceptions for recurring events.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 interval (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - persistence (IoArgoprojEventsV1alpha1EventPersistence): [optional] # noqa: E501 + persistence (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence): [optional] # noqa: E501 schedule (str): [optional] # noqa: E501 timezone (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_catchup_configuration.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_catchup_configuration.py index b2aa2cb7bb84..2e841b20975a 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_catchup_configuration.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1CatchupConfiguration(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_condition.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_condition.py index 07df4c4bd186..522e0ece39b0 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_condition.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_condition.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Condition(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Condition - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Condition - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_by_time.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_by_time.py index 1e0270d7cc4f..574b7e614ace 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_by_time.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_by_time.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1ConditionsResetByTime(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_criteria.py similarity index 91% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_criteria.py index 0bf7d9e76916..cb94baad56f5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_conditions_reset_criteria.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_criteria.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime - globals()['IoArgoprojEventsV1alpha1ConditionsResetByTime'] = IoArgoprojEventsV1alpha1ConditionsResetByTime + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_by_time import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime -class IoArgoprojEventsV1alpha1ConditionsResetCriteria(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'by_time': (IoArgoprojEventsV1alpha1ConditionsResetByTime,), # noqa: E501 + 'by_time': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime,), # noqa: E501 } @cached_property @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - by_time (IoArgoprojEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 + by_time (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI + 
"""GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - by_time (IoArgoprojEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 + by_time (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_config_map_persistence.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_config_map_persistence.py index a394297c566d..c67e562ae1ef 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_config_map_persistence.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_config_map_persistence.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1ConfigMapPersistence(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_container.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_container.py new file mode 100644 index 000000000000..b9d71063c84d --- /dev/null +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_container.py @@ -0,0 +1,289 @@ +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + + The version of the OpenAPI document: VERSION + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from argo_workflows.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, + OpenApiModel +) +from argo_workflows.exceptions import ApiAttributeError + + +def lazy_import(): + from argo_workflows.model.env_from_source import EnvFromSource + from argo_workflows.model.env_var import EnvVar + from argo_workflows.model.resource_requirements import ResourceRequirements + from argo_workflows.model.security_context import SecurityContext + from argo_workflows.model.volume_mount import VolumeMount + globals()['EnvFromSource'] = EnvFromSource + globals()['EnvVar'] = EnvVar + globals()['ResourceRequirements'] = ResourceRequirements + globals()['SecurityContext'] = SecurityContext + globals()['VolumeMount'] = VolumeMount + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container(ModelNormal): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). 
The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + 'env': ([EnvVar],), # noqa: E501 + 'env_from': ([EnvFromSource],), # noqa: E501 + 'image_pull_policy': (str,), # noqa: E501 + 'resources': (ResourceRequirements,), # noqa: E501 + 'security_context': (SecurityContext,), # noqa: E501 + 'volume_mounts': ([VolumeMount],), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'env': 'env', # noqa: E501 + 'env_from': 'envFrom', # noqa: E501 + 'image_pull_policy': 'imagePullPolicy', # noqa: E501 + 'resources': 'resources', # noqa: E501 + 'security_context': 'securityContext', # noqa: E501 + 'volume_mounts': 'volumeMounts', # noqa: E501 + } + + read_only_vars = { + } + + _composed_schemas = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be 
+ raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + env ([EnvVar]): [optional] # noqa: E501 + env_from ([EnvFromSource]): [optional] # noqa: E501 + image_pull_policy (str): [optional] # noqa: E501 + resources (ResourceRequirements): [optional] # noqa: E501 + security_context (SecurityContext): [optional] # noqa: E501 + volume_mounts ([VolumeMount]): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + ]) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs): # noqa: E501 + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + env ([EnvVar]): [optional] # noqa: E501 + env_from ([EnvFromSource]): [optional] # noqa: E501 + image_pull_policy (str): [optional] # noqa: E501 + resources (ResourceRequirements): [optional] # noqa: E501 + security_context (SecurityContext): [optional] # noqa: E501 + volume_mounts ([VolumeMount]): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_custom_trigger.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_custom_trigger.py index ee11737c1aae..53a245f24f85 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_custom_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_custom_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1CustomTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -90,8 +90,8 @@ def openapi_types(): lazy_import() return { 'cert_secret': (SecretKeySelector,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'secure': (bool,), # noqa: E501 'server_name_override': (str,), # noqa: E501 'server_url': (str,), # noqa: E501 @@ -121,7 +121,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CustomTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -155,8 +155,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) cert_secret (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 secure (bool): [optional] # noqa: E501 server_name_override (str): ServerNameOverride for the secure connection between sensor and custom trigger gRPC server.. [optional] # noqa: E501 server_url (str): [optional] # noqa: E501 @@ -209,7 +209,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1CustomTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -243,8 +243,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) cert_secret (SecretKeySelector): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved custom trigger trigger object.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 secure (bool): [optional] # noqa: E501 server_name_override (str): ServerNameOverride for the secure connection between sensor and custom trigger gRPC server.. 
[optional] # noqa: E501 server_url (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_data_filter.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_data_filter.py index 551820efed73..097f8b698a5a 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_data_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_data_filter.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1DataFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1DataFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1DataFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_email_trigger.py similarity index 93% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_email_trigger.py index 9c14217b56a2..47f6117239f5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_email_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_email_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1EmailTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -92,7 +92,7 @@ def openapi_types(): 'body': (str,), # noqa: E501 '_from': (str,), # noqa: E501 'host': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'port': (int,), # noqa: E501 'smtp_password': (SecretKeySelector,), # noqa: E501 'subject': (str,), # noqa: E501 @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmailTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -161,7 +161,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 body (str): [optional] # noqa: E501 _from (str): [optional] # noqa: E501 host (str): Host refers to the smtp host url to which email is send.. 
[optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 port (int): [optional] # noqa: E501 smtp_password (SecretKeySelector): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 @@ -215,7 +215,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmailTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,7 +251,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 body (str): [optional] # noqa: E501 _from (str): [optional] # noqa: E501 host (str): Host refers to the smtp host url to which email is send.. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 port (int): [optional] # noqa: E501 smtp_password (SecretKeySelector): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_emitter_event_source.py similarity index 85% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_emitter_event_source.py index c51403b60782..75a2c3f375cd 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_emitter_event_source.py +++ 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_emitter_event_source.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1EmitterEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI 
Generator. Ref: https://openapi-generator.tech @@ -96,12 +96,12 @@ def openapi_types(): 'broker': (str,), # noqa: E501 'channel_key': (str,), # noqa: E501 'channel_name': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'password': (SecretKeySelector,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'username': (SecretKeySelector,), # noqa: E501 } @@ -131,7 +131,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmitterEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -167,12 +167,12 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 broker (str): Broker URI to connect to.. 
[optional] # noqa: E501 channel_key (str): [optional] # noqa: E501 channel_name (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (SecretKeySelector): [optional] # noqa: E501 """ @@ -222,7 +222,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EmitterEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -258,12 +258,12 @@ def __init__(self, *args, **kwargs): # noqa: E501 broker (str): Broker URI to connect to.. 
[optional] # noqa: E501 channel_key (str): [optional] # noqa: E501 channel_name (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_context.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_context.py index 407be77edd35..d6ed6ef9b5fb 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_context.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_context.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1EventContext(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -113,7 +113,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventContext - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -201,7 +201,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventContext - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency.py index f75cdcd89a61..50b84b30e14b 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_filter import IoArgoprojEventsV1alpha1EventDependencyFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer - globals()['IoArgoprojEventsV1alpha1EventDependencyFilter'] = IoArgoprojEventsV1alpha1EventDependencyFilter - 
globals()['IoArgoprojEventsV1alpha1EventDependencyTransformer'] = IoArgoprojEventsV1alpha1EventDependencyTransformer + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_transformer import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer -class IoArgoprojEventsV1alpha1EventDependency(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -91,10 +91,10 @@ def openapi_types(): return { 'event_name': (str,), # noqa: E501 'event_source_name': (str,), # noqa: E501 - 'filters': (IoArgoprojEventsV1alpha1EventDependencyFilter,), # noqa: E501 + 'filters': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter,), # noqa: E501 'filters_logical_operator': (str,), # noqa: E501 'name': (str,), # noqa: E501 - 'transform': (IoArgoprojEventsV1alpha1EventDependencyTransformer,), # noqa: E501 + 'transform': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer,), # noqa: E501 } @cached_property @@ -119,7 +119,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependency - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -154,10 +154,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) event_name (str): [optional] # noqa: E501 event_source_name (str): [optional] # noqa: E501 - filters (IoArgoprojEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 + filters (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 filters_logical_operator (str): FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. 
[optional] # noqa: E501 name (str): [optional] # noqa: E501 - transform (IoArgoprojEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 + transform (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -206,7 +206,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependency - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -241,10 +241,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) event_name (str): [optional] # noqa: E501 event_source_name (str): [optional] # noqa: E501 - filters (IoArgoprojEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 + filters (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter): [optional] # noqa: E501 filters_logical_operator (str): FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. 
[optional] # noqa: E501 name (str): [optional] # noqa: E501 - transform (IoArgoprojEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 + transform (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_filter.py similarity index 81% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_filter.py index 335a7f9b9a9a..414b321e41e9 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_filter.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext - from argo_workflows.model.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter - globals()['IoArgoprojEventsV1alpha1DataFilter'] = IoArgoprojEventsV1alpha1DataFilter - globals()['IoArgoprojEventsV1alpha1EventContext'] = IoArgoprojEventsV1alpha1EventContext - globals()['IoArgoprojEventsV1alpha1ExprFilter'] = IoArgoprojEventsV1alpha1ExprFilter - globals()['IoArgoprojEventsV1alpha1TimeFilter'] = IoArgoprojEventsV1alpha1TimeFilter + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_data_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_expr_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_time_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter -class IoArgoprojEventsV1alpha1EventDependencyFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,13 +93,13 @@ def openapi_types(): """ lazy_import() return { - 'context': (IoArgoprojEventsV1alpha1EventContext,), # noqa: E501 - 'data': ([IoArgoprojEventsV1alpha1DataFilter],), # noqa: E501 + 'context': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext,), # noqa: E501 + 'data': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter],), # noqa: E501 'data_logical_operator': (str,), # noqa: E501 'expr_logical_operator': (str,), # noqa: E501 - 'exprs': ([IoArgoprojEventsV1alpha1ExprFilter],), # noqa: E501 + 'exprs': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter],), # noqa: E501 'script': (str,), # noqa: E501 - 'time': (IoArgoprojEventsV1alpha1TimeFilter,), # noqa: E501 + 'time': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter,), # noqa: E501 } @cached_property @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -158,13 +158,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - context (IoArgoprojEventsV1alpha1EventContext): [optional] # noqa: E501 - data ([IoArgoprojEventsV1alpha1DataFilter]): [optional] # noqa: E501 + context (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext): [optional] # noqa: E501 + data ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter]): [optional] # noqa: E501 data_logical_operator (str): DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. 
Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 expr_logical_operator (str): ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - exprs ([IoArgoprojEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 + exprs ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 script (str): Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - time (IoArgoprojEventsV1alpha1TimeFilter): [optional] # noqa: E501 + time (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -213,7 +213,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -246,13 +246,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - context (IoArgoprojEventsV1alpha1EventContext): [optional] # noqa: E501 - data ([IoArgoprojEventsV1alpha1DataFilter]): [optional] # noqa: E501 + context (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext): [optional] # noqa: E501 + data ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter]): [optional] # noqa: E501 
data_logical_operator (str): DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 expr_logical_operator (str): ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&).. [optional] # noqa: E501 - exprs ([IoArgoprojEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 + exprs ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter]): Exprs contains the list of expressions evaluated against the event payload.. [optional] # noqa: E501 script (str): Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1.. [optional] # noqa: E501 - time (IoArgoprojEventsV1alpha1TimeFilter): [optional] # noqa: E501 + time (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_transformer.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_transformer.py index 0412a1b0f59b..dbb3ab253971 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_dependency_transformer.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_transformer.py @@ -30,7 +30,7 @@ -class 
IoArgoprojEventsV1alpha1EventDependencyTransformer(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_persistence.py similarity index 86% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_persistence.py index 7a750f023c47..b4e843ea8845 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_persistence.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_persistence.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_catchup_configuration import 
IoArgoprojEventsV1alpha1CatchupConfiguration - from argo_workflows.model.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence - globals()['IoArgoprojEventsV1alpha1CatchupConfiguration'] = IoArgoprojEventsV1alpha1CatchupConfiguration - globals()['IoArgoprojEventsV1alpha1ConfigMapPersistence'] = IoArgoprojEventsV1alpha1ConfigMapPersistence + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_catchup_configuration import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_config_map_persistence import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence -class IoArgoprojEventsV1alpha1EventPersistence(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -89,8 +89,8 @@ def openapi_types(): """ lazy_import() return { - 'catchup': (IoArgoprojEventsV1alpha1CatchupConfiguration,), # noqa: E501 - 'config_map': (IoArgoprojEventsV1alpha1ConfigMapPersistence,), # noqa: E501 + 'catchup': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration,), # noqa: E501 + 'config_map': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence,), # noqa: E501 } @cached_property @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventPersistence - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,8 +144,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - catchup (IoArgoprojEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 - config_map (IoArgoprojEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 + catchup (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 + config_map (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventPersistence - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,8 +227,8 @@ def __init__(self, *args, **kwargs): # noqa: 
E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - catchup (IoArgoprojEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 - config_map (IoArgoprojEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 + catchup (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration): [optional] # noqa: E501 + config_map (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source.py index 6931d2dc53e7..d5b11d3b4243 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_spec import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojEventsV1alpha1SensorSpec'] = 
IoArgoprojEventsV1alpha1SensorSpec - globals()['IoArgoprojEventsV1alpha1SensorStatus'] = IoArgoprojEventsV1alpha1SensorStatus + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus globals()['ObjectMeta'] = ObjectMeta -class IoArgoprojEventsV1alpha1Sensor(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,8 +92,8 @@ def openapi_types(): lazy_import() return { 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojEventsV1alpha1SensorSpec,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1SensorStatus,), # noqa: E501 + 'spec': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec,), # noqa: E501 + 'status': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus,), # noqa: E501 } @cached_property @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Sensor - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -149,8 +149,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1SensorSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1SensorStatus): [optional] # noqa: E501 + spec (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 + status 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -199,7 +199,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Sensor - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -233,8 +233,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1SensorSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1SensorStatus): [optional] # noqa: E501 + spec (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter.py index 3cd42535b945..a393de31bf68 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1EventSourceFilter(ModelNormal): +class 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list.py index dee62a1f3f90..ef91f0ce70f5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_list.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource globals()['ListMeta'] = ListMeta -class IoArgoprojEventsV1alpha1SensorList(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): """ lazy_import() return { - 'items': ([IoArgoprojEventsV1alpha1Sensor],), # noqa: E501 + 'items': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource],), # noqa: E501 'metadata': (ListMeta,), # noqa: E501 } @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorList - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1Sensor]): [optional] # noqa: E501 + items ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource]): [optional] # noqa: E501 metadata (ListMeta): [optional] # noqa: E501 """ @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorList - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList - a model defined in OpenAPI 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,7 +227,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1Sensor]): [optional] # noqa: E501 + items ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource]): [optional] # noqa: E501 metadata (ListMeta): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_spec.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_spec.py new file mode 100644 index 000000000000..287abc5e878c --- /dev/null +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_spec.py @@ -0,0 +1,461 @@ +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + + The version of the OpenAPI document: VERSION + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from argo_workflows.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, + OpenApiModel +) +from argo_workflows.exceptions import ApiAttributeError + + +def lazy_import(): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_events_hub_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_queue_storage_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_calendar_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_emitter_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_generic_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gerrit_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gitlab_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_hdfs_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_mqtt_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_events_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nsq_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pub_sub_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_stream_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_service import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sftp_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sns_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sqs_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_stripe_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource'] 
= GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec(ModelNormal): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + 'amqp': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource,)},), # noqa: E501 + 'azure_events_hub': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource,)},), # noqa: E501 + 'azure_queue_storage': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource,)},), # noqa: E501 + 'azure_service_bus': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource,)},), # noqa: E501 + 'bitbucket': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource,)},), # noqa: E501 + 'bitbucketserver': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource,)},), # noqa: E501 + 'calendar': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource,)},), # noqa: E501 + 'emitter': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource,)},), # noqa: E501 + 'event_bus_name': (str,), # noqa: E501 + 'file': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource,)},), # noqa: E501 + 'generic': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource,)},), # noqa: E501 + 'gerrit': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource,)},), # noqa: E501 + 'github': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource,)},), # noqa: E501 + 'gitlab': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource,)},), # noqa: E501 + 'hdfs': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource,)},), # noqa: E501 + 'kafka': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource,)},), # noqa: E501 + 'minio': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact,)},), # noqa: E501 + 'mqtt': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource,)},), # noqa: E501 + 'nats': ({str: 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource,)},), # noqa: E501 + 'nsq': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource,)},), # noqa: E501 + 'pub_sub': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource,)},), # noqa: E501 + 'pulsar': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource,)},), # noqa: E501 + 'redis': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource,)},), # noqa: E501 + 'redis_stream': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource,)},), # noqa: E501 + 'replicas': (int,), # noqa: E501 + 'resource': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource,)},), # noqa: E501 + 'service': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service,), # noqa: E501 + 'sftp': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource,)},), # noqa: E501 + 'slack': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource,)},), # noqa: E501 + 'sns': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource,)},), # noqa: E501 + 'sqs': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource,)},), # noqa: E501 + 'storage_grid': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource,)},), # noqa: E501 + 'stripe': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource,)},), # noqa: E501 + 'template': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template,), # noqa: E501 + 'webhook': ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource,)},), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'amqp': 'amqp', # noqa: E501 + 'azure_events_hub': 'azureEventsHub', # noqa: E501 + 'azure_queue_storage': 'azureQueueStorage', # noqa: E501 + 'azure_service_bus': 'azureServiceBus', # noqa: E501 + 'bitbucket': 'bitbucket', # noqa: E501 + 
'bitbucketserver': 'bitbucketserver', # noqa: E501 + 'calendar': 'calendar', # noqa: E501 + 'emitter': 'emitter', # noqa: E501 + 'event_bus_name': 'eventBusName', # noqa: E501 + 'file': 'file', # noqa: E501 + 'generic': 'generic', # noqa: E501 + 'gerrit': 'gerrit', # noqa: E501 + 'github': 'github', # noqa: E501 + 'gitlab': 'gitlab', # noqa: E501 + 'hdfs': 'hdfs', # noqa: E501 + 'kafka': 'kafka', # noqa: E501 + 'minio': 'minio', # noqa: E501 + 'mqtt': 'mqtt', # noqa: E501 + 'nats': 'nats', # noqa: E501 + 'nsq': 'nsq', # noqa: E501 + 'pub_sub': 'pubSub', # noqa: E501 + 'pulsar': 'pulsar', # noqa: E501 + 'redis': 'redis', # noqa: E501 + 'redis_stream': 'redisStream', # noqa: E501 + 'replicas': 'replicas', # noqa: E501 + 'resource': 'resource', # noqa: E501 + 'service': 'service', # noqa: E501 + 'sftp': 'sftp', # noqa: E501 + 'slack': 'slack', # noqa: E501 + 'sns': 'sns', # noqa: E501 + 'sqs': 'sqs', # noqa: E501 + 'storage_grid': 'storageGrid', # noqa: E501 + 'stripe': 'stripe', # noqa: E501 + 'template': 'template', # noqa: E501 + 'webhook': 'webhook', # noqa: E501 + } + + read_only_vars = { + } + + _composed_schemas = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + amqp ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 + azure_events_hub ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 + azure_queue_storage ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 + azure_service_bus ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 + bitbucket ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 + bitbucketserver ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 + calendar ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 + emitter ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 + event_bus_name (str): [optional] # noqa: E501 + file ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource,)}): [optional] # 
noqa: E501 + generic ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 + gerrit ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 + github ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 + gitlab ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 + hdfs ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 + kafka ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 + minio ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 + mqtt ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 + nats ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 + nsq ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 + pub_sub ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 + pulsar ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 + redis ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 + redis_stream ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 + replicas (int): [optional] # noqa: E501 + resource ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 + service (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service): [optional] # noqa: E501 + sftp ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 + slack ({str: 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 + sns ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 + sqs ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 + storage_grid ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 + stripe ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template): [optional] # noqa: E501 + webhook ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + ]) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs): # noqa: E501 + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + amqp ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 + azure_events_hub ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 + azure_queue_storage ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 + azure_service_bus ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 + bitbucket ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 + bitbucketserver ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 + calendar ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 + emitter ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 + event_bus_name (str): [optional] # noqa: E501 + file ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource,)}): [optional] # noqa: E501 + generic ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 + gerrit ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 + github ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 + gitlab ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 + hdfs ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 + kafka ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 + minio 
({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 + mqtt ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 + nats ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 + nsq ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 + pub_sub ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 + pulsar ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 + redis ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 + redis_stream ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 + replicas (int): [optional] # noqa: E501 + resource ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 + service (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service): [optional] # noqa: E501 + sftp ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 + slack ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 + sns ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 + sqs ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 + storage_grid ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 + stripe ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template): [optional] # noqa: E501 + webhook ({str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: 
E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_status.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_status.py index 6dfa9291858e..be909255806d 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_status.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_status.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status - globals()['IoArgoprojEventsV1alpha1Status'] = IoArgoprojEventsV1alpha1Status + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status -class IoArgoprojEventsV1alpha1SensorStatus(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'status': (IoArgoprojEventsV1alpha1Status,), # noqa: E501 + 'status': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status,), # noqa: E501 } @cached_property @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorStatus - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorStatus - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_expr_filter.py similarity index 91% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_expr_filter.py index 5416f95c7cc0..dc26292f0eef 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_expr_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_expr_filter.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField - globals()['IoArgoprojEventsV1alpha1PayloadField'] = IoArgoprojEventsV1alpha1PayloadField + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_payload_field import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField -class IoArgoprojEventsV1alpha1ExprFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -88,7 +88,7 @@ def openapi_types(): lazy_import() return { 'expr': (str,), # noqa: E501 - 'fields': ([IoArgoprojEventsV1alpha1PayloadField],), # noqa: E501 + 'fields': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField],), # noqa: E501 } @cached_property @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ExprFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -143,7 +143,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) expr (str): Expr refers to the expression that determines the outcome of the filter.. [optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 + fields ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ExprFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -226,7 +226,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) expr (str): Expr refers to the expression that determines the outcome of the filter.. 
[optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 + fields ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField]): Fields refers to set of keys that refer to the paths within event payload.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_artifact.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_artifact.py index 20d7c7e94cf7..04c83954d5da 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_artifact.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_artifact.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1FileArtifact(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_event_source.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_event_source.py index 9c181014e3ae..b94cff661a77 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_file_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - 
globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig -class IoArgoprojEventsV1alpha1FileEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -90,10 +90,10 @@ def openapi_types(): lazy_import() return { 'event_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'polling': (bool,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 + 'watch_path_config': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig,), # noqa: E501 } @cached_property @@ -117,7 +117,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -151,10 +151,10 @@ def _from_openapi_data(cls, *args, **kwargs): # 
noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 polling (bool): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -203,7 +203,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1FileEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -237,10 +237,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 polling (bool): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_generic_event_source.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_generic_event_source.py index b65fa8ee43e3..203f82d5f2e7 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_generic_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_generic_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GenericEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -91,7 +91,7 @@ def openapi_types(): return { 'auth_secret': (SecretKeySelector,), # noqa: E501 'config': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'insecure': (bool,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 @@ -121,7 +121,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GenericEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -156,7 +156,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) auth_secret (SecretKeySelector): [optional] # noqa: E501 config (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 insecure (bool): Insecure determines the type of connection.. 
[optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 @@ -209,7 +209,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GenericEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -244,7 +244,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) auth_secret (SecretKeySelector): [optional] # noqa: E501 config (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 insecure (bool): Insecure determines the type of connection.. [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gerrit_event_source.py similarity index 85% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gerrit_event_source.py index bfc6c009aaeb..4e004e7bb11e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gerrit_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gerrit_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from 
argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext -class IoArgoprojEventsV1alpha1GerritEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -91,16 +91,16 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 'delete_hook_on_finish': (bool,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'gerrit_base_url': (str,), # noqa: E501 'hook_name': (str,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'projects': ([str],), # noqa: E501 'ssl_verify': (bool,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GerritEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -162,16 +162,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 gerrit_base_url (str): [optional] # noqa: E501 hook_name (str): [optional] # noqa: E501 
metadata ({str: (str,)}): [optional] # noqa: E501 projects ([str]): List of project namespace paths like \"whynowy/test\".. [optional] # noqa: E501 ssl_verify (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GerritEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -253,16 +253,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 gerrit_base_url (str): [optional] # noqa: E501 hook_name (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 projects ([str]): List of project namespace paths like \"whynowy/test\".. 
[optional] # noqa: E501 ssl_verify (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_artifact.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_artifact.py index 5d87658c70c0..3818641c1ee5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_artifact.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_artifact.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds - from argo_workflows.model.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_creds import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_remote_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1GitCreds'] = IoArgoprojEventsV1alpha1GitCreds - globals()['IoArgoprojEventsV1alpha1GitRemoteConfig'] = IoArgoprojEventsV1alpha1GitRemoteConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GitArtifact(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -93,11 +93,11 @@ def openapi_types(): return { 'branch': (str,), # noqa: E501 'clone_directory': (str,), # noqa: E501 - 'creds': (IoArgoprojEventsV1alpha1GitCreds,), # noqa: E501 + 'creds': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds,), # noqa: E501 'file_path': (str,), # noqa: E501 'insecure_ignore_host_key': (bool,), # noqa: E501 'ref': (str,), # noqa: E501 - 'remote': (IoArgoprojEventsV1alpha1GitRemoteConfig,), # noqa: E501 + 'remote': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig,), # noqa: E501 'ssh_key_secret': (SecretKeySelector,), # noqa: E501 'tag': (str,), # noqa: E501 'url': (str,), # noqa: E501 @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,11 +164,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) branch (str): [optional] # noqa: E501 clone_directory (str): Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client.. 
[optional] # noqa: E501 - creds (IoArgoprojEventsV1alpha1GitCreds): [optional] # noqa: E501 + creds (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds): [optional] # noqa: E501 file_path (str): [optional] # noqa: E501 insecure_ignore_host_key (bool): [optional] # noqa: E501 ref (str): [optional] # noqa: E501 - remote (IoArgoprojEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 + remote (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 tag (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -255,11 +255,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) branch (str): [optional] # noqa: E501 clone_directory (str): Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client.. 
[optional] # noqa: E501 - creds (IoArgoprojEventsV1alpha1GitCreds): [optional] # noqa: E501 + creds (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds): [optional] # noqa: E501 file_path (str): [optional] # noqa: E501 insecure_ignore_host_key (bool): [optional] # noqa: E501 ref (str): [optional] # noqa: E501 - remote (IoArgoprojEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 + remote (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig): [optional] # noqa: E501 ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 tag (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_creds.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_creds.py index 65eb7f70ab91..8c108de7cc93 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_creds.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_creds.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GitCreds(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitCreds - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitCreds - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_remote_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_remote_config.py index d02db3151c8a..b7173272e9a0 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_git_remote_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_remote_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1GitRemoteConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_app_creds.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_app_creds.py index 79739c6ac094..ce893e3367c9 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_app_creds.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_app_creds.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GithubAppCreds(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubAppCreds - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubAppCreds - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_event_source.py similarity index 81% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_event_source.py index 300c34b2da48..e3486f0965bd 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_github_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_event_source.py @@ -30,19 +30,19 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds - from argo_workflows.model.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories - 
from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_app_creds import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_owned_repositories import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1GithubAppCreds'] = IoArgoprojEventsV1alpha1GithubAppCreds - globals()['IoArgoprojEventsV1alpha1OwnedRepositories'] = IoArgoprojEventsV1alpha1OwnedRepositories - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GithubEventSource(ModelNormal): +class 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -100,8 +100,8 @@ def openapi_types(): 'content_type': (str,), # noqa: E501 'delete_hook_on_finish': (bool,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'github_app': (IoArgoprojEventsV1alpha1GithubAppCreds,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'github_app': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds,), # noqa: E501 'github_base_url': (str,), # noqa: E501 'github_upload_url': (str,), # noqa: E501 'id': (str,), # noqa: E501 @@ -109,9 +109,9 @@ def openapi_types(): 'metadata': ({str: (str,)},), # noqa: E501 'organizations': ([str],), # noqa: E501 'owner': (str,), # noqa: E501 - 'repositories': ([IoArgoprojEventsV1alpha1OwnedRepositories],), # noqa: E501 + 'repositories': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories],), # noqa: E501 'repository': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 'webhook_secret': (SecretKeySelector,), # noqa: E501 } @@ -149,7 +149,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -187,8 +187,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 content_type (str): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter 
(IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - github_app (IoArgoprojEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + github_app (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 github_base_url (str): [optional] # noqa: E501 github_upload_url (str): [optional] # noqa: E501 id (str): [optional] # noqa: E501 @@ -196,9 +196,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 organizations ([str]): Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set.. [optional] # noqa: E501 owner (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. [optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. 
[optional] # noqa: E501 repository (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 webhook_secret (SecretKeySelector): [optional] # noqa: E501 """ @@ -248,7 +248,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GithubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -286,8 +286,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 content_type (str): [optional] # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - github_app (IoArgoprojEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + github_app (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds): [optional] # noqa: E501 github_base_url (str): [optional] # noqa: E501 github_upload_url (str): [optional] # noqa: E501 id (str): [optional] # noqa: E501 @@ -295,9 +295,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 organizations ([str]): Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set.. [optional] # noqa: E501 owner (str): [optional] # noqa: E501 - repositories ([IoArgoprojEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. 
[optional] # noqa: E501 + repositories ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories]): Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set.. [optional] # noqa: E501 repository (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 webhook_secret (SecretKeySelector): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gitlab_event_source.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gitlab_event_source.py index 87ad404dd030..81df21720354 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_gitlab_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gitlab_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector 
import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1GitlabEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -95,14 +95,14 @@ def openapi_types(): 'delete_hook_on_finish': (bool,), # noqa: E501 'enable_ssl_verification': (bool,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'gitlab_base_url': (str,), # noqa: E501 'groups': ([str],), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'project_id': (str,), # noqa: E501 'projects': ([str],), # noqa: E501 'secret_token': (SecretKeySelector,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -133,7 +133,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitlabEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -170,14 +170,14 @@ def 
_from_openapi_data(cls, *args, **kwargs): # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 enable_ssl_verification (bool): [optional] # noqa: E501 events ([str]): Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794.. [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 gitlab_base_url (str): [optional] # noqa: E501 groups ([str]): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 project_id (str): [optional] # noqa: E501 projects ([str]): [optional] # noqa: E501 secret_token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -226,7 +226,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1GitlabEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -263,14 +263,14 @@ def __init__(self, *args, **kwargs): # noqa: E501 delete_hook_on_finish (bool): [optional] # noqa: E501 enable_ssl_verification (bool): [optional] # noqa: E501 events ([str]): Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 gitlab_base_url (str): [optional] # noqa: E501 groups ([str]): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 project_id (str): [optional] # noqa: E501 projects ([str]): [optional] # noqa: E501 secret_token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_hdfs_event_source.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_hdfs_event_source.py index dc5fff99079d..001ce6bd2d30 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_hdfs_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_hdfs_event_source.py @@ -31,16 +31,16 @@ def lazy_import(): from argo_workflows.model.config_map_key_selector import ConfigMapKeySelector - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig from argo_workflows.model.secret_key_selector import SecretKeySelector globals()['ConfigMapKeySelector'] = ConfigMapKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1HDFSEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -95,7 +95,7 @@ def openapi_types(): return { 'addresses': ([str],), # noqa: E501 'check_interval': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'hdfs_user': (str,), # noqa: E501 'krb_c_cache_secret': (SecretKeySelector,), # noqa: E501 'krb_config_config_map': (ConfigMapKeySelector,), # noqa: E501 @@ -105,7 +105,7 @@ def openapi_types(): 'krb_username': (str,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'type': (str,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 + 'watch_path_config': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig,), # noqa: E501 } @cached_property @@ -137,7 +137,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HDFSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -172,7 +172,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) addresses ([str]): [optional] # noqa: E501 check_interval (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. 
[optional] # noqa: E501 krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 @@ -182,7 +182,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 type (str): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -231,7 +231,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HDFSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -266,7 +266,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) addresses ([str]): [optional] # noqa: E501 check_interval (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 hdfs_user (str): HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used.. [optional] # noqa: E501 krb_c_cache_secret (SecretKeySelector): [optional] # noqa: E501 krb_config_config_map (ConfigMapKeySelector): [optional] # noqa: E501 @@ -276,7 +276,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 krb_username (str): KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used.. 
[optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 type (str): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_http_trigger.py similarity index 77% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_http_trigger.py index a972868e5139..0a7750a09b77 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_http_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_http_trigger.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1SecureHeader'] = IoArgoprojEventsV1alpha1SecureHeader - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter -class IoArgoprojEventsV1alpha1HTTPTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,14 +93,14 @@ def openapi_types(): """ lazy_import() return { - 'basic_auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 + 'basic_auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 'headers': ({str: (str,)},), # noqa: E501 'method': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'secure_headers': ([IoArgoprojEventsV1alpha1SecureHeader],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'secure_headers': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader],), # noqa: E501 'timeout': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'url': (str,), # noqa: E501 } @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HTTPTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -162,14 +162,14 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + basic_auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 headers ({str: (str,)}): [optional] # noqa: E501 method (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the 
list of key-value extracted from event's payload that are applied to the HTTP trigger resource.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - secure_headers ([IoArgoprojEventsV1alpha1SecureHeader]): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + secure_headers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]): [optional] # noqa: E501 timeout (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): URL refers to the URL to send HTTP request to.. [optional] # noqa: E501 """ @@ -219,7 +219,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1HTTPTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -252,14 +252,14 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic_auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + basic_auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 headers ({str: (str,)}): [optional] # noqa: E501 method (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of key-value extracted from event's payload that 
are applied to the HTTP trigger resource.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - secure_headers ([IoArgoprojEventsV1alpha1SecureHeader]): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + secure_headers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]): [optional] # noqa: E501 timeout (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): URL refers to the URL to send HTTP request to.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_int64_or_string.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_int64_or_string.py index 710ec4d27a1a..29dd85cbd761 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_int64_or_string.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_int64_or_string.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Int64OrString(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -105,7 +105,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Int64OrString - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Int64OrString - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource.py index 41535c92a5ac..0722173cfa66 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Resource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -106,7 +106,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Resource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -188,7 +188,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Resource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource_policy.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource_policy.py index 5461dac117b7..bbf1190f8f86 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_k8_s_resource_policy.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource_policy.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff -class IoArgoprojEventsV1alpha1K8SResourcePolicy(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 + 'backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 'error_on_backoff_timeout': (bool,), # noqa: E501 'labels': ({str: (str,)},), # noqa: E501 } @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 + backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 error_on_backoff_timeout (bool): [optional] # noqa: E501 labels ({str: (str,)}): [optional] # noqa: E501 """ @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -228,7 +228,7 @@ 
def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 + backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 error_on_backoff_timeout (bool): [optional] # noqa: E501 labels ({str: (str,)}): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_consumer_group.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_consumer_group.py index 648263a68d7a..7dea6d40b3e8 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_consumer_group.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_consumer_group.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1KafkaConsumerGroup(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -105,7 +105,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_event_source.py similarity index 79% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_event_source.py index dc63af404426..2781eafdef0e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_event_source.py @@ -30,19 +30,19 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup - from 
argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1KafkaConsumerGroup'] = IoArgoprojEventsV1alpha1KafkaConsumerGroup - globals()['IoArgoprojEventsV1alpha1SASLConfig'] = IoArgoprojEventsV1alpha1SASLConfig - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - - -class IoArgoprojEventsV1alpha1KafkaEventSource(ModelNormal): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_consumer_group import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -96,15 +96,15 @@ def openapi_types(): lazy_import() return { 'config': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'consumer_group': (IoArgoprojEventsV1alpha1KafkaConsumerGroup,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'consumer_group': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'limit_events_per_second': (str,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'partition': (str,), # noqa: E501 - 'sasl': (IoArgoprojEventsV1alpha1SASLConfig,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'sasl': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic': (str,), # noqa: E501 'url': (str,), # noqa: E501 'version': (str,), # noqa: E501 @@ -139,7 +139,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource - a model defined in OpenAPI 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -173,15 +173,15 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) config (str): Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional. [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consumer_group (IoArgoprojEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + consumer_group (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 limit_events_per_second (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 partition (str): [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + sasl (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 version (str): [optional] # noqa: E501 @@ -233,7 +233,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource - a model defined in 
OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -267,15 +267,15 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) config (str): Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional. [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - consumer_group (IoArgoprojEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + consumer_group (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 limit_events_per_second (str): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 partition (str): [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + sasl (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 version (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_trigger.py similarity index 74% rename from 
sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_trigger.py index 08038859f24c..a9bce9cdb8a2 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_kafka_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_trigger.py @@ -30,17 +30,19 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1SASLConfig'] = IoArgoprojEventsV1alpha1SASLConfig - globals()['IoArgoprojEventsV1alpha1SchemaRegistryConfig'] = IoArgoprojEventsV1alpha1SchemaRegistryConfig - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - - -class IoArgoprojEventsV1alpha1KafkaTrigger(ModelNormal): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_schema_registry_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -95,14 +97,16 @@ def openapi_types(): return { 'compress': (bool,), # noqa: E501 'flush_frequency': (int,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'headers': ({str: (str,)},), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'partition': (int,), # noqa: E501 'partitioning_key': (str,), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'required_acks': (int,), # noqa: E501 - 'sasl': (IoArgoprojEventsV1alpha1SASLConfig,), # noqa: E501 - 'schema_registry': (IoArgoprojEventsV1alpha1SchemaRegistryConfig,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'sasl': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig,), # noqa: E501 + 'schema_registry': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig,), # noqa: E501 + 'secure_headers': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader],), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic': (str,), # noqa: E501 'url': (str,), # noqa: E501 'version': (str,), # noqa: E501 @@ -116,6 +120,7 @@ def discriminator(): attribute_map = { 'compress': 'compress', # noqa: E501 'flush_frequency': 'flushFrequency', # noqa: E501 + 'headers': 'headers', # noqa: E501 'parameters': 'parameters', # noqa: E501 'partition': 'partition', # noqa: E501 'partitioning_key': 'partitioningKey', # noqa: E501 @@ -123,6 +128,7 @@ def discriminator(): 'required_acks': 'requiredAcks', # noqa: E501 'sasl': 'sasl', # noqa: E501 'schema_registry': 'schemaRegistry', # noqa: E501 + 'secure_headers': 'secureHeaders', # noqa: E501 'tls': 'tls', # noqa: E501 'topic': 'topic', # noqa: E501 'url': 'url', # noqa: E501 @@ -137,7 +143,7 @@ def discriminator(): 
@classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -172,14 +178,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) compress (bool): [optional] # noqa: E501 flush_frequency (int): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 + headers ({str: (str,)}): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 partition (int): [optional] # noqa: E501 partitioning_key (str): The partitioning key for the messages put on the Kafka topic. +optional.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 required_acks (int): RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional.. 
[optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - schema_registry (IoArgoprojEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + sasl (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig): [optional] # noqa: E501 + schema_registry (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 + secure_headers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): URL of the Kafka broker, multiple URLs separated by comma.. [optional] # noqa: E501 version (str): [optional] # noqa: E501 @@ -231,7 +239,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1KafkaTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -266,14 +274,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) compress (bool): [optional] # noqa: E501 flush_frequency (int): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 + headers ({str: (str,)}): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 partition (int): [optional] # noqa: E501 partitioning_key (str): The partitioning key for the messages put on the Kafka topic. +optional.. 
[optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 required_acks (int): RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional.. [optional] # noqa: E501 - sasl (IoArgoprojEventsV1alpha1SASLConfig): [optional] # noqa: E501 - schema_registry (IoArgoprojEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + sasl (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig): [optional] # noqa: E501 + schema_registry (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig): [optional] # noqa: E501 + secure_headers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): URL of the Kafka broker, multiple URLs separated by comma.. 
[optional] # noqa: E501 version (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_log_trigger.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_log_trigger.py index 1a51a4fbb283..ee5872bfed0f 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_log_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_log_trigger.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1LogTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1LogTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1LogTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata.py index 3a57a5d60c6a..1ae38b7bec7a 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_metadata.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Metadata(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Metadata - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Metadata - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_mqtt_event_source.py similarity index 81% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py rename to 
sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_mqtt_event_source.py index 91a7ce39220e..c05f3456e236 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_mqtt_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_mqtt_event_source.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig -class IoArgoprojEventsV1alpha1MQTTEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -93,13 +93,13 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 'client_id': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic': (str,), # noqa: E501 'url': (str,), # noqa: E501 } @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1MQTTEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -162,13 +162,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: 
E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 client_id (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 """ @@ -219,7 +219,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1MQTTEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -252,13 +252,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 client_id (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + 
connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): [optional] # noqa: E501 url (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth.py similarity index 93% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth.py index e066d7dae76c..8bd27330cb23 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_auth.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1NATSAuth(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth(ModelNormal): 
"""NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): """ lazy_import() return { - 'basic': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 + 'basic': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 'credential': (SecretKeySelector,), # noqa: E501 'nkey': (SecretKeySelector,), # noqa: E501 'token': (SecretKeySelector,), # noqa: E501 @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -148,7 +148,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + basic (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 credential (SecretKeySelector): [optional] # noqa: E501 nkey (SecretKeySelector): [optional] # noqa: E501 token (SecretKeySelector): [optional] # noqa: E501 @@ -200,7 +200,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSAuth - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -233,7 +233,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - basic 
(IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + basic (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 credential (SecretKeySelector): [optional] # noqa: E501 nkey (SecretKeySelector): [optional] # noqa: E501 token (SecretKeySelector): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_events_source.py similarity index 80% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_events_source.py index 7534ec59b2eb..6ef5fceba7bb 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_events_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_events_source.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1NATSAuth'] = IoArgoprojEventsV1alpha1NATSAuth - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig -class IoArgoprojEventsV1alpha1NATSEventsSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,13 +93,14 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1NATSAuth,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 + 'queue': (str,), # noqa: E501 'subject': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'url': (str,), # noqa: E501 } @@ -114,6 +115,7 @@ def discriminator(): 'filter': 'filter', # noqa: E501 'json_body': 'jsonBody', # noqa: E501 'metadata': 'metadata', # noqa: E501 + 'queue': 'queue', # noqa: E501 'subject': 'subject', # noqa: E501 'tls': 'tls', # noqa: E501 'url': 'url', # noqa: E501 @@ -127,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSEventsSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -160,13 +162,14 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1NATSAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # 
noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 + queue (str): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): [optional] # noqa: E501 """ @@ -216,7 +219,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSEventsSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -249,13 +252,14 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1NATSAuth): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 + queue (str): [optional] # noqa: E501 subject (str): [optional] # noqa: E501 - tls 
(IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_trigger.py similarity index 82% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_trigger.py index fe91ddb51d42..ad980eae723f 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nats_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_trigger.py @@ -30,13 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter -class IoArgoprojEventsV1alpha1NATSTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,10 +91,11 @@ def openapi_types(): """ lazy_import() return { - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth,), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'subject': (str,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'url': (str,), # noqa: E501 } @@ -102,6 +105,7 @@ def discriminator(): attribute_map = { + 'auth': 'auth', # noqa: E501 'parameters': 'parameters', # noqa: E501 'payload': 'payload', # noqa: E501 'subject': 'subject', # noqa: E501 @@ -117,7 +121,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -150,10 +154,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in 
_visited_composed_classes = (Animal,) - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 subject (str): Name of the subject to put message on.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): URL of the NATS cluster.. [optional] # noqa: E501 """ @@ -203,7 +208,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NATSTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -236,10 +241,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 subject (str): Name of the subject to put message on.. 
[optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 url (str): URL of the NATS cluster.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nsq_event_source.py similarity index 84% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nsq_event_source.py index f3e0513de680..cd14ea55344e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_nsq_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nsq_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig -class IoArgoprojEventsV1alpha1NSQEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,12 +92,12 @@ def openapi_types(): lazy_import() return { 'channel': (str,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'host_address': (str,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'topic': (str,), # noqa: E501 } @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NSQEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -159,12 +159,12 @@ def _from_openapi_data(cls, *args, **kwargs): # 
noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) channel (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): Topic to subscribe to.. [optional] # noqa: E501 """ @@ -214,7 +214,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1NSQEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -248,12 +248,12 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) channel (str): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls 
(IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 topic (str): Topic to subscribe to.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_open_whisk_trigger.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_open_whisk_trigger.py index 893b248f29ba..c6f23728e826 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_open_whisk_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_open_whisk_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1OpenWhiskTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,8 +93,8 @@ def openapi_types(): 'auth_token': (SecretKeySelector,), # noqa: E501 'host': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'version': (str,), # noqa: E501 } @@ -121,7 +121,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -158,8 +158,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 auth_token (SecretKeySelector): [optional] # noqa: E501 host (str): Host URL of the OpenWhisk.. [optional] # noqa: E501 namespace (str): Namespace for the action. Defaults to \"_\". +optional.. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 version (str): [optional] # noqa: E501 """ @@ -209,7 +209,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -246,8 +246,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 auth_token (SecretKeySelector): [optional] # noqa: E501 host (str): Host URL of the OpenWhisk.. [optional] # noqa: E501 namespace (str): Namespace for the action. Defaults to \"_\". +optional.. [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 version (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_owned_repositories.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_owned_repositories.py index fb7556ac1f1a..c57bacd477b8 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_owned_repositories.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_owned_repositories.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1OwnedRepositories(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OwnedRepositories - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1OwnedRepositories - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_payload_field.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_payload_field.py index 78ade3573f30..38d824988763 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_payload_field.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_payload_field.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1PayloadField(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PayloadField - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PayloadField - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pub_sub_event_source.py similarity index 93% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pub_sub_event_source.py index 5badd3c68fac..9ffa1693f5ec 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pub_sub_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pub_sub_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter from argo_workflows.model.secret_key_selector import SecretKeySelector 
- globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1PubSubEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -91,7 +91,7 @@ def openapi_types(): return { 'credential_secret': (SecretKeySelector,), # noqa: E501 'delete_subscription_on_finish': (bool,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'project_id': (str,), # noqa: E501 @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PubSubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -160,7 +160,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) credential_secret (SecretKeySelector): [optional] # noqa: E501 delete_subscription_on_finish (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 project_id (str): [optional] # noqa: E501 @@ -215,7 +215,7 @@ def _from_openapi_data(cls, *args, 
**kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PubSubEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -250,7 +250,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) credential_secret (SecretKeySelector): [optional] # noqa: E501 delete_subscription_on_finish (bool): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 project_id (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_event_source.py similarity index 86% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_event_source.py index 8d4a934bbe30..9115a40c9d5e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_event_source.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import 
IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1PulsarEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -96,11 +96,11 @@ def openapi_types(): 'auth_athenz_params': ({str: (str,)},), # noqa: E501 'auth_athenz_secret': (SecretKeySelector,), # noqa: E501 'auth_token_secret': (SecretKeySelector,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'tls_allow_insecure_connection': (bool,), # noqa: E501 'tls_trust_certs_secret': (SecretKeySelector,), # noqa: E501 'tls_validate_hostname': (bool,), # noqa: E501 @@ -139,7 +139,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -175,11 +175,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 
json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 tls_allow_insecure_connection (bool): [optional] # noqa: E501 tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 tls_validate_hostname (bool): [optional] # noqa: E501 @@ -234,7 +234,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -270,11 +270,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 tls_allow_insecure_connection (bool): [optional] # noqa: E501 tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 tls_validate_hostname (bool): [optional] # noqa: E501 diff --git 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_trigger.py similarity index 82% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_trigger.py index 5ad4e589aac6..bd7bba64f699 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_pulsar_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_trigger.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1PulsarTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -96,10 +96,10 @@ def openapi_types(): 'auth_athenz_params': ({str: (str,)},), # noqa: E501 'auth_athenz_secret': (SecretKeySelector,), # noqa: E501 'auth_token_secret': (SecretKeySelector,), # noqa: E501 - 'connection_backoff': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'payload': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'connection_backoff': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'payload': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'tls_allow_insecure_connection': (bool,), # noqa: E501 'tls_trust_certs_secret': (SecretKeySelector,), # noqa: E501 'tls_validate_hostname': (bool,), # noqa: E501 @@ -135,7 +135,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger - a model defined in OpenAPI Keyword Args: _check_type 
(bool): if True, values for parameters in openapi_types @@ -171,10 +171,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 tls_allow_insecure_connection (bool): [optional] # noqa: E501 tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 tls_validate_hostname (bool): [optional] # noqa: E501 @@ -228,7 +228,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1PulsarTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -264,10 +264,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 auth_athenz_params ({str: (str,)}): [optional] # noqa: E501 auth_athenz_secret (SecretKeySelector): [optional] # noqa: E501 auth_token_secret (SecretKeySelector): [optional] # noqa: E501 - connection_backoff (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 - payload ([IoArgoprojEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + connection_backoff (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved Kafka trigger object.. [optional] # noqa: E501 + payload ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Payload is the list of key-value extracted from an event payload to construct the request payload.. 
[optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 tls_allow_insecure_connection (bool): [optional] # noqa: E501 tls_trust_certs_secret (SecretKeySelector): [optional] # noqa: E501 tls_validate_hostname (bool): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_rate_limit.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_rate_limit.py index ff82cc26642f..3151425ca2c9 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_rate_limit.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_rate_limit.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1RateLimit(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RateLimit - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RateLimit - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_event_source.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_event_source.py index e3cf938a9038..f6a8ec662f0e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1RedisEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,13 +93,13 @@ def openapi_types(): return { 'channels': ([str],), # noqa: E501 'db': (int,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'host_address': (str,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'namespace': (str,), # noqa: E501 'password': (SecretKeySelector,), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'username': (str,), # noqa: E501 } @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,13 +164,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) channels ([str]): [optional] # noqa: E501 db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (str): [optional] # noqa: E501 """ @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 
@convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -255,13 +255,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) channels ([str]): [optional] # noqa: E501 db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_stream_event_source.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_stream_event_source.py index 1b2515ffac9b..8dcc200777a5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_redis_stream_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_stream_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from 
argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1TLSConfig'] = IoArgoprojEventsV1alpha1TLSConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1RedisStreamEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,13 +93,13 @@ def openapi_types(): return { 'consumer_group': (str,), # noqa: E501 'db': (int,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'host_address': (str,), # noqa: E501 'max_msg_count_per_read': (int,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'password': (SecretKeySelector,), # noqa: E501 'streams': ([str],), # noqa: E501 - 'tls': (IoArgoprojEventsV1alpha1TLSConfig,), # noqa: E501 + 'tls': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig,), # noqa: E501 'username': (str,), # noqa: E501 } @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,13 +164,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) consumer_group (str): [optional] # noqa: E501 db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 max_msg_count_per_read (int): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 streams ([str]): Streams to look for entries. XREADGROUP is used on all streams using a single consumer group.. 
[optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (str): [optional] # noqa: E501 """ @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -255,13 +255,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) consumer_group (str): [optional] # noqa: E501 db (int): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 host_address (str): [optional] # noqa: E501 max_msg_count_per_read (int): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 streams ([str]): Streams to look for entries. XREADGROUP is used on all streams using a single consumer group.. 
[optional] # noqa: E501 - tls (IoArgoprojEventsV1alpha1TLSConfig): [optional] # noqa: E501 + tls (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig): [optional] # noqa: E501 username (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_event_source.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_event_source.py index d26918d0740f..ea430f2db929 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter from argo_workflows.model.group_version_resource import GroupVersionResource - from argo_workflows.model.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter globals()['GroupVersionResource'] = GroupVersionResource - globals()['IoArgoprojEventsV1alpha1ResourceFilter'] = IoArgoprojEventsV1alpha1ResourceFilter -class IoArgoprojEventsV1alpha1ResourceEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -90,7 +90,7 @@ def openapi_types(): lazy_import() return { 'event_types': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1ResourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter,), # noqa: E501 'group_version_resource': (GroupVersionResource,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'namespace': (str,), # noqa: E501 @@ -117,7 +117,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -151,7 +151,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) event_types ([str]): EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1ResourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter): [optional] # noqa: E501 group_version_resource (GroupVersionResource): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 @@ -203,7 +203,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -237,7 +237,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) event_types ([str]): EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE.. 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1ResourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter): [optional] # noqa: E501 group_version_resource (GroupVersionResource): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_filter.py similarity index 90% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_filter.py index 26dd205a0f1f..b0df740fb7a4 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_resource_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_filter.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector - globals()['IoArgoprojEventsV1alpha1Selector'] = IoArgoprojEventsV1alpha1Selector + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_selector import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector -class IoArgoprojEventsV1alpha1ResourceFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -89,8 +89,8 @@ def openapi_types(): return { 'after_start': (bool,), # noqa: E501 'created_by': (datetime,), # noqa: E501 - 'fields': ([IoArgoprojEventsV1alpha1Selector],), # noqa: E501 - 'labels': ([IoArgoprojEventsV1alpha1Selector],), # noqa: E501 + 'fields': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector],), # noqa: E501 + 'labels': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector],), # noqa: E501 'prefix': (str,), # noqa: E501 } @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -150,8 +150,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) after_start (bool): [optional] # noqa: E501 created_by (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - labels ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 + fields ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]): [optional] # noqa: E501 + labels ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]): [optional] # noqa: E501 prefix (str): [optional] # noqa: E501 """ @@ -201,7 +201,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ResourceFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -236,8 +236,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) after_start (bool): [optional] # noqa: E501 created_by (datetime): Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers.. 
[optional] # noqa: E501 - fields ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 - labels ([IoArgoprojEventsV1alpha1Selector]): [optional] # noqa: E501 + fields ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]): [optional] # noqa: E501 + labels ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]): [optional] # noqa: E501 prefix (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact.py similarity index 89% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact.py index 0778b1d7ba76..842f85643fe3 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_artifact.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_bucket import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1S3Bucket'] = IoArgoprojEventsV1alpha1S3Bucket - globals()['IoArgoprojEventsV1alpha1S3Filter'] = IoArgoprojEventsV1alpha1S3Filter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1S3Artifact(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,11 +92,11 @@ def openapi_types(): lazy_import() return { 'access_key': (SecretKeySelector,), # noqa: E501 - 'bucket': (IoArgoprojEventsV1alpha1S3Bucket,), # noqa: E501 + 'bucket': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket,), # noqa: E501 'ca_certificate': (SecretKeySelector,), # noqa: E501 'endpoint': (str,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1S3Filter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter,), # noqa: E501 'insecure': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'region': (str,), # noqa: E501 @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Artifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -163,11 +163,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) access_key (SecretKeySelector): [optional] # noqa: E501 - bucket (IoArgoprojEventsV1alpha1S3Bucket): [optional] # noqa: E501 + bucket (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket): [optional] # noqa: E501 ca_certificate (SecretKeySelector): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 events ([str]): 
[optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1S3Filter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter): [optional] # noqa: E501 insecure (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Artifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -254,11 +254,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) access_key (SecretKeySelector): [optional] # noqa: E501 - bucket (IoArgoprojEventsV1alpha1S3Bucket): [optional] # noqa: E501 + bucket (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket): [optional] # noqa: E501 ca_certificate (SecretKeySelector): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1S3Filter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter): [optional] # noqa: E501 insecure (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_bucket.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_bucket.py index 56f61b075ced..aa216cef2153 
100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_bucket.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_bucket.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1S3Bucket(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Bucket - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Bucket - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_filter.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_filter.py index a2410f720c48..9f37b2b9c567 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_s3_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_filter.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1S3Filter(ModelNormal): 
+class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Filter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1S3Filter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config.py index 4e4bf3408aef..b2524ace59be 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sasl_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1SASLConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SASLConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SASLConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_schema_registry_config.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_schema_registry_config.py index 0c0a69d68fd4..f2d3edc3e83d 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_schema_registry_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_schema_registry_config.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth - globals()['IoArgoprojEventsV1alpha1BasicAuth'] = IoArgoprojEventsV1alpha1BasicAuth + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth -class IoArgoprojEventsV1alpha1SchemaRegistryConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'auth': (IoArgoprojEventsV1alpha1BasicAuth,), # noqa: E501 + 'auth': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth,), # noqa: E501 'schema_id': (int,), # noqa: E501 'url': (str,), # noqa: E501 } @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 schema_id (int): [optional] # noqa: E501 url (str): Schema Registry URL.. 
[optional] # noqa: E501 """ @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -228,7 +228,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - auth (IoArgoprojEventsV1alpha1BasicAuth): [optional] # noqa: E501 + auth (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth): [optional] # noqa: E501 schema_id (int): [optional] # noqa: E501 url (str): Schema Registry URL.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header.py index 3b2bc72fc0f9..9f35c93beadd 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_secure_header.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource - globals()['IoArgoprojEventsV1alpha1ValueFromSource'] = IoArgoprojEventsV1alpha1ValueFromSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_value_from_source import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource -class IoArgoprojEventsV1alpha1SecureHeader(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -88,7 +88,7 @@ def openapi_types(): lazy_import() return { 'name': (str,), # noqa: E501 - 'value_from': (IoArgoprojEventsV1alpha1ValueFromSource,), # noqa: E501 + 'value_from': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource,), # noqa: E501 } @cached_property @@ -109,7 +109,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SecureHeader - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -143,7 +143,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) name (str): [optional] # noqa: E501 - value_from (IoArgoprojEventsV1alpha1ValueFromSource): [optional] # noqa: E501 + value_from (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -192,7 +192,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SecureHeader - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -226,7 +226,7 @@ def __init__(self, 
*args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) name (str): [optional] # noqa: E501 - value_from (IoArgoprojEventsV1alpha1ValueFromSource): [optional] # noqa: E501 + value_from (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_selector.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_selector.py index 54c76eeeaf3e..594c065be3c6 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_selector.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_selector.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1Selector(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -105,7 +105,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Selector - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Selector - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor.py index e0ccea56f660..eab06e78be51 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_spec import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus from argo_workflows.model.object_meta import ObjectMeta - globals()['IoArgoprojEventsV1alpha1EventSourceSpec'] = IoArgoprojEventsV1alpha1EventSourceSpec - globals()['IoArgoprojEventsV1alpha1EventSourceStatus'] = IoArgoprojEventsV1alpha1EventSourceStatus + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus globals()['ObjectMeta'] = ObjectMeta -class IoArgoprojEventsV1alpha1EventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -92,8 +92,8 @@ def openapi_types(): lazy_import() return { 'metadata': (ObjectMeta,), # noqa: E501 - 'spec': (IoArgoprojEventsV1alpha1EventSourceSpec,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1EventSourceStatus,), # noqa: E501 + 'spec': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec,), # noqa: E501 + 'status': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus,), # noqa: E501 } @cached_property @@ -115,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -149,8 +149,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metadata (ObjectMeta): [optional] # noqa: E501 - spec 
(IoArgoprojEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 + spec (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -199,7 +199,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -233,8 +233,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metadata (ObjectMeta): [optional] # noqa: E501 - spec (IoArgoprojEventsV1alpha1EventSourceSpec): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1EventSourceStatus): [optional] # noqa: E501 + spec (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list.py index 831798b34304..5a64a2e903d2 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_list.py +++ 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor from argo_workflows.model.list_meta import ListMeta - globals()['IoArgoprojEventsV1alpha1EventSource'] = IoArgoprojEventsV1alpha1EventSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor globals()['ListMeta'] = ListMeta -class IoArgoprojEventsV1alpha1EventSourceList(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): """ lazy_import() return { - 'items': ([IoArgoprojEventsV1alpha1EventSource],), # noqa: E501 + 'items': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor],), # noqa: E501 'metadata': (ListMeta,), # noqa: E501 } @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceList - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,7 +144,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1EventSource]): [optional] # noqa: E501 + items ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor]): [optional] # noqa: E501 metadata 
(ListMeta): [optional] # noqa: E501 """ @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceList - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,7 +227,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - items ([IoArgoprojEventsV1alpha1EventSource]): [optional] # noqa: E501 + items ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor]): [optional] # noqa: E501 metadata (ListMeta): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_spec.py similarity index 83% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_spec.py index ed5683bcdb7b..fba536847cb1 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sensor_spec.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_spec.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency - from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger - globals()['IoArgoprojEventsV1alpha1EventDependency'] = 
IoArgoprojEventsV1alpha1EventDependency - globals()['IoArgoprojEventsV1alpha1Template'] = IoArgoprojEventsV1alpha1Template - globals()['IoArgoprojEventsV1alpha1Trigger'] = IoArgoprojEventsV1alpha1Trigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger -class IoArgoprojEventsV1alpha1SensorSpec(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -91,14 +91,14 @@ def openapi_types(): """ lazy_import() return { - 'dependencies': ([IoArgoprojEventsV1alpha1EventDependency],), # noqa: E501 + 'dependencies': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency],), # noqa: E501 'error_on_failed_round': (bool,), # noqa: E501 'event_bus_name': (str,), # noqa: E501 'logging_fields': ({str: (str,)},), # noqa: E501 'replicas': (int,), # noqa: E501 'revision_history_limit': (int,), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1Template,), # noqa: E501 - 'triggers': ([IoArgoprojEventsV1alpha1Trigger],), # noqa: E501 + 'template': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template,), # noqa: E501 + 'triggers': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger],), # noqa: E501 } @cached_property @@ -125,7 +125,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorSpec - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -158,14 +158,14 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - dependencies ([IoArgoprojEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 + dependencies ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 error_on_failed_round (bool): ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed.. 
[optional] # noqa: E501 event_bus_name (str): [optional] # noqa: E501 logging_fields ({str: (str,)}): [optional] # noqa: E501 replicas (int): [optional] # noqa: E501 revision_history_limit (int): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - triggers ([IoArgoprojEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template): [optional] # noqa: E501 + triggers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -214,7 +214,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SensorSpec - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -247,14 +247,14 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - dependencies ([IoArgoprojEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 + dependencies ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency]): Dependencies is a list of the events that this sensor is dependent on.. [optional] # noqa: E501 error_on_failed_round (bool): ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed.. 
[optional] # noqa: E501 event_bus_name (str): [optional] # noqa: E501 logging_fields ({str: (str,)}): [optional] # noqa: E501 replicas (int): [optional] # noqa: E501 revision_history_limit (int): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - triggers ([IoArgoprojEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template): [optional] # noqa: E501 + triggers ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger]): Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_status.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_status.py index f2f0e349af17..1260475f0e86 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_status.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_status.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status - globals()['IoArgoprojEventsV1alpha1Status'] = IoArgoprojEventsV1alpha1Status + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status -class IoArgoprojEventsV1alpha1EventSourceStatus(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'status': (IoArgoprojEventsV1alpha1Status,), # noqa: E501 + 'status': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status,), # noqa: E501 } @cached_property @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceStatus - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceStatus - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - status (IoArgoprojEventsV1alpha1Status): 
[optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_service.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_service.py index ca7c730af766..d7abc48a4cdb 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_service.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_service.py @@ -30,11 +30,13 @@ def lazy_import(): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata from argo_workflows.model.service_port import ServicePort + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata globals()['ServicePort'] = ServicePort -class IoArgoprojEventsV1alpha1Service(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -88,6 +90,7 @@ def openapi_types(): lazy_import() return { 'cluster_ip': (str,), # noqa: E501 + 'metadata': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata,), # noqa: E501 'ports': ([ServicePort],), # noqa: E501 } @@ -98,6 +101,7 @@ def discriminator(): attribute_map = { 'cluster_ip': 'clusterIP', # noqa: E501 + 'metadata': 'metadata', # noqa: E501 'ports': 'ports', # noqa: E501 } @@ -109,7 +113,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Service - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -143,6 +147,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) cluster_ip (str): [optional] # noqa: E501 + metadata (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata): [optional] # noqa: E501 ports ([ServicePort]): [optional] # noqa: E501 """ @@ -192,7 +197,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Service - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -226,6 +231,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) cluster_ip (str): [optional] # noqa: E501 + metadata (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata): [optional] # noqa: E501 ports ([ServicePort]): [optional] # noqa: E501 """ diff --git 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sftp_event_source.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sftp_event_source.py index 79d29af7a1e8..4a7da57bb220 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sftp_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sftp_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WatchPathConfig'] = IoArgoprojEventsV1alpha1WatchPathConfig + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig globals()['SecretKeySelector'] = SecretKeySelector -class 
IoArgoprojEventsV1alpha1SFTPEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -93,13 +93,13 @@ def openapi_types(): return { 'address': (SecretKeySelector,), # noqa: E501 'event_type': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'password': (SecretKeySelector,), # noqa: E501 'poll_interval_duration': (str,), # noqa: E501 'ssh_key_secret': (SecretKeySelector,), # noqa: E501 'username': (SecretKeySelector,), # noqa: E501 - 'watch_path_config': (IoArgoprojEventsV1alpha1WatchPathConfig,), # noqa: E501 + 'watch_path_config': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig,), # noqa: E501 } @cached_property @@ -127,7 +127,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SFTPEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -162,13 +162,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) address (SecretKeySelector): [optional] # noqa: E501 event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 poll_interval_duration (str): [optional] # noqa: E501 ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 username 
(SecretKeySelector): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -217,7 +217,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SFTPEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -252,13 +252,13 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) address (SecretKeySelector): [optional] # noqa: E501 event_type (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 password (SecretKeySelector): [optional] # noqa: E501 poll_interval_duration (str): [optional] # noqa: E501 ssh_key_secret (SecretKeySelector): [optional] # noqa: E501 username (SecretKeySelector): [optional] # noqa: E501 - watch_path_config (IoArgoprojEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 + watch_path_config (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_event_source.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py rename to 
sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_event_source.py index 56a1713060ec..f8d57b14a017 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1SlackEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -91,11 +91,11 @@ def openapi_types(): """ lazy_import() return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'signing_secret': (SecretKeySelector,), # noqa: E501 'token': (SecretKeySelector,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -119,7 +119,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -152,11 +152,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 signing_secret (SecretKeySelector): [optional] # noqa: E501 token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -205,7 +205,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackEventSource - a model defined in 
OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -238,11 +238,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 signing_secret (SecretKeySelector): [optional] # noqa: E501 token (SecretKeySelector): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_sender.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_sender.py index 1692b09f190e..9ba230a7ba73 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_sender.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_sender.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1SlackSender(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackSender - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackSender - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_thread.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_thread.py index e8fb2e1d05ee..5fe0c3907677 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_thread.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_thread.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1SlackThread(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackThread - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackThread - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_trigger.py similarity index 84% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_trigger.py index dfcaee6f5a30..3386a5a8b9ec 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_slack_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_trigger.py @@ -30,17 +30,17 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_sender import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_thread import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1SlackSender'] = IoArgoprojEventsV1alpha1SlackSender - globals()['IoArgoprojEventsV1alpha1SlackThread'] = IoArgoprojEventsV1alpha1SlackThread - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1SlackTrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -97,10 +97,10 @@ def openapi_types(): 'blocks': (str,), # noqa: E501 'channel': (str,), # noqa: E501 'message': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'sender': (IoArgoprojEventsV1alpha1SlackSender,), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'sender': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender,), # noqa: E501 'slack_token': (SecretKeySelector,), # noqa: E501 - 'thread': (IoArgoprojEventsV1alpha1SlackThread,), # noqa: E501 + 'thread': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread,), # noqa: E501 } @cached_property @@ -127,7 +127,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,10 +164,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 blocks (str): [optional] # noqa: E501 channel (str): [optional] # noqa: E501 message (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - sender (IoArgoprojEventsV1alpha1SlackSender): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + sender (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender): [optional] # noqa: E501 slack_token (SecretKeySelector): [optional] # noqa: E501 - thread (IoArgoprojEventsV1alpha1SlackThread): [optional] # noqa: E501 + thread (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -216,7 +216,7 @@ def _from_openapi_data(cls, 
*args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SlackTrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -253,10 +253,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 blocks (str): [optional] # noqa: E501 channel (str): [optional] # noqa: E501 message (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - sender (IoArgoprojEventsV1alpha1SlackSender): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + sender (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender): [optional] # noqa: E501 slack_token (SecretKeySelector): [optional] # noqa: E501 - thread (IoArgoprojEventsV1alpha1SlackThread): [optional] # noqa: E501 + thread (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sns_event_source.py similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sns_event_source.py index 1ee61864431e..7a1eb05371bf 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sns_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sns_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from 
argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1SNSEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -93,14 +93,14 @@ def openapi_types(): return { 'access_key': (SecretKeySelector,), # noqa: E501 'endpoint': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'region': (str,), # noqa: E501 'role_arn': (str,), # noqa: E501 'secret_key': (SecretKeySelector,), # noqa: E501 'topic_arn': (str,), # noqa: E501 'validate_signature': (bool,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -129,7 +129,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SNSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,14 +164,14 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) access_key (SecretKeySelector): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 role_arn (str): [optional] # noqa: E501 secret_key (SecretKeySelector): [optional] # noqa: E501 topic_arn (str): [optional] # noqa: E501 validate_signature (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ 
_check_type = kwargs.pop('_check_type', True) @@ -220,7 +220,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SNSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -255,14 +255,14 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) access_key (SecretKeySelector): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 role_arn (str): [optional] # noqa: E501 secret_key (SecretKeySelector): [optional] # noqa: E501 topic_arn (str): [optional] # noqa: E501 validate_signature (bool): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sqs_event_source.py similarity index 93% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sqs_event_source.py index 44d583d5dbe6..347a9690f6bb 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_sqs_event_source.py +++ 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sqs_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1SQSEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -92,7 +92,7 @@ def openapi_types(): 'access_key': (SecretKeySelector,), # noqa: E501 'dlq': (bool,), # noqa: E501 'endpoint': (str,), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 'json_body': (bool,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'queue': (str,), # noqa: E501 @@ -133,7 +133,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SQSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -169,7 +169,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 access_key (SecretKeySelector): [optional] # noqa: E501 dlq (bool): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue (str): [optional] # noqa: E501 @@ -227,7 +227,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1SQSEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -263,7 +263,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 access_key (SecretKeySelector): [optional] # noqa: E501 dlq (bool): [optional] # noqa: E501 endpoint (str): [optional] # noqa: E501 - 
filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 json_body (bool): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 queue (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_standard_k8_s_trigger.py similarity index 86% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_standard_k8_s_trigger.py index 0f0a4e540842..2bf34da0959c 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_standard_k8_s_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_standard_k8_s_trigger.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1ArtifactLocation'] = IoArgoprojEventsV1alpha1ArtifactLocation - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation'] = 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter -class IoArgoprojEventsV1alpha1StandardK8STrigger(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -91,9 +91,9 @@ def openapi_types(): return { 'live_object': (bool,), # noqa: E501 'operation': (str,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 'patch_strategy': (str,), # noqa: E501 - 'source': (IoArgoprojEventsV1alpha1ArtifactLocation,), # noqa: E501 + 'source': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation,), # noqa: E501 } @cached_property @@ -117,7 +117,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -152,9 +152,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) live_object (bool): [optional] # noqa: E501 operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. 
[optional] # noqa: E501 patch_strategy (str): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 + source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -203,7 +203,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -238,9 +238,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) live_object (bool): [optional] # noqa: E501 operation (str): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): Parameters is the list of parameters that is applied to resolved K8s trigger object.. 
[optional] # noqa: E501 patch_strategy (str): [optional] # noqa: E501 - source (IoArgoprojEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 + source (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status.py index c29559f7eb18..d8fbda05eed0 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition - globals()['IoArgoprojEventsV1alpha1Condition'] = IoArgoprojEventsV1alpha1Condition + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_condition import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition -class IoArgoprojEventsV1alpha1Status(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'conditions': ([IoArgoprojEventsV1alpha1Condition],), # noqa: E501 + 'conditions': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition],), # noqa: E501 } @cached_property @@ -107,7 +107,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Status - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -140,7 +140,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - conditions ([IoArgoprojEventsV1alpha1Condition]): [optional] # noqa: E501 + conditions ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition]): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Status - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - conditions ([IoArgoprojEventsV1alpha1Condition]): [optional] # noqa: E501 + conditions ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition]): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status_policy.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status_policy.py index d27324e86210..777f74385e57 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_status_policy.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status_policy.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1StatusPolicy(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -101,7 +101,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StatusPolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -183,7 +183,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StatusPolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_event_source.py 
similarity index 88% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_event_source.py index 6b3a78fffbe6..163e4f982362 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_event_source.py @@ -30,15 +30,15 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_filter import IoArgoprojEventsV1alpha1StorageGridFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector - globals()['IoArgoprojEventsV1alpha1StorageGridFilter'] = IoArgoprojEventsV1alpha1StorageGridFilter - globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1StorageGridEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI 
Generator. Ref: https://openapi-generator.tech @@ -95,11 +95,11 @@ def openapi_types(): 'auth_token': (SecretKeySelector,), # noqa: E501 'bucket': (str,), # noqa: E501 'events': ([str],), # noqa: E501 - 'filter': (IoArgoprojEventsV1alpha1StorageGridFilter,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter,), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 'region': (str,), # noqa: E501 'topic_arn': (str,), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -127,7 +127,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -164,11 +164,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 auth_token (SecretKeySelector): [optional] # noqa: E501 bucket (str): Name of the bucket to register notifications for.. 
[optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 topic_arn (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -217,7 +217,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -254,11 +254,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 auth_token (SecretKeySelector): [optional] # noqa: E501 bucket (str): Name of the bucket to register notifications for.. 
[optional] # noqa: E501 events ([str]): [optional] # noqa: E501 - filter (IoArgoprojEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 region (str): [optional] # noqa: E501 topic_arn (str): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_filter.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_filter.py index e15d005d6566..301dd4a1541e 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_storage_grid_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_filter.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1StorageGridFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StorageGridFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_stripe_event_source.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_stripe_event_source.py index 60ce13c32c0a..3b02fdbdeb79 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_stripe_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_stripe_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext from argo_workflows.model.secret_key_selector import SecretKeySelector 
- globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1StripeEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -93,7 +93,7 @@ def openapi_types(): 'create_webhook': (bool,), # noqa: E501 'event_filter': ([str],), # noqa: E501 'metadata': ({str: (str,)},), # noqa: E501 - 'webhook': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'webhook': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -117,7 +117,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StripeEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -154,7 +154,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 create_webhook (bool): [optional] # noqa: E501 event_filter ([str]): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -203,7 +203,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1StripeEventSource - a model defined in OpenAPI + 
"""GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -240,7 +240,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 create_webhook (bool): [optional] # noqa: E501 event_filter ([str]): [optional] # noqa: E501 metadata ({str: (str,)}): [optional] # noqa: E501 - webhook (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + webhook (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template.py similarity index 90% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template.py index fe6dd7c6b393..d7c193ad26fc 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_template.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template.py @@ -31,22 +31,22 @@ def lazy_import(): from argo_workflows.model.affinity import Affinity - from argo_workflows.model.container import Container - from argo_workflows.model.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_container import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata from argo_workflows.model.local_object_reference import LocalObjectReference from argo_workflows.model.pod_security_context import 
PodSecurityContext from argo_workflows.model.toleration import Toleration from argo_workflows.model.volume import Volume globals()['Affinity'] = Affinity - globals()['Container'] = Container - globals()['IoArgoprojEventsV1alpha1Metadata'] = IoArgoprojEventsV1alpha1Metadata + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata globals()['LocalObjectReference'] = LocalObjectReference globals()['PodSecurityContext'] = PodSecurityContext globals()['Toleration'] = Toleration globals()['Volume'] = Volume -class IoArgoprojEventsV1alpha1Template(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -100,9 +100,9 @@ def openapi_types(): lazy_import() return { 'affinity': (Affinity,), # noqa: E501 - 'container': (Container,), # noqa: E501 + 'container': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container,), # noqa: E501 'image_pull_secrets': ([LocalObjectReference],), # noqa: E501 - 'metadata': (IoArgoprojEventsV1alpha1Metadata,), # noqa: E501 + 'metadata': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata,), # noqa: E501 'node_selector': ({str: (str,)},), # noqa: E501 'priority': (int,), # noqa: E501 'priority_class_name': (str,), # noqa: E501 @@ -139,7 +139,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Template - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -173,9 +173,9 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its 
discriminator because we passed in _visited_composed_classes = (Animal,) affinity (Affinity): [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 + container (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container): [optional] # noqa: E501 image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - metadata (IoArgoprojEventsV1alpha1Metadata): [optional] # noqa: E501 + metadata (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata): [optional] # noqa: E501 node_selector ({str: (str,)}): [optional] # noqa: E501 priority (int): [optional] # noqa: E501 priority_class_name (str): [optional] # noqa: E501 @@ -231,7 +231,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Template - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -265,9 +265,9 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) affinity (Affinity): [optional] # noqa: E501 - container (Container): [optional] # noqa: E501 + container (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container): [optional] # noqa: E501 image_pull_secrets ([LocalObjectReference]): [optional] # noqa: E501 - metadata (IoArgoprojEventsV1alpha1Metadata): [optional] # noqa: E501 + metadata (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata): [optional] # noqa: E501 node_selector ({str: (str,)}): [optional] # noqa: E501 priority (int): [optional] # noqa: E501 priority_class_name (str): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_time_filter.py similarity index 
97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_time_filter.py index 8ad1034f178b..a90d973008d7 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_time_filter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_time_filter.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1TimeFilter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TimeFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TimeFilter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config.py index 1fee1144e475..b33ee02c1de5 100644 --- 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_tls_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1TLSConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -113,7 +113,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TLSConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -198,7 +198,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TLSConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger.py similarity index 74% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger.py index b00df6dfaf0c..a7aeb7fddca1 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger.py @@ -30,19 +30,19 @@ 
def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff - from argo_workflows.model.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate - globals()['IoArgoprojEventsV1alpha1Backoff'] = IoArgoprojEventsV1alpha1Backoff - globals()['IoArgoprojEventsV1alpha1RateLimit'] = IoArgoprojEventsV1alpha1RateLimit - globals()['IoArgoprojEventsV1alpha1TriggerParameter'] = IoArgoprojEventsV1alpha1TriggerParameter - globals()['IoArgoprojEventsV1alpha1TriggerPolicy'] = IoArgoprojEventsV1alpha1TriggerPolicy - globals()['IoArgoprojEventsV1alpha1TriggerTemplate'] = IoArgoprojEventsV1alpha1TriggerTemplate - - -class IoArgoprojEventsV1alpha1Trigger(ModelNormal): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_rate_limit import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_policy import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_template import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -96,11 +96,12 @@ def openapi_types(): lazy_import() return { 'at_least_once': (bool,), # noqa: E501 - 'parameters': ([IoArgoprojEventsV1alpha1TriggerParameter],), # noqa: E501 - 'policy': (IoArgoprojEventsV1alpha1TriggerPolicy,), # noqa: E501 - 'rate_limit': (IoArgoprojEventsV1alpha1RateLimit,), # noqa: E501 - 'retry_strategy': (IoArgoprojEventsV1alpha1Backoff,), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1TriggerTemplate,), # noqa: E501 + 'dlq_trigger': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger,), # noqa: E501 + 'parameters': ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter],), # noqa: E501 + 'policy': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy,), # noqa: E501 + 'rate_limit': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit,), # noqa: E501 + 'retry_strategy': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff,), # noqa: E501 + 'template': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate,), # noqa: E501 } @cached_property @@ -110,6 +111,7 @@ def discriminator(): attribute_map = { 'at_least_once': 'atLeastOnce', # 
noqa: E501 + 'dlq_trigger': 'dlqTrigger', # noqa: E501 'parameters': 'parameters', # noqa: E501 'policy': 'policy', # noqa: E501 'rate_limit': 'rateLimit', # noqa: E501 @@ -125,7 +127,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Trigger - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -159,11 +161,12 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) at_least_once (bool): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - policy (IoArgoprojEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 - rate_limit (IoArgoprojEventsV1alpha1RateLimit): [optional] # noqa: E501 - retry_strategy (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 + dlq_trigger (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + policy (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 + rate_limit (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit): [optional] # noqa: E501 + retry_strategy (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -212,7 +215,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1Trigger 
- a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -246,11 +249,12 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) at_least_once (bool): [optional] # noqa: E501 - parameters ([IoArgoprojEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 - policy (IoArgoprojEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 - rate_limit (IoArgoprojEventsV1alpha1RateLimit): [optional] # noqa: E501 - retry_strategy (IoArgoprojEventsV1alpha1Backoff): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 + dlq_trigger (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger): [optional] # noqa: E501 + parameters ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]): [optional] # noqa: E501 + policy (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy): [optional] # noqa: E501 + rate_limit (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit): [optional] # noqa: E501 + retry_strategy (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff): [optional] # noqa: E501 + template (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter.py similarity index 92% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter.py index 8c88dd6b7d4e..ddd65c99b0a1 100644 --- 
a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter.py @@ -30,11 +30,11 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource - globals()['IoArgoprojEventsV1alpha1TriggerParameterSource'] = IoArgoprojEventsV1alpha1TriggerParameterSource + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource -class IoArgoprojEventsV1alpha1TriggerParameter(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,7 +89,7 @@ def openapi_types(): return { 'dest': (str,), # noqa: E501 'operation': (str,), # noqa: E501 - 'src': (IoArgoprojEventsV1alpha1TriggerParameterSource,), # noqa: E501 + 'src': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource,), # noqa: E501 } @cached_property @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -146,7 +146,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) dest (str): Dest is the JSONPath of a resource key. 
A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used.. [optional] # noqa: E501 operation (str): Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it.. [optional] # noqa: E501 - src (IoArgoprojEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 + src (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -195,7 +195,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameter - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -230,7 +230,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) dest (str): Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used.. [optional] # noqa: E501 operation (str): Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it.. 
[optional] # noqa: E501 - src (IoArgoprojEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 + src (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter_source.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter_source.py index 84328054ecbb..a140e8c72b44 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_parameter_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter_source.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1TriggerParameterSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -113,7 +113,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -201,7 +201,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_policy.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_policy.py index e2a0584d293a..8b9acf41ed25 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_policy.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_policy.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy - from argo_workflows.model.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy - globals()['IoArgoprojEventsV1alpha1K8SResourcePolicy'] = IoArgoprojEventsV1alpha1K8SResourcePolicy - 
globals()['IoArgoprojEventsV1alpha1StatusPolicy'] = IoArgoprojEventsV1alpha1StatusPolicy + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource_policy import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status_policy import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy -class IoArgoprojEventsV1alpha1TriggerPolicy(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,8 +89,8 @@ def openapi_types(): """ lazy_import() return { - 'k8s': (IoArgoprojEventsV1alpha1K8SResourcePolicy,), # noqa: E501 - 'status': (IoArgoprojEventsV1alpha1StatusPolicy,), # noqa: E501 + 'k8s': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy,), # noqa: E501 + 'status': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy,), # noqa: E501 } @cached_property @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerPolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,8 +144,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - k8s 
(IoArgoprojEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1StatusPolicy): [optional] # noqa: E501 + k8s (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerPolicy - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,8 +227,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - k8s (IoArgoprojEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 - status (IoArgoprojEventsV1alpha1StatusPolicy): [optional] # noqa: E501 + k8s (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy): [optional] # noqa: E501 + status (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_template.py similarity index 54% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_template.py index 071237736196..a8ff60e44ef4 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_trigger_template.py +++ 
b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_template.py @@ -30,39 +30,39 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria - from argo_workflows.model.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger - from argo_workflows.model.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger - 
globals()['IoArgoprojEventsV1alpha1AWSLambdaTrigger'] = IoArgoprojEventsV1alpha1AWSLambdaTrigger - globals()['IoArgoprojEventsV1alpha1ArgoWorkflowTrigger'] = IoArgoprojEventsV1alpha1ArgoWorkflowTrigger - globals()['IoArgoprojEventsV1alpha1AzureEventHubsTrigger'] = IoArgoprojEventsV1alpha1AzureEventHubsTrigger - globals()['IoArgoprojEventsV1alpha1AzureServiceBusTrigger'] = IoArgoprojEventsV1alpha1AzureServiceBusTrigger - globals()['IoArgoprojEventsV1alpha1ConditionsResetCriteria'] = IoArgoprojEventsV1alpha1ConditionsResetCriteria - globals()['IoArgoprojEventsV1alpha1CustomTrigger'] = IoArgoprojEventsV1alpha1CustomTrigger - globals()['IoArgoprojEventsV1alpha1EmailTrigger'] = IoArgoprojEventsV1alpha1EmailTrigger - globals()['IoArgoprojEventsV1alpha1HTTPTrigger'] = IoArgoprojEventsV1alpha1HTTPTrigger - globals()['IoArgoprojEventsV1alpha1KafkaTrigger'] = IoArgoprojEventsV1alpha1KafkaTrigger - globals()['IoArgoprojEventsV1alpha1LogTrigger'] = IoArgoprojEventsV1alpha1LogTrigger - globals()['IoArgoprojEventsV1alpha1NATSTrigger'] = IoArgoprojEventsV1alpha1NATSTrigger - globals()['IoArgoprojEventsV1alpha1OpenWhiskTrigger'] = IoArgoprojEventsV1alpha1OpenWhiskTrigger - globals()['IoArgoprojEventsV1alpha1PulsarTrigger'] = IoArgoprojEventsV1alpha1PulsarTrigger - globals()['IoArgoprojEventsV1alpha1SlackTrigger'] = IoArgoprojEventsV1alpha1SlackTrigger - globals()['IoArgoprojEventsV1alpha1StandardK8STrigger'] = IoArgoprojEventsV1alpha1StandardK8STrigger - - -class IoArgoprojEventsV1alpha1TriggerTemplate(ModelNormal): + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_argo_workflow_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_aws_lambda_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_event_hubs_trigger 
import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_criteria import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_custom_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_email_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_http_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_log_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_open_whisk_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger + from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_standard_k8_s_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger + 
globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger + + +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -115,23 +115,23 @@ def openapi_types(): """ lazy_import() return { - 'argo_workflow': (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger,), # noqa: E501 - 'aws_lambda': (IoArgoprojEventsV1alpha1AWSLambdaTrigger,), # noqa: E501 - 'azure_event_hubs': (IoArgoprojEventsV1alpha1AzureEventHubsTrigger,), # noqa: E501 - 'azure_service_bus': (IoArgoprojEventsV1alpha1AzureServiceBusTrigger,), # noqa: E501 + 'argo_workflow': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger,), # noqa: E501 + 'aws_lambda': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger,), # noqa: E501 + 'azure_event_hubs': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger,), # noqa: E501 + 'azure_service_bus': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger,), # noqa: E501 'conditions': (str,), # noqa: E501 - 'conditions_reset': ([IoArgoprojEventsV1alpha1ConditionsResetCriteria],), # noqa: E501 - 'custom': (IoArgoprojEventsV1alpha1CustomTrigger,), # noqa: E501 - 'email': (IoArgoprojEventsV1alpha1EmailTrigger,), # noqa: E501 - 'http': (IoArgoprojEventsV1alpha1HTTPTrigger,), # noqa: E501 - 'k8s': (IoArgoprojEventsV1alpha1StandardK8STrigger,), # noqa: E501 - 'kafka': (IoArgoprojEventsV1alpha1KafkaTrigger,), # noqa: E501 - 'log': (IoArgoprojEventsV1alpha1LogTrigger,), # noqa: E501 + 'conditions_reset': 
([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria],), # noqa: E501 + 'custom': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger,), # noqa: E501 + 'email': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger,), # noqa: E501 + 'http': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger,), # noqa: E501 + 'k8s': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger,), # noqa: E501 + 'kafka': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger,), # noqa: E501 + 'log': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger,), # noqa: E501 'name': (str,), # noqa: E501 - 'nats': (IoArgoprojEventsV1alpha1NATSTrigger,), # noqa: E501 - 'open_whisk': (IoArgoprojEventsV1alpha1OpenWhiskTrigger,), # noqa: E501 - 'pulsar': (IoArgoprojEventsV1alpha1PulsarTrigger,), # noqa: E501 - 'slack': (IoArgoprojEventsV1alpha1SlackTrigger,), # noqa: E501 + 'nats': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger,), # noqa: E501 + 'open_whisk': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger,), # noqa: E501 + 'pulsar': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger,), # noqa: E501 + 'slack': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger,), # noqa: E501 } @cached_property @@ -167,7 +167,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerTemplate - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -200,23 +200,23 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - argo_workflow (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: 
E501 - aws_lambda (IoArgoprojEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 - azure_event_hubs (IoArgoprojEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 - azure_service_bus (IoArgoprojEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 + argo_workflow (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: E501 + aws_lambda (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 + azure_event_hubs (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 + azure_service_bus (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 conditions (str): [optional] # noqa: E501 - conditions_reset ([IoArgoprojEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 - custom (IoArgoprojEventsV1alpha1CustomTrigger): [optional] # noqa: E501 - email (IoArgoprojEventsV1alpha1EmailTrigger): [optional] # noqa: E501 - http (IoArgoprojEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 - k8s (IoArgoprojEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 - kafka (IoArgoprojEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 - log (IoArgoprojEventsV1alpha1LogTrigger): [optional] # noqa: E501 + conditions_reset ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 + custom (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger): [optional] # noqa: E501 + email (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger): [optional] # noqa: E501 + http (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 + k8s (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 + kafka (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 + log (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger): [optional] # noqa: E501 name (str): 
Name is a unique name of the action to take.. [optional] # noqa: E501 - nats (IoArgoprojEventsV1alpha1NATSTrigger): [optional] # noqa: E501 - open_whisk (IoArgoprojEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 - pulsar (IoArgoprojEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 - slack (IoArgoprojEventsV1alpha1SlackTrigger): [optional] # noqa: E501 + nats (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger): [optional] # noqa: E501 + open_whisk (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 + pulsar (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 + slack (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -265,7 +265,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1TriggerTemplate - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -298,23 +298,23 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - argo_workflow (IoArgoprojEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: E501 - aws_lambda (IoArgoprojEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 - azure_event_hubs (IoArgoprojEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 - azure_service_bus (IoArgoprojEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 + argo_workflow (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger): [optional] # noqa: E501 + aws_lambda (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger): [optional] # noqa: E501 + 
azure_event_hubs (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger): [optional] # noqa: E501 + azure_service_bus (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger): [optional] # noqa: E501 conditions (str): [optional] # noqa: E501 - conditions_reset ([IoArgoprojEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 - custom (IoArgoprojEventsV1alpha1CustomTrigger): [optional] # noqa: E501 - email (IoArgoprojEventsV1alpha1EmailTrigger): [optional] # noqa: E501 - http (IoArgoprojEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 - k8s (IoArgoprojEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 - kafka (IoArgoprojEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 - log (IoArgoprojEventsV1alpha1LogTrigger): [optional] # noqa: E501 + conditions_reset ([GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria]): [optional] # noqa: E501 + custom (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger): [optional] # noqa: E501 + email (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger): [optional] # noqa: E501 + http (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger): [optional] # noqa: E501 + k8s (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger): [optional] # noqa: E501 + kafka (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger): [optional] # noqa: E501 + log (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger): [optional] # noqa: E501 name (str): Name is a unique name of the action to take.. 
[optional] # noqa: E501 - nats (IoArgoprojEventsV1alpha1NATSTrigger): [optional] # noqa: E501 - open_whisk (IoArgoprojEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 - pulsar (IoArgoprojEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 - slack (IoArgoprojEventsV1alpha1SlackTrigger): [optional] # noqa: E501 + nats (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger): [optional] # noqa: E501 + open_whisk (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger): [optional] # noqa: E501 + pulsar (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger): [optional] # noqa: E501 + slack (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_url_artifact.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_url_artifact.py index a8e63789e05d..2d82e820709f 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_url_artifact.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_url_artifact.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1URLArtifact(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -103,7 +103,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1URLArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -186,7 +186,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1URLArtifact - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_value_from_source.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_value_from_source.py index dad95c34b8e5..970daffcc56b 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_value_from_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_value_from_source.py @@ -36,7 +36,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1ValueFromSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ValueFromSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1ValueFromSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config.py index dba84388ad4e..716ac94da2b2 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_watch_path_config.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config.py @@ -30,7 +30,7 @@ -class IoArgoprojEventsV1alpha1WatchPathConfig(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -105,7 +105,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WatchPathConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -189,7 +189,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WatchPathConfig - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context.py similarity index 97% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context.py index 921fbda41a0e..5ca58da4467b 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_context.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context.py @@ -34,7 +34,7 @@ def lazy_import(): globals()['SecretKeySelector'] = SecretKeySelector -class IoArgoprojEventsV1alpha1WebhookContext(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -123,7 +123,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookContext - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -213,7 +213,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookContext - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_event_source.py similarity index 87% rename from sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py rename to sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_event_source.py index c6316912e390..ea002590dba5 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_webhook_event_source.py +++ b/sdks/python/client/argo_workflows/model/github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_event_source.py @@ -30,13 +30,13 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext - globals()['IoArgoprojEventsV1alpha1EventSourceFilter'] = IoArgoprojEventsV1alpha1EventSourceFilter - 
globals()['IoArgoprojEventsV1alpha1WebhookContext'] = IoArgoprojEventsV1alpha1WebhookContext + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext -class IoArgoprojEventsV1alpha1WebhookEventSource(ModelNormal): +class GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -89,8 +89,8 @@ def openapi_types(): """ lazy_import() return { - 'filter': (IoArgoprojEventsV1alpha1EventSourceFilter,), # noqa: E501 - 'webhook_context': (IoArgoprojEventsV1alpha1WebhookContext,), # noqa: E501 + 'filter': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter,), # noqa: E501 + 'webhook_context': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext,), # noqa: E501 } @cached_property @@ -111,7 +111,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -144,8 +144,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in 
_visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - webhook_context (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + webhook_context (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -194,7 +194,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1WebhookEventSource - a model defined in OpenAPI + """GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -227,8 +227,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter (IoArgoprojEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 - webhook_context (IoArgoprojEventsV1alpha1WebhookContext): [optional] # noqa: E501 + filter (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter): [optional] # noqa: E501 + webhook_context (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/image_volume_source.py b/sdks/python/client/argo_workflows/model/image_volume_source.py new file mode 100644 index 000000000000..36805d8de600 --- /dev/null +++ b/sdks/python/client/argo_workflows/model/image_volume_source.py @@ -0,0 +1,259 @@ +""" + Argo Workflows API + + Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. 
For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 + + The version of the OpenAPI document: VERSION + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from argo_workflows.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, + OpenApiModel +) +from argo_workflows.exceptions import ApiAttributeError + + + +class ImageVolumeSource(ModelNormal): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + 'pull_policy': (str,), # noqa: E501 + 'reference': (str,), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'pull_policy': 'pullPolicy', # noqa: E501 + 'reference': 'reference', # noqa: E501 + } + + read_only_vars = { + } + + _composed_schemas = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + """ImageVolumeSource - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pull_policy (str): Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise.. [optional] # noqa: E501 + reference (str): Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. 
[optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + ]) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs): # noqa: E501 + """ImageVolumeSource - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. 
+ False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pull_policy (str): Policy for pulling OCI objects. Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise.. [optional] # noqa: E501 + reference (str): Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. 
More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets.. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if var_name not in self.attribute_map and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self.additional_properties_type is None: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py b/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py deleted file mode 100644 index a36b07152fe1..000000000000 --- a/sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_event_source_spec.py +++ /dev/null @@ -1,461 +0,0 @@ -""" - Argo Workflows API - - Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argo-workflows.readthedocs.io/en/latest/ # noqa: E501 - - The version of the OpenAPI document: VERSION - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from argo_workflows.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, - OpenApiModel -) -from argo_workflows.exceptions import ApiAttributeError - - -def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_event_source import IoArgoprojEventsV1alpha1BitbucketEventSource - from 
argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource - from argo_workflows.model.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource - 
from argo_workflows.model.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact - from argo_workflows.model.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service - from argo_workflows.model.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource - from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template - from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource - globals()['IoArgoprojEventsV1alpha1AMQPEventSource'] = IoArgoprojEventsV1alpha1AMQPEventSource - globals()['IoArgoprojEventsV1alpha1AzureEventsHubEventSource'] = IoArgoprojEventsV1alpha1AzureEventsHubEventSource - globals()['IoArgoprojEventsV1alpha1AzureQueueStorageEventSource'] = IoArgoprojEventsV1alpha1AzureQueueStorageEventSource - globals()['IoArgoprojEventsV1alpha1AzureServiceBusEventSource'] = IoArgoprojEventsV1alpha1AzureServiceBusEventSource - globals()['IoArgoprojEventsV1alpha1BitbucketEventSource'] = 
IoArgoprojEventsV1alpha1BitbucketEventSource - globals()['IoArgoprojEventsV1alpha1BitbucketServerEventSource'] = IoArgoprojEventsV1alpha1BitbucketServerEventSource - globals()['IoArgoprojEventsV1alpha1CalendarEventSource'] = IoArgoprojEventsV1alpha1CalendarEventSource - globals()['IoArgoprojEventsV1alpha1EmitterEventSource'] = IoArgoprojEventsV1alpha1EmitterEventSource - globals()['IoArgoprojEventsV1alpha1FileEventSource'] = IoArgoprojEventsV1alpha1FileEventSource - globals()['IoArgoprojEventsV1alpha1GenericEventSource'] = IoArgoprojEventsV1alpha1GenericEventSource - globals()['IoArgoprojEventsV1alpha1GerritEventSource'] = IoArgoprojEventsV1alpha1GerritEventSource - globals()['IoArgoprojEventsV1alpha1GithubEventSource'] = IoArgoprojEventsV1alpha1GithubEventSource - globals()['IoArgoprojEventsV1alpha1GitlabEventSource'] = IoArgoprojEventsV1alpha1GitlabEventSource - globals()['IoArgoprojEventsV1alpha1HDFSEventSource'] = IoArgoprojEventsV1alpha1HDFSEventSource - globals()['IoArgoprojEventsV1alpha1KafkaEventSource'] = IoArgoprojEventsV1alpha1KafkaEventSource - globals()['IoArgoprojEventsV1alpha1MQTTEventSource'] = IoArgoprojEventsV1alpha1MQTTEventSource - globals()['IoArgoprojEventsV1alpha1NATSEventsSource'] = IoArgoprojEventsV1alpha1NATSEventsSource - globals()['IoArgoprojEventsV1alpha1NSQEventSource'] = IoArgoprojEventsV1alpha1NSQEventSource - globals()['IoArgoprojEventsV1alpha1PubSubEventSource'] = IoArgoprojEventsV1alpha1PubSubEventSource - globals()['IoArgoprojEventsV1alpha1PulsarEventSource'] = IoArgoprojEventsV1alpha1PulsarEventSource - globals()['IoArgoprojEventsV1alpha1RedisEventSource'] = IoArgoprojEventsV1alpha1RedisEventSource - globals()['IoArgoprojEventsV1alpha1RedisStreamEventSource'] = IoArgoprojEventsV1alpha1RedisStreamEventSource - globals()['IoArgoprojEventsV1alpha1ResourceEventSource'] = IoArgoprojEventsV1alpha1ResourceEventSource - globals()['IoArgoprojEventsV1alpha1S3Artifact'] = IoArgoprojEventsV1alpha1S3Artifact - 
globals()['IoArgoprojEventsV1alpha1SFTPEventSource'] = IoArgoprojEventsV1alpha1SFTPEventSource - globals()['IoArgoprojEventsV1alpha1SNSEventSource'] = IoArgoprojEventsV1alpha1SNSEventSource - globals()['IoArgoprojEventsV1alpha1SQSEventSource'] = IoArgoprojEventsV1alpha1SQSEventSource - globals()['IoArgoprojEventsV1alpha1Service'] = IoArgoprojEventsV1alpha1Service - globals()['IoArgoprojEventsV1alpha1SlackEventSource'] = IoArgoprojEventsV1alpha1SlackEventSource - globals()['IoArgoprojEventsV1alpha1StorageGridEventSource'] = IoArgoprojEventsV1alpha1StorageGridEventSource - globals()['IoArgoprojEventsV1alpha1StripeEventSource'] = IoArgoprojEventsV1alpha1StripeEventSource - globals()['IoArgoprojEventsV1alpha1Template'] = IoArgoprojEventsV1alpha1Template - globals()['IoArgoprojEventsV1alpha1WebhookEventSource'] = IoArgoprojEventsV1alpha1WebhookEventSource - - -class IoArgoprojEventsV1alpha1EventSourceSpec(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - 'amqp': ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)},), # noqa: E501 - 'azure_events_hub': ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)},), # noqa: E501 - 'azure_queue_storage': ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)},), # noqa: E501 - 'azure_service_bus': ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)},), # noqa: E501 - 'bitbucket': ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)},), # noqa: E501 - 'bitbucketserver': ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)},), # noqa: E501 - 'calendar': ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)},), # noqa: E501 - 'emitter': ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)},), # noqa: E501 - 'event_bus_name': (str,), # noqa: E501 - 'file': ({str: (IoArgoprojEventsV1alpha1FileEventSource,)},), # noqa: E501 - 'generic': ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)},), # noqa: E501 - 'gerrit': ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)},), # noqa: E501 - 'github': ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)},), # noqa: E501 - 'gitlab': ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)},), # noqa: E501 - 'hdfs': ({str: (IoArgoprojEventsV1alpha1HDFSEventSource,)},), # noqa: E501 - 'kafka': ({str: 
(IoArgoprojEventsV1alpha1KafkaEventSource,)},), # noqa: E501 - 'minio': ({str: (IoArgoprojEventsV1alpha1S3Artifact,)},), # noqa: E501 - 'mqtt': ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)},), # noqa: E501 - 'nats': ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)},), # noqa: E501 - 'nsq': ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)},), # noqa: E501 - 'pub_sub': ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)},), # noqa: E501 - 'pulsar': ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)},), # noqa: E501 - 'redis': ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)},), # noqa: E501 - 'redis_stream': ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)},), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'resource': ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)},), # noqa: E501 - 'service': (IoArgoprojEventsV1alpha1Service,), # noqa: E501 - 'sftp': ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)},), # noqa: E501 - 'slack': ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)},), # noqa: E501 - 'sns': ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)},), # noqa: E501 - 'sqs': ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)},), # noqa: E501 - 'storage_grid': ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)},), # noqa: E501 - 'stripe': ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)},), # noqa: E501 - 'template': (IoArgoprojEventsV1alpha1Template,), # noqa: E501 - 'webhook': ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'amqp': 'amqp', # noqa: E501 - 'azure_events_hub': 'azureEventsHub', # noqa: E501 - 'azure_queue_storage': 'azureQueueStorage', # noqa: E501 - 'azure_service_bus': 'azureServiceBus', # noqa: E501 - 'bitbucket': 'bitbucket', # noqa: E501 - 'bitbucketserver': 'bitbucketserver', # noqa: E501 - 'calendar': 'calendar', # noqa: E501 - 'emitter': 'emitter', # noqa: E501 - 'event_bus_name': 'eventBusName', # 
noqa: E501 - 'file': 'file', # noqa: E501 - 'generic': 'generic', # noqa: E501 - 'gerrit': 'gerrit', # noqa: E501 - 'github': 'github', # noqa: E501 - 'gitlab': 'gitlab', # noqa: E501 - 'hdfs': 'hdfs', # noqa: E501 - 'kafka': 'kafka', # noqa: E501 - 'minio': 'minio', # noqa: E501 - 'mqtt': 'mqtt', # noqa: E501 - 'nats': 'nats', # noqa: E501 - 'nsq': 'nsq', # noqa: E501 - 'pub_sub': 'pubSub', # noqa: E501 - 'pulsar': 'pulsar', # noqa: E501 - 'redis': 'redis', # noqa: E501 - 'redis_stream': 'redisStream', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'resource': 'resource', # noqa: E501 - 'service': 'service', # noqa: E501 - 'sftp': 'sftp', # noqa: E501 - 'slack': 'slack', # noqa: E501 - 'sns': 'sns', # noqa: E501 - 'sqs': 'sqs', # noqa: E501 - 'storage_grid': 'storageGrid', # noqa: E501 - 'stripe': 'stripe', # noqa: E501 - 'template': 'template', # noqa: E501 - 'webhook': 'webhook', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - amqp ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 - azure_events_hub ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 - azure_queue_storage ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 - azure_service_bus ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 - bitbucket ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 - bitbucketserver ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 - calendar ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 - emitter ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - file ({str: (IoArgoprojEventsV1alpha1FileEventSource,)}): [optional] # noqa: E501 - generic ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 - gerrit ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 - github ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 - gitlab ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 - hdfs ({str: 
(IoArgoprojEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 - kafka ({str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 - minio ({str: (IoArgoprojEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 - mqtt ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 - nats ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 - nsq ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 - pub_sub ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 - pulsar ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 - redis ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 - redis_stream ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - resource ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 - service (IoArgoprojEventsV1alpha1Service): [optional] # noqa: E501 - sftp ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 - slack ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 - sns ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 - sqs ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 - storage_grid ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 - stripe ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - webhook ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = 
kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IoArgoprojEventsV1alpha1EventSourceSpec - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - amqp ({str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}): [optional] # noqa: E501 - azure_events_hub ({str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}): [optional] # noqa: E501 - azure_queue_storage ({str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}): [optional] # noqa: E501 - azure_service_bus ({str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}): [optional] # noqa: E501 - bitbucket ({str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}): [optional] # noqa: E501 - bitbucketserver ({str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}): [optional] # noqa: E501 - calendar ({str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}): [optional] # noqa: E501 - emitter ({str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}): [optional] # noqa: E501 - event_bus_name (str): [optional] # noqa: E501 - file ({str: (IoArgoprojEventsV1alpha1FileEventSource,)}): [optional] # noqa: E501 - generic ({str: (IoArgoprojEventsV1alpha1GenericEventSource,)}): [optional] # noqa: E501 - gerrit ({str: (IoArgoprojEventsV1alpha1GerritEventSource,)}): [optional] # noqa: E501 - github ({str: (IoArgoprojEventsV1alpha1GithubEventSource,)}): [optional] # noqa: E501 - gitlab ({str: (IoArgoprojEventsV1alpha1GitlabEventSource,)}): [optional] # noqa: E501 - hdfs ({str: 
(IoArgoprojEventsV1alpha1HDFSEventSource,)}): [optional] # noqa: E501 - kafka ({str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}): [optional] # noqa: E501 - minio ({str: (IoArgoprojEventsV1alpha1S3Artifact,)}): [optional] # noqa: E501 - mqtt ({str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}): [optional] # noqa: E501 - nats ({str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}): [optional] # noqa: E501 - nsq ({str: (IoArgoprojEventsV1alpha1NSQEventSource,)}): [optional] # noqa: E501 - pub_sub ({str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}): [optional] # noqa: E501 - pulsar ({str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}): [optional] # noqa: E501 - redis ({str: (IoArgoprojEventsV1alpha1RedisEventSource,)}): [optional] # noqa: E501 - redis_stream ({str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - resource ({str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}): [optional] # noqa: E501 - service (IoArgoprojEventsV1alpha1Service): [optional] # noqa: E501 - sftp ({str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}): [optional] # noqa: E501 - slack ({str: (IoArgoprojEventsV1alpha1SlackEventSource,)}): [optional] # noqa: E501 - sns ({str: (IoArgoprojEventsV1alpha1SNSEventSource,)}): [optional] # noqa: E501 - sqs ({str: (IoArgoprojEventsV1alpha1SQSEventSource,)}): [optional] # noqa: E501 - storage_grid ({str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}): [optional] # noqa: E501 - stripe ({str: (IoArgoprojEventsV1alpha1StripeEventSource,)}): [optional] # noqa: E501 - template (IoArgoprojEventsV1alpha1Template): [optional] # noqa: E501 - webhook ({str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = 
kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py index e29da4738f14..7296211e5b5b 100644 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py +++ b/sdks/python/client/argo_workflows/model/persistent_volume_claim_condition.py @@ -114,8 +114,8 @@ def _from_openapi_data(cls, status, type, *args, **kwargs): # noqa: E501 """PersistentVolumeClaimCondition - a model defined in OpenAPI Args: - status (str): - type (str): + status (str): Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required + type (str): Type is the type of the condition. 
More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -205,8 +205,8 @@ def __init__(self, status, type, *args, **kwargs): # noqa: E501 """PersistentVolumeClaimCondition - a model defined in OpenAPI Args: - status (str): - type (str): + status (str): Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required + type (str): Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py index bf5b1ddd552c..67ce71a99eac 100644 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py +++ b/sdks/python/client/argo_workflows/model/persistent_volume_claim_spec.py @@ -168,7 +168,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 resources (VolumeResourceRequirements): [optional] # noqa: E501 selector (LabelSelector): [optional] # noqa: E501 storage_class_name (str): storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1. [optional] # noqa: E501 - volume_attributes_class_name (str): volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. 
If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.. [optional] # noqa: E501 + volume_attributes_class_name (str): volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. 
More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).. [optional] # noqa: E501 volume_mode (str): volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.. [optional] # noqa: E501 volume_name (str): volumeName is the binding reference to the PersistentVolume backing this claim.. [optional] # noqa: E501 """ @@ -258,7 +258,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 resources (VolumeResourceRequirements): [optional] # noqa: E501 selector (LabelSelector): [optional] # noqa: E501 storage_class_name (str): storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1. [optional] # noqa: E501 - volume_attributes_class_name (str): volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.. 
[optional] # noqa: E501 + volume_attributes_class_name (str): volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).. [optional] # noqa: E501 volume_mode (str): volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.. [optional] # noqa: E501 volume_name (str): volumeName is the binding reference to the PersistentVolume backing this claim.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py b/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py index 152899752e18..173fdf65cd07 100644 --- a/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py +++ b/sdks/python/client/argo_workflows/model/persistent_volume_claim_status.py @@ -161,7 +161,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 allocated_resources ({str: (str,)}): allocatedResources tracks the resources allocated to a PVC including its capacity. 
Key names follow standard Kubernetes label syntax. Valid values are either: * Un-prefixed keys: - storage - the capacity of the volume. * Custom resources must use implementation-defined prefixed names such as \"example.com/my-custom-resource\" Apart from above values - keys that are unprefixed or have kubernetes.io prefix are considered reserved and hence may not be used. Capacity reported here may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. A controller that receives PVC update with previously unknown resourceName should ignore the update for the purpose it was designed. For example - a controller that only is responsible for resizing capacity of the volume, should ignore PVC updates that change other valid resources associated with PVC. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. [optional] # noqa: E501 capacity ({str: (str,)}): capacity represents the actual resources of the underlying volume.. [optional] # noqa: E501 conditions ([PersistentVolumeClaimCondition]): conditions is the current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'Resizing'.. [optional] # noqa: E501 - current_volume_attributes_class_name (str): currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature.. 
[optional] # noqa: E501 + current_volume_attributes_class_name (str): currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default).. [optional] # noqa: E501 modify_volume_status (ModifyVolumeStatus): [optional] # noqa: E501 phase (str): phase represents the current phase of PersistentVolumeClaim.. [optional] # noqa: E501 """ @@ -250,7 +250,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 allocated_resources ({str: (str,)}): allocatedResources tracks the resources allocated to a PVC including its capacity. Key names follow standard Kubernetes label syntax. Valid values are either: * Un-prefixed keys: - storage - the capacity of the volume. * Custom resources must use implementation-defined prefixed names such as \"example.com/my-custom-resource\" Apart from above values - keys that are unprefixed or have kubernetes.io prefix are considered reserved and hence may not be used. Capacity reported here may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. A controller that receives PVC update with previously unknown resourceName should ignore the update for the purpose it was designed. For example - a controller that only is responsible for resizing capacity of the volume, should ignore PVC updates that change other valid resources associated with PVC. 
This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature.. [optional] # noqa: E501 capacity ({str: (str,)}): capacity represents the actual resources of the underlying volume.. [optional] # noqa: E501 conditions ([PersistentVolumeClaimCondition]): conditions is the current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'Resizing'.. [optional] # noqa: E501 - current_volume_attributes_class_name (str): currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature.. [optional] # noqa: E501 + current_volume_attributes_class_name (str): currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default).. [optional] # noqa: E501 modify_volume_status (ModifyVolumeStatus): [optional] # noqa: E501 phase (str): phase represents the current phase of PersistentVolumeClaim.. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/pod_affinity_term.py b/sdks/python/client/argo_workflows/model/pod_affinity_term.py index d6aa55c312de..8a9bd02b118d 100644 --- a/sdks/python/client/argo_workflows/model/pod_affinity_term.py +++ b/sdks/python/client/argo_workflows/model/pod_affinity_term.py @@ -154,8 +154,8 @@ def _from_openapi_data(cls, topology_key, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) label_selector (LabelSelector): [optional] # noqa: E501 - match_label_keys ([str]): MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.. [optional] # noqa: E501 - mismatch_label_keys ([str]): MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.. [optional] # noqa: E501 + match_label_keys ([str]): MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. 
This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).. [optional] # noqa: E501 + mismatch_label_keys ([str]): MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).. [optional] # noqa: E501 namespace_selector (LabelSelector): [optional] # noqa: E501 namespaces ([str]): namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\".. [optional] # noqa: E501 """ @@ -244,8 +244,8 @@ def __init__(self, topology_key, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) label_selector (LabelSelector): [optional] # noqa: E501 - match_label_keys ([str]): MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. 
The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.. [optional] # noqa: E501 - mismatch_label_keys ([str]): MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate.. [optional] # noqa: E501 + match_label_keys ([str]): MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).. [optional] # noqa: E501 + mismatch_label_keys ([str]): MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default).. [optional] # noqa: E501 namespace_selector (LabelSelector): [optional] # noqa: E501 namespaces ([str]): namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\".. [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/pod_dns_config_option.py b/sdks/python/client/argo_workflows/model/pod_dns_config_option.py index 154ae32daec1..c5c9c17e86ca 100644 --- a/sdks/python/client/argo_workflows/model/pod_dns_config_option.py +++ b/sdks/python/client/argo_workflows/model/pod_dns_config_option.py @@ -136,8 +136,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): Required.. [optional] # noqa: E501 - value (str): [optional] # noqa: E501 + name (str): Name is this DNS resolver option's name. Required.. [optional] # noqa: E501 + value (str): Value is this DNS resolver option's value.. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -219,8 +219,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): Required.. [optional] # noqa: E501 - value (str): [optional] # noqa: E501 + name (str): Name is this DNS resolver option's name. Required.. [optional] # noqa: E501 + value (str): Value is this DNS resolver option's value.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/pod_security_context.py b/sdks/python/client/argo_workflows/model/pod_security_context.py index 09315d5119aa..44954b333af9 100644 --- a/sdks/python/client/argo_workflows/model/pod_security_context.py +++ b/sdks/python/client/argo_workflows/model/pod_security_context.py @@ -101,9 +101,11 @@ def openapi_types(): 'run_as_group': (int,), # noqa: E501 'run_as_non_root': (bool,), # noqa: E501 'run_as_user': (int,), # noqa: E501 + 'se_linux_change_policy': (str,), # noqa: E501 'se_linux_options': (SELinuxOptions,), # noqa: E501 'seccomp_profile': (SeccompProfile,), # noqa: E501 'supplemental_groups': ([int],), # noqa: E501 + 'supplemental_groups_policy': (str,), # noqa: E501 'sysctls': ([Sysctl],), # noqa: E501 'windows_options': (WindowsSecurityContextOptions,), # noqa: E501 } @@ -120,9 +122,11 @@ def discriminator(): 'run_as_group': 'runAsGroup', # noqa: E501 'run_as_non_root': 'runAsNonRoot', # noqa: E501 'run_as_user': 'runAsUser', # noqa: E501 + 'se_linux_change_policy': 'seLinuxChangePolicy', # noqa: E501 'se_linux_options': 'seLinuxOptions', # noqa: E501 'seccomp_profile': 'seccompProfile', # noqa: E501 'supplemental_groups': 'supplementalGroups', # noqa: E501 + 'supplemental_groups_policy': 'supplementalGroupsPolicy', # noqa: E501 'sysctls': 'sysctls', # noqa: E501 'windows_options': 'windowsOptions', # noqa: E501 } @@ -174,9 +178,11 @@ def 
_from_openapi_data(cls, *args, **kwargs): # noqa: E501 run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + se_linux_change_policy (str): seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. It has no effect on nodes that do not support SELinux or to volumes does not support SELinux. Valid values are \"MountOption\" and \"Recursive\". \"Recursive\" means relabeling of all files on all Pod volumes by the container runtime. This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node. \"MountOption\" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. 
It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. \"MountOption\" value is allowed only when SELinuxMount feature gate is enabled. If not specified and SELinuxMount feature gate is enabled, \"MountOption\" is used. If not specified and SELinuxMount feature gate is disabled, \"MountOption\" is used for ReadWriteOncePod volumes and \"Recursive\" for all other volumes. This field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers. All Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 se_linux_options (SELinuxOptions): [optional] # noqa: E501 seccomp_profile (SeccompProfile): [optional] # noqa: E501 - supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). 
If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + supplemental_groups_policy (str): Defines how supplemental groups of the first container processes are calculated. Valid values are \"Merge\" and \"Strict\". If not specified, \"Merge\" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 sysctls ([Sysctl]): Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 """ @@ -266,9 +272,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. 
If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 run_as_user (int): The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + se_linux_change_policy (str): seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. It has no effect on nodes that do not support SELinux or to volumes does not support SELinux. Valid values are \"MountOption\" and \"Recursive\". \"Recursive\" means relabeling of all files on all Pod volumes by the container runtime. This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node. \"MountOption\" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. \"MountOption\" value is allowed only when SELinuxMount feature gate is enabled. If not specified and SELinuxMount feature gate is enabled, \"MountOption\" is used. If not specified and SELinuxMount feature gate is disabled, \"MountOption\" is used for ReadWriteOncePod volumes and \"Recursive\" for all other volumes. This field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers. 
All Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 se_linux_options (SELinuxOptions): [optional] # noqa: E501 seccomp_profile (SeccompProfile): [optional] # noqa: E501 - supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + supplemental_groups ([int]): A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + supplemental_groups_policy (str): Defines how supplemental groups of the first container processes are calculated. Valid values are \"Merge\" and \"Strict\". If not specified, \"Merge\" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 sysctls ([Sysctl]): Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 windows_options (WindowsSecurityContextOptions): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/projected_volume_source.py b/sdks/python/client/argo_workflows/model/projected_volume_source.py index 3a7b186faae1..b5d89fb0e558 100644 --- a/sdks/python/client/argo_workflows/model/projected_volume_source.py +++ b/sdks/python/client/argo_workflows/model/projected_volume_source.py @@ -143,7 +143,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) default_mode (int): defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - sources ([VolumeProjection]): sources is the list of volume projections. [optional] # noqa: E501 + sources ([VolumeProjection]): sources is the list of volume projections. Each entry in this list handles one source.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -226,7 +226,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) default_mode (int): defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. 
YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.. [optional] # noqa: E501 - sources ([VolumeProjection]): sources is the list of volume projections. [optional] # noqa: E501 + sources ([VolumeProjection]): sources is the list of volume projections. Each entry in this list handles one source.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/resource_claim.py b/sdks/python/client/argo_workflows/model/resource_claim.py index a5dadcfc748a..c695e5eae246 100644 --- a/sdks/python/client/argo_workflows/model/resource_claim.py +++ b/sdks/python/client/argo_workflows/model/resource_claim.py @@ -82,6 +82,7 @@ def openapi_types(): """ return { 'name': (str,), # noqa: E501 + 'request': (str,), # noqa: E501 } @cached_property @@ -91,6 +92,7 @@ def discriminator(): attribute_map = { 'name': 'name', # noqa: E501 + 'request': 'request', # noqa: E501 } read_only_vars = { @@ -137,6 +139,7 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + request (str): Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -222,6 +225,7 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + request (str): Request is the name chosen for a request in the referenced claim. 
If empty, everything from the claim is made available, otherwise only the result of this request.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/security_context.py b/sdks/python/client/argo_workflows/model/security_context.py index 767ea7fc66fa..423618466b36 100644 --- a/sdks/python/client/argo_workflows/model/security_context.py +++ b/sdks/python/client/argo_workflows/model/security_context.py @@ -174,7 +174,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 app_armor_profile (AppArmorProfile): [optional] # noqa: E501 capabilities (Capabilities): [optional] # noqa: E501 privileged (bool): Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 read_only_root_filesystem (bool): Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. 
If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 @@ -267,7 +267,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 app_armor_profile (AppArmorProfile): [optional] # noqa: E501 capabilities (Capabilities): [optional] # noqa: E501 privileged (bool): Run container in privileged mode. Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 - proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 + proc_mount (str): procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 read_only_root_filesystem (bool): Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows.. 
[optional] # noqa: E501 run_as_group (int): The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows.. [optional] # noqa: E501 run_as_non_root (bool): Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence.. [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py b/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py index b77da79c3e2b..eef2f30201bb 100644 --- a/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py +++ b/sdks/python/client/argo_workflows/model/sensor_create_sensor_request.py @@ -31,9 +31,9 @@ def lazy_import(): from argo_workflows.model.create_options import CreateOptions - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor globals()['CreateOptions'] = CreateOptions - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor class SensorCreateSensorRequest(ModelNormal): @@ -91,7 +91,7 @@ def openapi_types(): return { 'create_options': (CreateOptions,), # noqa: E501 'namespace': (str,), # noqa: E501 - 'sensor': 
(IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 + 'sensor': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), # noqa: E501 } @cached_property @@ -148,7 +148,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) create_options (CreateOptions): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + sensor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -232,7 +232,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) create_options (CreateOptions): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + sensor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py b/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py index 4792d7ec9408..f83396790201 100644 --- a/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py +++ b/sdks/python/client/argo_workflows/model/sensor_sensor_watch_event.py @@ -30,8 +30,8 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor class SensorSensorWatchEvent(ModelNormal): @@ -87,7 +87,7 @@ def openapi_types(): """ lazy_import() return { - 'object': (IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 + 
'object': (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), # noqa: E501 'type': (str,), # noqa: E501 } @@ -142,7 +142,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + object (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 type (str): [optional] # noqa: E501 """ @@ -225,7 +225,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - object (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + object (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 type (str): [optional] # noqa: E501 """ diff --git a/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py b/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py index 4ea9205e3952..ab95873fd645 100644 --- a/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py +++ b/sdks/python/client/argo_workflows/model/sensor_update_sensor_request.py @@ -30,8 +30,8 @@ def lazy_import(): - from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor - globals()['IoArgoprojEventsV1alpha1Sensor'] = IoArgoprojEventsV1alpha1Sensor + from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor + globals()['GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor'] = GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor class SensorUpdateSensorRequest(ModelNormal): @@ -89,7 +89,7 @@ def openapi_types(): return { 'name': (str,), # noqa: E501 'namespace': (str,), # noqa: E501 - 'sensor': (IoArgoprojEventsV1alpha1Sensor,), # noqa: E501 + 'sensor': 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor,), # noqa: E501 } @cached_property @@ -146,7 +146,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + sensor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -230,7 +230,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) name (str): [optional] # noqa: E501 namespace (str): [optional] # noqa: E501 - sensor (IoArgoprojEventsV1alpha1Sensor): [optional] # noqa: E501 + sensor (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/argo_workflows/model/volume.py b/sdks/python/client/argo_workflows/model/volume.py index 7f5f0c1a54e8..86983bcf3f5a 100644 --- a/sdks/python/client/argo_workflows/model/volume.py +++ b/sdks/python/client/argo_workflows/model/volume.py @@ -47,6 +47,7 @@ def lazy_import(): from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource from argo_workflows.model.host_path_volume_source import HostPathVolumeSource + from argo_workflows.model.image_volume_source import ImageVolumeSource from argo_workflows.model.iscsi_volume_source import ISCSIVolumeSource from argo_workflows.model.nfs_volume_source import NFSVolumeSource from argo_workflows.model.persistent_volume_claim_volume_source import PersistentVolumeClaimVolumeSource @@ -77,6 +78,7 @@ def lazy_import(): globals()['GlusterfsVolumeSource'] = GlusterfsVolumeSource globals()['HostPathVolumeSource'] = HostPathVolumeSource globals()['ISCSIVolumeSource'] = ISCSIVolumeSource + globals()['ImageVolumeSource'] = ImageVolumeSource 
globals()['NFSVolumeSource'] = NFSVolumeSource globals()['PersistentVolumeClaimVolumeSource'] = PersistentVolumeClaimVolumeSource globals()['PhotonPersistentDiskVolumeSource'] = PhotonPersistentDiskVolumeSource @@ -161,6 +163,7 @@ def openapi_types(): 'git_repo': (GitRepoVolumeSource,), # noqa: E501 'glusterfs': (GlusterfsVolumeSource,), # noqa: E501 'host_path': (HostPathVolumeSource,), # noqa: E501 + 'image': (ImageVolumeSource,), # noqa: E501 'iscsi': (ISCSIVolumeSource,), # noqa: E501 'nfs': (NFSVolumeSource,), # noqa: E501 'persistent_volume_claim': (PersistentVolumeClaimVolumeSource,), # noqa: E501 @@ -199,6 +202,7 @@ def discriminator(): 'git_repo': 'gitRepo', # noqa: E501 'glusterfs': 'glusterfs', # noqa: E501 'host_path': 'hostPath', # noqa: E501 + 'image': 'image', # noqa: E501 'iscsi': 'iscsi', # noqa: E501 'nfs': 'nfs', # noqa: E501 'persistent_volume_claim': 'persistentVolumeClaim', # noqa: E501 @@ -274,6 +278,7 @@ def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 git_repo (GitRepoVolumeSource): [optional] # noqa: E501 glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 host_path (HostPathVolumeSource): [optional] # noqa: E501 + image (ImageVolumeSource): [optional] # noqa: E501 iscsi (ISCSIVolumeSource): [optional] # noqa: E501 nfs (NFSVolumeSource): [optional] # noqa: E501 persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 @@ -388,6 +393,7 @@ def __init__(self, name, *args, **kwargs): # noqa: E501 git_repo (GitRepoVolumeSource): [optional] # noqa: E501 glusterfs (GlusterfsVolumeSource): [optional] # noqa: E501 host_path (HostPathVolumeSource): [optional] # noqa: E501 + image (ImageVolumeSource): [optional] # noqa: E501 iscsi (ISCSIVolumeSource): [optional] # noqa: E501 nfs (NFSVolumeSource): [optional] # noqa: E501 persistent_volume_claim (PersistentVolumeClaimVolumeSource): [optional] # noqa: E501 diff --git a/sdks/python/client/argo_workflows/models/__init__.py 
b/sdks/python/client/argo_workflows/models/__init__.py index 4b2f194d0a3b..3a9a2aef0f4e 100644 --- a/sdks/python/client/argo_workflows/models/__init__.py +++ b/sdks/python/client/argo_workflows/models/__init__.py @@ -49,6 +49,124 @@ from argo_workflows.model.gce_persistent_disk_volume_source import GCEPersistentDiskVolumeSource from argo_workflows.model.grpc_action import GRPCAction from argo_workflows.model.git_repo_volume_source import GitRepoVolumeSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_consume_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_exchange_declare_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_bind_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amqp_queue_declare_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_aws_lambda_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_amount import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_argo_workflow_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_artifact_location import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_event_hubs_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_events_hub_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_queue_storage_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_azure_service_bus_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_backoff import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_basic_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_repository import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_bitbucket_server_repository import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_calendar_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_catchup_configuration import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_condition import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_by_time import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_conditions_reset_criteria import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_config_map_persistence import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_container import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_custom_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_data_filter import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_email_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_emitter_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_dependency_transformer import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_persistence import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_spec import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec +from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_expr_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_file_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_generic_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gerrit_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_creds import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_git_remote_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_app_creds import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_github_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_gitlab_event_source import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_hdfs_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_http_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_int64_or_string import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_k8_s_resource_policy import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_consumer_group import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_kafka_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_log_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_mqtt_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_metadata import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata +from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_auth import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_events_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nats_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_nsq_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_open_whisk_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_owned_repositories import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_payload_field import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pub_sub_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_pulsar_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_rate_limit import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_event_source import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_redis_stream_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_resource_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_bucket import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_s3_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sasl_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sftp_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sns_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sqs_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_schema_registry_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig +from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_secure_header import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_selector import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_spec import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_service import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_sender import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_thread import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_slack_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_standard_k8_s_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger +from 
argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_status_policy import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_storage_grid_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_stripe_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_tls_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_template import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_time_filter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_parameter_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_policy import 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_trigger_template import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_url_artifact import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_value_from_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_watch_path_config import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_context import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_webhook_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource from argo_workflows.model.glusterfs_volume_source import GlusterfsVolumeSource from argo_workflows.model.google_protobuf_any import GoogleProtobufAny from argo_workflows.model.group_version_resource import GroupVersionResource @@ -59,123 +177,7 @@ from argo_workflows.model.host_alias import HostAlias from argo_workflows.model.host_path_volume_source import HostPathVolumeSource from argo_workflows.model.iscsi_volume_source import ISCSIVolumeSource -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_consume_config import IoArgoprojEventsV1alpha1AMQPConsumeConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_event_source import IoArgoprojEventsV1alpha1AMQPEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_exchange_declare_config import IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig -from 
argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_bind_config import IoArgoprojEventsV1alpha1AMQPQueueBindConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_amqp_queue_declare_config import IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_aws_lambda_trigger import IoArgoprojEventsV1alpha1AWSLambdaTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_amount import IoArgoprojEventsV1alpha1Amount -from argo_workflows.model.io_argoproj_events_v1alpha1_argo_workflow_trigger import IoArgoprojEventsV1alpha1ArgoWorkflowTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_artifact_location import IoArgoprojEventsV1alpha1ArtifactLocation -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_event_hubs_trigger import IoArgoprojEventsV1alpha1AzureEventHubsTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_events_hub_event_source import IoArgoprojEventsV1alpha1AzureEventsHubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_queue_storage_event_source import IoArgoprojEventsV1alpha1AzureQueueStorageEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_event_source import IoArgoprojEventsV1alpha1AzureServiceBusEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_azure_service_bus_trigger import IoArgoprojEventsV1alpha1AzureServiceBusTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_backoff import IoArgoprojEventsV1alpha1Backoff -from argo_workflows.model.io_argoproj_events_v1alpha1_basic_auth import IoArgoprojEventsV1alpha1BasicAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_auth import IoArgoprojEventsV1alpha1BitbucketAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_basic_auth import IoArgoprojEventsV1alpha1BitbucketBasicAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_event_source import 
IoArgoprojEventsV1alpha1BitbucketEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_repository import IoArgoprojEventsV1alpha1BitbucketRepository -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_event_source import IoArgoprojEventsV1alpha1BitbucketServerEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_bitbucket_server_repository import IoArgoprojEventsV1alpha1BitbucketServerRepository -from argo_workflows.model.io_argoproj_events_v1alpha1_calendar_event_source import IoArgoprojEventsV1alpha1CalendarEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_catchup_configuration import IoArgoprojEventsV1alpha1CatchupConfiguration -from argo_workflows.model.io_argoproj_events_v1alpha1_condition import IoArgoprojEventsV1alpha1Condition -from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_by_time import IoArgoprojEventsV1alpha1ConditionsResetByTime -from argo_workflows.model.io_argoproj_events_v1alpha1_conditions_reset_criteria import IoArgoprojEventsV1alpha1ConditionsResetCriteria -from argo_workflows.model.io_argoproj_events_v1alpha1_config_map_persistence import IoArgoprojEventsV1alpha1ConfigMapPersistence -from argo_workflows.model.io_argoproj_events_v1alpha1_custom_trigger import IoArgoprojEventsV1alpha1CustomTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_data_filter import IoArgoprojEventsV1alpha1DataFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_email_trigger import IoArgoprojEventsV1alpha1EmailTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_emitter_event_source import IoArgoprojEventsV1alpha1EmitterEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_context import IoArgoprojEventsV1alpha1EventContext -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency import IoArgoprojEventsV1alpha1EventDependency -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_filter import 
IoArgoprojEventsV1alpha1EventDependencyFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_event_dependency_transformer import IoArgoprojEventsV1alpha1EventDependencyTransformer -from argo_workflows.model.io_argoproj_events_v1alpha1_event_persistence import IoArgoprojEventsV1alpha1EventPersistence -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_filter import IoArgoprojEventsV1alpha1EventSourceFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_spec import IoArgoprojEventsV1alpha1EventSourceSpec -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_status import IoArgoprojEventsV1alpha1EventSourceStatus -from argo_workflows.model.io_argoproj_events_v1alpha1_expr_filter import IoArgoprojEventsV1alpha1ExprFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_file_artifact import IoArgoprojEventsV1alpha1FileArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_file_event_source import IoArgoprojEventsV1alpha1FileEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_generic_event_source import IoArgoprojEventsV1alpha1GenericEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_gerrit_event_source import IoArgoprojEventsV1alpha1GerritEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_git_artifact import IoArgoprojEventsV1alpha1GitArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_git_creds import IoArgoprojEventsV1alpha1GitCreds -from argo_workflows.model.io_argoproj_events_v1alpha1_git_remote_config import IoArgoprojEventsV1alpha1GitRemoteConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_github_app_creds import IoArgoprojEventsV1alpha1GithubAppCreds -from 
argo_workflows.model.io_argoproj_events_v1alpha1_github_event_source import IoArgoprojEventsV1alpha1GithubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_gitlab_event_source import IoArgoprojEventsV1alpha1GitlabEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_hdfs_event_source import IoArgoprojEventsV1alpha1HDFSEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_http_trigger import IoArgoprojEventsV1alpha1HTTPTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_int64_or_string import IoArgoprojEventsV1alpha1Int64OrString -from argo_workflows.model.io_argoproj_events_v1alpha1_k8_s_resource_policy import IoArgoprojEventsV1alpha1K8SResourcePolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_consumer_group import IoArgoprojEventsV1alpha1KafkaConsumerGroup -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_event_source import IoArgoprojEventsV1alpha1KafkaEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_kafka_trigger import IoArgoprojEventsV1alpha1KafkaTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_log_trigger import IoArgoprojEventsV1alpha1LogTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_mqtt_event_source import IoArgoprojEventsV1alpha1MQTTEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_metadata import IoArgoprojEventsV1alpha1Metadata -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_auth import IoArgoprojEventsV1alpha1NATSAuth -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_events_source import IoArgoprojEventsV1alpha1NATSEventsSource -from argo_workflows.model.io_argoproj_events_v1alpha1_nats_trigger import IoArgoprojEventsV1alpha1NATSTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_nsq_event_source import IoArgoprojEventsV1alpha1NSQEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_open_whisk_trigger import IoArgoprojEventsV1alpha1OpenWhiskTrigger -from 
argo_workflows.model.io_argoproj_events_v1alpha1_owned_repositories import IoArgoprojEventsV1alpha1OwnedRepositories -from argo_workflows.model.io_argoproj_events_v1alpha1_payload_field import IoArgoprojEventsV1alpha1PayloadField -from argo_workflows.model.io_argoproj_events_v1alpha1_pub_sub_event_source import IoArgoprojEventsV1alpha1PubSubEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_event_source import IoArgoprojEventsV1alpha1PulsarEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_pulsar_trigger import IoArgoprojEventsV1alpha1PulsarTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_rate_limit import IoArgoprojEventsV1alpha1RateLimit -from argo_workflows.model.io_argoproj_events_v1alpha1_redis_event_source import IoArgoprojEventsV1alpha1RedisEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_redis_stream_event_source import IoArgoprojEventsV1alpha1RedisStreamEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource import IoArgoprojEventsV1alpha1Resource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource_event_source import IoArgoprojEventsV1alpha1ResourceEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_resource_filter import IoArgoprojEventsV1alpha1ResourceFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_artifact import IoArgoprojEventsV1alpha1S3Artifact -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_bucket import IoArgoprojEventsV1alpha1S3Bucket -from argo_workflows.model.io_argoproj_events_v1alpha1_s3_filter import IoArgoprojEventsV1alpha1S3Filter -from argo_workflows.model.io_argoproj_events_v1alpha1_sasl_config import IoArgoprojEventsV1alpha1SASLConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_sftp_event_source import IoArgoprojEventsV1alpha1SFTPEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_sns_event_source import IoArgoprojEventsV1alpha1SNSEventSource -from 
argo_workflows.model.io_argoproj_events_v1alpha1_sqs_event_source import IoArgoprojEventsV1alpha1SQSEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_schema_registry_config import IoArgoprojEventsV1alpha1SchemaRegistryConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_secure_header import IoArgoprojEventsV1alpha1SecureHeader -from argo_workflows.model.io_argoproj_events_v1alpha1_selector import IoArgoprojEventsV1alpha1Selector -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_spec import IoArgoprojEventsV1alpha1SensorSpec -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_status import IoArgoprojEventsV1alpha1SensorStatus -from argo_workflows.model.io_argoproj_events_v1alpha1_service import IoArgoprojEventsV1alpha1Service -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_event_source import IoArgoprojEventsV1alpha1SlackEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_sender import IoArgoprojEventsV1alpha1SlackSender -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_thread import IoArgoprojEventsV1alpha1SlackThread -from argo_workflows.model.io_argoproj_events_v1alpha1_slack_trigger import IoArgoprojEventsV1alpha1SlackTrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_standard_k8_s_trigger import IoArgoprojEventsV1alpha1StandardK8STrigger -from argo_workflows.model.io_argoproj_events_v1alpha1_status import IoArgoprojEventsV1alpha1Status -from argo_workflows.model.io_argoproj_events_v1alpha1_status_policy import IoArgoprojEventsV1alpha1StatusPolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_event_source import IoArgoprojEventsV1alpha1StorageGridEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_storage_grid_filter 
import IoArgoprojEventsV1alpha1StorageGridFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_stripe_event_source import IoArgoprojEventsV1alpha1StripeEventSource -from argo_workflows.model.io_argoproj_events_v1alpha1_tls_config import IoArgoprojEventsV1alpha1TLSConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_template import IoArgoprojEventsV1alpha1Template -from argo_workflows.model.io_argoproj_events_v1alpha1_time_filter import IoArgoprojEventsV1alpha1TimeFilter -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger import IoArgoprojEventsV1alpha1Trigger -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter import IoArgoprojEventsV1alpha1TriggerParameter -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_parameter_source import IoArgoprojEventsV1alpha1TriggerParameterSource -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_policy import IoArgoprojEventsV1alpha1TriggerPolicy -from argo_workflows.model.io_argoproj_events_v1alpha1_trigger_template import IoArgoprojEventsV1alpha1TriggerTemplate -from argo_workflows.model.io_argoproj_events_v1alpha1_url_artifact import IoArgoprojEventsV1alpha1URLArtifact -from argo_workflows.model.io_argoproj_events_v1alpha1_value_from_source import IoArgoprojEventsV1alpha1ValueFromSource -from argo_workflows.model.io_argoproj_events_v1alpha1_watch_path_config import IoArgoprojEventsV1alpha1WatchPathConfig -from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_context import IoArgoprojEventsV1alpha1WebhookContext -from argo_workflows.model.io_argoproj_events_v1alpha1_webhook_event_source import IoArgoprojEventsV1alpha1WebhookEventSource +from argo_workflows.model.image_volume_source import ImageVolumeSource from argo_workflows.model.io_argoproj_workflow_v1alpha1_archive_strategy import IoArgoprojWorkflowV1alpha1ArchiveStrategy from argo_workflows.model.io_argoproj_workflow_v1alpha1_arguments import IoArgoprojWorkflowV1alpha1Arguments from 
argo_workflows.model.io_argoproj_workflow_v1alpha1_art_gc_status import IoArgoprojWorkflowV1alpha1ArtGCStatus diff --git a/sdks/python/client/docs/ArtifactServiceApi.md b/sdks/python/client/docs/ArtifactServiceApi.md index 0b58c6b39ca0..107205e93b7a 100644 --- a/sdks/python/client/docs/ArtifactServiceApi.md +++ b/sdks/python/client/docs/ArtifactServiceApi.md @@ -1229,6 +1229,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -1242,6 +1243,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -1954,6 +1956,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -2252,6 +2255,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3463,6 +3467,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4649,6 +4654,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4764,6 +4770,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -4777,6 +4784,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( 
name="name_example", @@ -4982,6 +4990,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -5973,6 +5982,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -6789,6 +6802,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -7087,6 +7101,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -8298,6 +8313,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9484,6 +9500,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9599,6 +9616,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -9612,6 +9630,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -9817,6 +9836,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -10808,6 +10828,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + 
pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11339,6 +11363,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -12793,6 +12821,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -13614,6 +13646,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -13912,6 +13945,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -15123,6 +15157,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16309,6 +16344,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16424,6 +16460,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -16437,6 +16474,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -16642,6 +16680,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -17633,6 +17672,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -18769,6 +18812,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -18782,6 +18826,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -19494,6 +19539,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -19792,6 +19838,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -21003,6 +21050,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22189,6 +22237,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22304,6 +22353,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -22317,6 +22367,7 @@ with argo_workflows.ApiClient(configuration) as api_client: 
supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -22522,6 +22573,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -23513,6 +23565,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -24329,6 +24385,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -24627,6 +24684,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -25838,6 +25896,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27024,6 +27083,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27139,6 +27199,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -27152,6 +27213,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -27357,6 +27419,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -28348,6 +28411,10 @@ with 
argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -28879,6 +28946,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -30361,6 +30432,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -30374,6 +30446,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -31086,6 +31159,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -31384,6 +31458,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32595,6 +32670,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33781,6 +33857,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33896,6 +33973,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( 
level="level_example", role="role_example", @@ -33909,6 +33987,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -34114,6 +34193,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -35105,6 +35185,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -35921,6 +36005,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -36219,6 +36304,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -37430,6 +37516,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -38616,6 +38703,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -38731,6 +38819,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -38744,6 +38833,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -38949,6 +39039,7 @@ with argo_workflows.ApiClient(configuration) as 
api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -39940,6 +40031,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -40471,6 +40566,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -41925,6 +42024,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -42746,6 +42849,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -43044,6 +43148,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -44255,6 +44360,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -45441,6 +45547,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -45556,6 +45663,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -45569,6 
+45677,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -45774,6 +45883,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -46765,6 +46875,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -47901,6 +48015,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -47914,6 +48029,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -48626,6 +48742,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -48924,6 +49041,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -50135,6 +50253,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -51321,6 +51440,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -51436,6 +51556,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, 
run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -51449,6 +51570,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -51654,6 +51776,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -52645,6 +52768,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -53461,6 +53588,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -53759,6 +53887,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -54970,6 +55099,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -56156,6 +56286,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -56271,6 +56402,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -56284,6 +56416,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", 
sysctls=[ Sysctl( name="name_example", @@ -56489,6 +56622,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -57480,6 +57614,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -58011,6 +58149,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, diff --git a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md index 487122d51a34..fa44f20fe6f7 100644 --- a/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/ClusterWorkflowTemplateServiceApi.md @@ -1065,6 +1065,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -1078,6 +1079,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -1790,6 +1792,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -2088,6 +2091,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3299,6 +3303,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4485,6 +4490,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4600,6 +4606,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -4613,6 +4620,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -4818,6 +4826,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -5809,6 +5818,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -6625,6 +6638,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -6923,6 +6937,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -8134,6 +8149,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9320,6 +9336,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9435,6 +9452,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -9448,6 +9466,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -9653,6 +9672,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -10644,6 +10664,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11175,6 +11199,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11451,6 +11479,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. 
A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. (optional) # example passing only required values which don't have defaults set try: @@ -11462,7 +11491,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_cluster_workflow_template(name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_cluster_workflow_template(name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential) 
pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling ClusterWorkflowTemplateServiceApi->delete_cluster_workflow_template: %s\n" % e) @@ -11480,6 +11509,7 @@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. 
NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type @@ -12644,6 +12674,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -12657,6 +12688,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -13369,6 +13401,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -13667,6 +13700,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -14878,6 +14912,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16064,6 +16099,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16179,6 +16215,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -16192,6 +16229,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + 
supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -16397,6 +16435,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -17388,6 +17427,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -18204,6 +18247,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -18502,6 +18546,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -19713,6 +19758,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -20899,6 +20945,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -21014,6 +21061,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -21027,6 +21075,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -21232,6 +21281,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22223,6 +22273,10 @@ with 
argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -22754,6 +22808,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -24129,6 +24187,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -24142,6 +24201,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -24854,6 +24914,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -25152,6 +25213,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -26363,6 +26425,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27549,6 +27612,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27664,6 +27728,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( 
level="level_example", role="role_example", @@ -27677,6 +27742,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -27882,6 +27948,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -28873,6 +28940,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -29689,6 +29760,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -29987,6 +30059,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -31198,6 +31271,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32384,6 +32458,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32499,6 +32574,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -32512,6 +32588,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -32717,6 +32794,7 @@ with argo_workflows.ApiClient(configuration) as 
api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33708,6 +33786,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -34239,6 +34321,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, diff --git a/sdks/python/client/docs/CronWorkflowServiceApi.md b/sdks/python/client/docs/CronWorkflowServiceApi.md index 0aa6ba71e7d7..f92dddf73c29 100644 --- a/sdks/python/client/docs/CronWorkflowServiceApi.md +++ b/sdks/python/client/docs/CronWorkflowServiceApi.md @@ -1125,6 +1125,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -1138,6 +1139,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -1850,6 +1852,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -2148,6 +2151,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3359,6 +3363,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4545,6 
+4550,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4660,6 +4666,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -4673,6 +4680,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -4878,6 +4886,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -5869,6 +5878,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -6685,6 +6698,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -6983,6 +6997,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -8194,6 +8209,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9380,6 +9396,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9495,6 +9512,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + 
se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -9508,6 +9526,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -9713,6 +9732,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -10704,6 +10724,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11235,6 +11259,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11539,6 +11567,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. 
decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. (optional) # example passing only required values which don't have defaults set try: @@ -11550,7 +11579,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_cron_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_cron_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling CronWorkflowServiceApi->delete_cron_workflow: %s\n" % e) @@ -11569,6 +11598,7 
@@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. 
Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type @@ -12786,6 +12816,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -12799,6 +12830,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -13511,6 +13543,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -13809,6 +13842,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -15020,6 +15054,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16206,6 +16241,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16321,6 +16357,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -16334,6 +16371,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -16539,6 +16577,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], 
limits={ @@ -17530,6 +17569,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -18346,6 +18389,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -18644,6 +18688,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -19855,6 +19900,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -21041,6 +21087,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -21156,6 +21203,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -21169,6 +21217,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -21374,6 +21423,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22365,6 +22415,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -22896,6 
+22950,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -24534,6 +24592,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -24547,6 +24606,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -25259,6 +25319,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -25557,6 +25618,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -26768,6 +26830,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27954,6 +28017,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -28069,6 +28133,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -28082,6 +28147,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -28287,6 +28353,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -29278,6 +29345,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -30094,6 +30165,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -30392,6 +30464,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -31603,6 +31676,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32789,6 +32863,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32904,6 +32979,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -32917,6 +32993,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -33122,6 +33199,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -34113,6 +34191,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + 
pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -34644,6 +34726,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, diff --git a/sdks/python/client/docs/EventSourceServiceApi.md b/sdks/python/client/docs/EventSourceServiceApi.md index fa522c0d0bb7..df016b6c2e8a 100644 --- a/sdks/python/client/docs/EventSourceServiceApi.md +++ b/sdks/python/client/docs/EventSourceServiceApi.md @@ -14,7 +14,7 @@ Method | HTTP request | Description # **create_event_source** -> IoArgoprojEventsV1alpha1EventSource create_event_source(namespace, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource create_event_source(namespace, body) @@ -27,8 +27,8 @@ import time import argo_workflows from argo_workflows.api import event_source_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource from argo_workflows.model.eventsource_create_event_source_request import EventsourceCreateEventSourceRequest +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
@@ -53,7 +53,7 @@ with argo_workflows.ApiClient(configuration) as api_client: api_instance = event_source_service_api.EventSourceServiceApi(api_client) namespace = "namespace_example" # str | body = EventsourceCreateEventSourceRequest( - event_source=IoArgoprojEventsV1alpha1EventSource( + event_source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource( metadata=ObjectMeta( annotations={ "key": "key_example", @@ -96,10 +96,10 @@ with argo_workflows.ApiClient(configuration) as api_client: self_link="self_link_example", uid="uid_example", ), - spec=IoArgoprojEventsV1alpha1EventSourceSpec( + spec=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec( amqp={ - "key": IoArgoprojEventsV1alpha1AMQPEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -111,28 +111,28 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - consume=IoArgoprojEventsV1alpha1AMQPConsumeConfig( + consume=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig( auto_ack=True, consumer_tag="consumer_tag_example", exclusive=True, no_local=True, no_wait=True, ), - exchange_declare=IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig( + 
exchange_declare=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig( auto_delete=True, durable=True, internal=True, @@ -140,17 +140,17 @@ with argo_workflows.ApiClient(configuration) as api_client: ), exchange_name="exchange_name_example", exchange_type="exchange_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - queue_bind=IoArgoprojEventsV1alpha1AMQPQueueBindConfig( + queue_bind=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig( no_wait=True, ), - queue_declare=IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig( + queue_declare=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig( arguments="arguments_example", auto_delete=True, durable=True, @@ -159,7 +159,7 @@ with argo_workflows.ApiClient(configuration) as api_client: no_wait=True, ), routing_key="routing_key_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -186,8 +186,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_events_hub={ - "key": IoArgoprojEventsV1alpha1AzureEventsHubEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), fqdn="fqdn_example", @@ -208,7 +208,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_queue_storage={ - "key": IoArgoprojEventsV1alpha1AzureQueueStorageEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource( connection_string=SecretKeySelector( key="key_example", name="name_example", @@ -216,7 +216,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: ), decode_message=True, dlq=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -229,13 +229,13 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_service_bus={ - "key": IoArgoprojEventsV1alpha1AzureServiceBusEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource( connection_string=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), fully_qualified_namespace="fully_qualified_namespace_example", @@ -245,7 +245,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, queue_name="queue_name_example", subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -267,9 +267,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, bitbucket={ - "key": IoArgoprojEventsV1alpha1BitbucketEventSource( - auth=IoArgoprojEventsV1alpha1BitbucketAuth( - basic=IoArgoprojEventsV1alpha1BitbucketBasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth( + basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -291,7 +291,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( 
expression="expression_example", ), metadata={ @@ -300,13 +300,13 @@ with argo_workflows.ApiClient(configuration) as api_client: owner="owner_example", project_key="project_key_example", repositories=[ - IoArgoprojEventsV1alpha1BitbucketRepository( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository( owner="owner_example", repository_slug="repository_slug_example", ), ], repository_slug="repository_slug_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -334,32 +334,38 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, bitbucketserver={ - "key": IoArgoprojEventsV1alpha1BitbucketServerEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource( access_token=SecretKeySelector( key="key_example", name="name_example", optional=True, ), bitbucketserver_base_url="bitbucketserver_base_url_example", + check_interval="check_interval_example", delete_hook_on_finish=True, events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ "key": "key_example", }, + one_event_per_change=True, project_key="project_key_example", + projects=[ + "projects_example", + ], repositories=[ - IoArgoprojEventsV1alpha1BitbucketServerRepository( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository( project_key="project_key_example", repository_slug="repository_slug_example", ), ], repository_slug="repository_slug_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + skip_branch_refs_changed_on_open_pr=True, + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -377,7 +383,7 @@ with argo_workflows.ApiClient(configuration) 
as api_client: ), insecure_skip_verify=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -410,23 +416,23 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, calendar={ - "key": IoArgoprojEventsV1alpha1CalendarEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource( exclusion_dates=[ "exclusion_dates_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), interval="interval_example", metadata={ "key": "key_example", }, - persistence=IoArgoprojEventsV1alpha1EventPersistence( - catchup=IoArgoprojEventsV1alpha1CatchupConfiguration( + persistence=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence( + catchup=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration( enabled=True, max_duration="max_duration_example", ), - config_map=IoArgoprojEventsV1alpha1ConfigMapPersistence( + config_map=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence( create_if_not_exist=True, name="name_example", ), @@ -436,25 +442,25 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, emitter={ - "key": IoArgoprojEventsV1alpha1EmitterEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource( broker="broker_example", channel_key="channel_key_example", channel_name="channel_name_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + 
factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -466,7 +472,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -493,16 +499,16 @@ with argo_workflows.ApiClient(configuration) as api_client: }, event_bus_name="event_bus_name_example", file={ - "key": IoArgoprojEventsV1alpha1FileEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource( event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ "key": "key_example", }, polling=True, - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -510,14 +516,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, generic={ - "key": IoArgoprojEventsV1alpha1GenericEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource( auth_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), config="config_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), insecure=True, @@ -529,8 +535,8 @@ with 
argo_workflows.ApiClient(configuration) as api_client: ), }, gerrit={ - "key": IoArgoprojEventsV1alpha1GerritEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -546,7 +552,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), gerrit_base_url="gerrit_base_url_example", @@ -558,7 +564,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "projects_example", ], ssl_verify=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -586,7 +592,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, github={ - "key": IoArgoprojEventsV1alpha1GithubEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource( active=True, api_token=SecretKeySelector( key="key_example", @@ -598,10 +604,10 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), - github_app=IoArgoprojEventsV1alpha1GithubAppCreds( + github_app=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds( app_id="app_id_example", installation_id="installation_id_example", private_key=SecretKeySelector( @@ -622,7 +628,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], owner="owner_example", repositories=[ - IoArgoprojEventsV1alpha1OwnedRepositories( + 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories( names=[ "names_example", ], @@ -630,7 +636,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], repository="repository_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -663,7 +669,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, gitlab={ - "key": IoArgoprojEventsV1alpha1GitlabEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource( access_token=SecretKeySelector( key="key_example", name="name_example", @@ -674,7 +680,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), gitlab_base_url="gitlab_base_url_example", @@ -693,7 +699,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -721,12 +727,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, hdfs={ - "key": IoArgoprojEventsV1alpha1HDFSEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource( addresses=[ "addresses_example", ], check_interval="check_interval_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), hdfs_user="hdfs_user_example", @@ -752,7 +758,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, type="type_example", - 
watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -760,28 +766,28 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, kafka={ - "key": IoArgoprojEventsV1alpha1KafkaEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource( config="config_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - consumer_group=IoArgoprojEventsV1alpha1KafkaConsumerGroup( + consumer_group=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup( group_name="group_name_example", oldest=True, rebalance_strategy="rebalance_strategy_example", ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -790,7 +796,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, partition="partition_example", - sasl=IoArgoprojEventsV1alpha1SASLConfig( + sasl=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig( mechanism="mechanism_example", password_secret=SecretKeySelector( key="key_example", @@ -803,7 +809,7 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + 
tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -827,13 +833,13 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, minio={ - "key": IoArgoprojEventsV1alpha1S3Artifact( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -846,7 +852,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -863,8 +869,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, mqtt={ - "key": IoArgoprojEventsV1alpha1MQTTEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -877,28 +883,28 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), client_id="client_id_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - 
filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -921,9 +927,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, nats={ - "key": IoArgoprojEventsV1alpha1NATSEventsSource( - auth=IoArgoprojEventsV1alpha1NATSAuth( - basic=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth( + basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -951,29 +957,30 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, + queue="queue_example", subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( 
ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -995,23 +1002,23 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, nsq={ - "key": IoArgoprojEventsV1alpha1NSQEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource( channel="channel_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -1019,7 +1026,7 @@ with argo_workflows.ApiClient(configuration) as api_client: metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1041,14 +1048,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, pub_sub={ - "key": IoArgoprojEventsV1alpha1PubSubEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource( credential_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), delete_subscription_on_finish=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -1062,7 +1069,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: ), }, pulsar={ - "key": IoArgoprojEventsV1alpha1PulsarEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource( auth_athenz_params={ "key": "key_example", }, @@ -1076,28 +1083,28 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1130,12 +1137,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, redis={ - "key": IoArgoprojEventsV1alpha1RedisEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource( channels=[ "channels_example", ], db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -1149,7 +1156,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + 
tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1171,10 +1178,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, redis_stream={ - "key": IoArgoprojEventsV1alpha1RedisStreamEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource( consumer_group="consumer_group_example", db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -1190,7 +1197,7 @@ with argo_workflows.ApiClient(configuration) as api_client: streams=[ "streams_example", ], - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1213,22 +1220,22 @@ with argo_workflows.ApiClient(configuration) as api_client: }, replicas=1, resource={ - "key": IoArgoprojEventsV1alpha1ResourceEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource( event_types=[ "event_types_example", ], - filter=IoArgoprojEventsV1alpha1ResourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter( after_start=True, created_by=dateutil_parser('1970-01-01T00:00:00.00Z'), fields=[ - IoArgoprojEventsV1alpha1Selector( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector( key="key_example", operation="operation_example", value="value_example", ), ], labels=[ - IoArgoprojEventsV1alpha1Selector( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector( key="key_example", operation="operation_example", value="value_example", @@ -1247,8 +1254,16 @@ with argo_workflows.ApiClient(configuration) as api_client: namespace="namespace_example", ), }, - service=IoArgoprojEventsV1alpha1Service( + service=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service( 
cluster_ip="cluster_ip_example", + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), ports=[ ServicePort( app_protocol="app_protocol_example", @@ -1261,14 +1276,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), sftp={ - "key": IoArgoprojEventsV1alpha1SFTPEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource( address=SecretKeySelector( key="key_example", name="name_example", optional=True, ), event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -1290,7 +1305,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -1298,8 +1313,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, slack={ - "key": IoArgoprojEventsV1alpha1SlackEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -1315,7 +1330,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1343,14 +1358,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, sns={ - "key": IoArgoprojEventsV1alpha1SNSEventSource( + "key": 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -1365,7 +1380,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), topic_arn="topic_arn_example", validate_signature=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1393,7 +1408,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, sqs={ - "key": IoArgoprojEventsV1alpha1SQSEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource( access_key=SecretKeySelector( key="key_example", name="name_example", @@ -1401,7 +1416,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), dlq=True, endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -1426,7 +1441,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, storage_grid={ - "key": IoArgoprojEventsV1alpha1StorageGridEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource( api_url="api_url_example", auth_token=SecretKeySelector( key="key_example", @@ -1437,7 +1452,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1StorageGridFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter( prefix="prefix_example", suffix="suffix_example", ), @@ -1446,7 +1461,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, 
region="region_example", topic_arn="topic_arn_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1474,7 +1489,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, stripe={ - "key": IoArgoprojEventsV1alpha1StripeEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource( api_key=SecretKeySelector( key="key_example", name="name_example", @@ -1487,7 +1502,7 @@ with argo_workflows.ApiClient(configuration) as api_client: metadata={ "key": "key_example", }, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1514,7 +1529,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), }, - template=IoArgoprojEventsV1alpha1Template( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template( affinity=Affinity( node_affinity=NodeAffinity( preferred_during_scheduling_ignored_during_execution=[ @@ -1746,13 +1761,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], + container=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container( env=[ EnvVar( name="name_example", @@ -1793,148 +1802,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - image="image_example", image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="protocol_example", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resize_policy=[ - ContainerResizePolicy( - resource_name="resource_name_example", - 
restart_policy="restart_policy_example", - ), - ], resources=ResourceRequirements( claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -1944,7 +1817,6 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - restart_policy="restart_policy_example", security_context=SecurityContext( allow_privilege_escalation=True, app_armor_profile=AppArmorProfile( @@ -1982,50 +1854,6 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_user_name="run_as_user_name_example", ), ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], volume_mounts=[ VolumeMount( mount_path="mount_path_example", @@ -2037,14 +1865,13 @@ with argo_workflows.ApiClient(configuration) as api_client: sub_path_expr="sub_path_expr_example", ), ], - working_dir="working_dir_example", ), image_pull_secrets=[ LocalObjectReference( name="name_example", ), ], - metadata=IoArgoprojEventsV1alpha1Metadata( + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( annotations={ "key": "key_example", }, @@ -2067,6 +1894,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, 
run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -2080,6 +1908,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -2323,6 +2152,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -2497,11 +2330,11 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), webhook={ - "key": IoArgoprojEventsV1alpha1WebhookEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), - webhook_context=IoArgoprojEventsV1alpha1WebhookContext( + webhook_context=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2529,10 +2362,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, ), - status=IoArgoprojEventsV1alpha1EventSourceStatus( - status=IoArgoprojEventsV1alpha1Status( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status( conditions=[ - IoArgoprojEventsV1alpha1Condition( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition( last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), message="message_example", reason="reason_example", @@ -2564,7 +2397,7 @@ Name | Type | Description | Notes ### Return type 
-[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization @@ -2631,6 +2464,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
(optional) # example passing only required values which don't have defaults set try: @@ -2642,7 +2476,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_event_source(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_event_source(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling EventSourceServiceApi->delete_event_source: %s\n" % e) @@ -2661,6 +2495,7 @@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. 
| [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] ### Return type @@ -2734,9 +2569,10 @@ with argo_workflows.ApiClient(configuration) as api_client: pod_log_options_since_time_seconds = "podLogOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) pod_log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) pod_log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. (optional) pod_log_options_limit_bytes = "podLogOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) pod_log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. 
If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) + pod_log_options_stream = "podLogOptions.stream_example" # str | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. (optional) # example passing only required values which don't have defaults set try: @@ -2748,7 +2584,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.event_sources_logs(namespace, name=name, event_source_type=event_source_type, event_name=event_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) + api_response = api_instance.event_sources_logs(namespace, name=name, event_source_type=event_source_type, event_name=event_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, 
pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, pod_log_options_stream=pod_log_options_stream) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling EventSourceServiceApi->event_sources_logs: %s\n" % e) @@ -2771,9 +2607,10 @@ Name | Type | Description | Notes **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. 
Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **pod_log_options_stream** | **str**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
| [optional] ### Return type @@ -2799,7 +2636,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_event_source** -> IoArgoprojEventsV1alpha1EventSource get_event_source(namespace, name) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource get_event_source(namespace, name) @@ -2812,7 +2649,7 @@ import time import argo_workflows from argo_workflows.api import event_source_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
@@ -2856,7 +2693,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization @@ -2878,7 +2715,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_event_sources** -> IoArgoprojEventsV1alpha1EventSourceList list_event_sources(namespace) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList list_event_sources(namespace) @@ -2891,7 +2728,7 @@ import time import argo_workflows from argo_workflows.api import event_source_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source_list import IoArgoprojEventsV1alpha1EventSourceList +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
@@ -2961,7 +2798,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1EventSourceList**](IoArgoprojEventsV1alpha1EventSourceList.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md) ### Authorization @@ -2983,7 +2820,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **update_event_source** -> IoArgoprojEventsV1alpha1EventSource update_event_source(namespace, name, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource update_event_source(namespace, name, body) @@ -2996,8 +2833,8 @@ import time import argo_workflows from argo_workflows.api import event_source_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_event_source import IoArgoprojEventsV1alpha1EventSource from argo_workflows.model.eventsource_update_event_source_request import EventsourceUpdateEventSourceRequest +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_event_source import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
@@ -3023,7 +2860,7 @@ with argo_workflows.ApiClient(configuration) as api_client: namespace = "namespace_example" # str | name = "name_example" # str | body = EventsourceUpdateEventSourceRequest( - event_source=IoArgoprojEventsV1alpha1EventSource( + event_source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource( metadata=ObjectMeta( annotations={ "key": "key_example", @@ -3066,10 +2903,10 @@ with argo_workflows.ApiClient(configuration) as api_client: self_link="self_link_example", uid="uid_example", ), - spec=IoArgoprojEventsV1alpha1EventSourceSpec( + spec=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec( amqp={ - "key": IoArgoprojEventsV1alpha1AMQPEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -3081,28 +2918,28 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - consume=IoArgoprojEventsV1alpha1AMQPConsumeConfig( + consume=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig( auto_ack=True, consumer_tag="consumer_tag_example", exclusive=True, no_local=True, no_wait=True, ), - exchange_declare=IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig( + 
exchange_declare=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig( auto_delete=True, durable=True, internal=True, @@ -3110,17 +2947,17 @@ with argo_workflows.ApiClient(configuration) as api_client: ), exchange_name="exchange_name_example", exchange_type="exchange_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - queue_bind=IoArgoprojEventsV1alpha1AMQPQueueBindConfig( + queue_bind=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig( no_wait=True, ), - queue_declare=IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig( + queue_declare=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig( arguments="arguments_example", auto_delete=True, durable=True, @@ -3129,7 +2966,7 @@ with argo_workflows.ApiClient(configuration) as api_client: no_wait=True, ), routing_key="routing_key_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3156,8 +2993,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_events_hub={ - "key": IoArgoprojEventsV1alpha1AzureEventsHubEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), fqdn="fqdn_example", @@ -3178,7 +3015,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_queue_storage={ - "key": IoArgoprojEventsV1alpha1AzureQueueStorageEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource( connection_string=SecretKeySelector( key="key_example", name="name_example", @@ -3186,7 +3023,7 
@@ with argo_workflows.ApiClient(configuration) as api_client: ), decode_message=True, dlq=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -3199,13 +3036,13 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, azure_service_bus={ - "key": IoArgoprojEventsV1alpha1AzureServiceBusEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource( connection_string=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), fully_qualified_namespace="fully_qualified_namespace_example", @@ -3215,7 +3052,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, queue_name="queue_name_example", subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3237,9 +3074,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, bitbucket={ - "key": IoArgoprojEventsV1alpha1BitbucketEventSource( - auth=IoArgoprojEventsV1alpha1BitbucketAuth( - basic=IoArgoprojEventsV1alpha1BitbucketBasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth( + basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -3261,7 +3098,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( 
expression="expression_example", ), metadata={ @@ -3270,13 +3107,13 @@ with argo_workflows.ApiClient(configuration) as api_client: owner="owner_example", project_key="project_key_example", repositories=[ - IoArgoprojEventsV1alpha1BitbucketRepository( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository( owner="owner_example", repository_slug="repository_slug_example", ), ], repository_slug="repository_slug_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3304,32 +3141,38 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, bitbucketserver={ - "key": IoArgoprojEventsV1alpha1BitbucketServerEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource( access_token=SecretKeySelector( key="key_example", name="name_example", optional=True, ), bitbucketserver_base_url="bitbucketserver_base_url_example", + check_interval="check_interval_example", delete_hook_on_finish=True, events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ "key": "key_example", }, + one_event_per_change=True, project_key="project_key_example", + projects=[ + "projects_example", + ], repositories=[ - IoArgoprojEventsV1alpha1BitbucketServerRepository( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository( project_key="project_key_example", repository_slug="repository_slug_example", ), ], repository_slug="repository_slug_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + skip_branch_refs_changed_on_open_pr=True, + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3347,7 +3190,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: ), insecure_skip_verify=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3380,23 +3223,23 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, calendar={ - "key": IoArgoprojEventsV1alpha1CalendarEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource( exclusion_dates=[ "exclusion_dates_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), interval="interval_example", metadata={ "key": "key_example", }, - persistence=IoArgoprojEventsV1alpha1EventPersistence( - catchup=IoArgoprojEventsV1alpha1CatchupConfiguration( + persistence=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence( + catchup=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration( enabled=True, max_duration="max_duration_example", ), - config_map=IoArgoprojEventsV1alpha1ConfigMapPersistence( + config_map=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence( create_if_not_exist=True, name="name_example", ), @@ -3406,25 +3249,25 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, emitter={ - "key": IoArgoprojEventsV1alpha1EmitterEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource( broker="broker_example", channel_key="channel_key_example", channel_name="channel_name_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - 
factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -3436,7 +3279,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3463,16 +3306,16 @@ with argo_workflows.ApiClient(configuration) as api_client: }, event_bus_name="event_bus_name_example", file={ - "key": IoArgoprojEventsV1alpha1FileEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource( event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ "key": "key_example", }, polling=True, - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -3480,14 +3323,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, generic={ - "key": IoArgoprojEventsV1alpha1GenericEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource( auth_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), config="config_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), insecure=True, 
@@ -3499,8 +3342,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, gerrit={ - "key": IoArgoprojEventsV1alpha1GerritEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -3516,7 +3359,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), gerrit_base_url="gerrit_base_url_example", @@ -3528,7 +3371,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "projects_example", ], ssl_verify=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3556,7 +3399,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, github={ - "key": IoArgoprojEventsV1alpha1GithubEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource( active=True, api_token=SecretKeySelector( key="key_example", @@ -3568,10 +3411,10 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), - github_app=IoArgoprojEventsV1alpha1GithubAppCreds( + github_app=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds( app_id="app_id_example", installation_id="installation_id_example", private_key=SecretKeySelector( @@ -3592,7 +3435,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], owner="owner_example", repositories=[ - IoArgoprojEventsV1alpha1OwnedRepositories( + 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories( names=[ "names_example", ], @@ -3600,7 +3443,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], repository="repository_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3633,7 +3476,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, gitlab={ - "key": IoArgoprojEventsV1alpha1GitlabEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource( access_token=SecretKeySelector( key="key_example", name="name_example", @@ -3644,7 +3487,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), gitlab_base_url="gitlab_base_url_example", @@ -3663,7 +3506,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3691,12 +3534,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, hdfs={ - "key": IoArgoprojEventsV1alpha1HDFSEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource( addresses=[ "addresses_example", ], check_interval="check_interval_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), hdfs_user="hdfs_user_example", @@ -3722,7 +3565,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, type="type_example", - 
watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -3730,28 +3573,28 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, kafka={ - "key": IoArgoprojEventsV1alpha1KafkaEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource( config="config_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - consumer_group=IoArgoprojEventsV1alpha1KafkaConsumerGroup( + consumer_group=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup( group_name="group_name_example", oldest=True, rebalance_strategy="rebalance_strategy_example", ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -3760,7 +3603,7 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, partition="partition_example", - sasl=IoArgoprojEventsV1alpha1SASLConfig( + sasl=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig( mechanism="mechanism_example", password_secret=SecretKeySelector( key="key_example", @@ -3773,7 +3616,7 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + 
tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3797,13 +3640,13 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, minio={ - "key": IoArgoprojEventsV1alpha1S3Artifact( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -3816,7 +3659,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -3833,8 +3676,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, mqtt={ - "key": IoArgoprojEventsV1alpha1MQTTEventSource( - auth=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -3847,28 +3690,28 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), client_id="client_id_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - 
filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3891,9 +3734,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, nats={ - "key": IoArgoprojEventsV1alpha1NATSEventsSource( - auth=IoArgoprojEventsV1alpha1NATSAuth( - basic=IoArgoprojEventsV1alpha1BasicAuth( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth( + basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -3921,29 +3764,30 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, + queue="queue_example", subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( 
ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3965,23 +3809,23 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, nsq={ - "key": IoArgoprojEventsV1alpha1NSQEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource( channel="channel_example", - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -3989,7 +3833,7 @@ with argo_workflows.ApiClient(configuration) as api_client: metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4011,14 +3855,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, pub_sub={ - "key": IoArgoprojEventsV1alpha1PubSubEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource( credential_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), delete_subscription_on_finish=True, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -4032,7 +3876,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: ), }, pulsar={ - "key": IoArgoprojEventsV1alpha1PulsarEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource( auth_athenz_params={ "key": "key_example", }, @@ -4046,28 +3890,28 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, metadata={ "key": "key_example", }, - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4100,12 +3944,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, redis={ - "key": IoArgoprojEventsV1alpha1RedisEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource( channels=[ "channels_example", ], db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -4119,7 +3963,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + 
tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4141,10 +3985,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, redis_stream={ - "key": IoArgoprojEventsV1alpha1RedisStreamEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource( consumer_group="consumer_group_example", db=1, - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), host_address="host_address_example", @@ -4160,7 +4004,7 @@ with argo_workflows.ApiClient(configuration) as api_client: streams=[ "streams_example", ], - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4183,22 +4027,22 @@ with argo_workflows.ApiClient(configuration) as api_client: }, replicas=1, resource={ - "key": IoArgoprojEventsV1alpha1ResourceEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource( event_types=[ "event_types_example", ], - filter=IoArgoprojEventsV1alpha1ResourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter( after_start=True, created_by=dateutil_parser('1970-01-01T00:00:00.00Z'), fields=[ - IoArgoprojEventsV1alpha1Selector( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector( key="key_example", operation="operation_example", value="value_example", ), ], labels=[ - IoArgoprojEventsV1alpha1Selector( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector( key="key_example", operation="operation_example", value="value_example", @@ -4217,8 +4061,16 @@ with argo_workflows.ApiClient(configuration) as api_client: namespace="namespace_example", ), }, - service=IoArgoprojEventsV1alpha1Service( + service=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service( 
cluster_ip="cluster_ip_example", + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( + annotations={ + "key": "key_example", + }, + labels={ + "key": "key_example", + }, + ), ports=[ ServicePort( app_protocol="app_protocol_example", @@ -4231,14 +4083,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), sftp={ - "key": IoArgoprojEventsV1alpha1SFTPEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource( address=SecretKeySelector( key="key_example", name="name_example", optional=True, ), event_type="event_type_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -4260,7 +4112,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - watch_path_config=IoArgoprojEventsV1alpha1WatchPathConfig( + watch_path_config=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig( directory="directory_example", path="path_example", path_regexp="path_regexp_example", @@ -4268,8 +4120,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, slack={ - "key": IoArgoprojEventsV1alpha1SlackEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -4285,7 +4137,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4313,14 +4165,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, sns={ - "key": IoArgoprojEventsV1alpha1SNSEventSource( + "key": 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), metadata={ @@ -4335,7 +4187,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), topic_arn="topic_arn_example", validate_signature=True, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4363,7 +4215,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, sqs={ - "key": IoArgoprojEventsV1alpha1SQSEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource( access_key=SecretKeySelector( key="key_example", name="name_example", @@ -4371,7 +4223,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), dlq=True, endpoint="endpoint_example", - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), json_body=True, @@ -4396,7 +4248,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, storage_grid={ - "key": IoArgoprojEventsV1alpha1StorageGridEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource( api_url="api_url_example", auth_token=SecretKeySelector( key="key_example", @@ -4407,7 +4259,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1StorageGridFilter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter( prefix="prefix_example", suffix="suffix_example", ), @@ -4416,7 +4268,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, 
region="region_example", topic_arn="topic_arn_example", - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4444,7 +4296,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, stripe={ - "key": IoArgoprojEventsV1alpha1StripeEventSource( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource( api_key=SecretKeySelector( key="key_example", name="name_example", @@ -4457,7 +4309,7 @@ with argo_workflows.ApiClient(configuration) as api_client: metadata={ "key": "key_example", }, - webhook=IoArgoprojEventsV1alpha1WebhookContext( + webhook=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4484,7 +4336,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), }, - template=IoArgoprojEventsV1alpha1Template( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template( affinity=Affinity( node_affinity=NodeAffinity( preferred_during_scheduling_ignored_during_execution=[ @@ -4716,13 +4568,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], + container=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container( env=[ EnvVar( name="name_example", @@ -4763,148 +4609,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - image="image_example", image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - 
tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="protocol_example", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resize_policy=[ - ContainerResizePolicy( - resource_name="resource_name_example", - 
restart_policy="restart_policy_example", - ), - ], resources=ResourceRequirements( claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4914,7 +4624,6 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - restart_policy="restart_policy_example", security_context=SecurityContext( allow_privilege_escalation=True, app_armor_profile=AppArmorProfile( @@ -4952,50 +4661,6 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_user_name="run_as_user_name_example", ), ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], volume_mounts=[ VolumeMount( mount_path="mount_path_example", @@ -5007,14 +4672,13 @@ with argo_workflows.ApiClient(configuration) as api_client: sub_path_expr="sub_path_expr_example", ), ], - working_dir="working_dir_example", ), image_pull_secrets=[ LocalObjectReference( name="name_example", ), ], - metadata=IoArgoprojEventsV1alpha1Metadata( + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( annotations={ "key": "key_example", }, @@ -5037,6 +4701,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, 
run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -5050,6 +4715,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -5293,6 +4959,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -5467,11 +5137,11 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), webhook={ - "key": IoArgoprojEventsV1alpha1WebhookEventSource( - filter=IoArgoprojEventsV1alpha1EventSourceFilter( + "key": GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter( expression="expression_example", ), - webhook_context=IoArgoprojEventsV1alpha1WebhookContext( + webhook_context=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext( auth_secret=SecretKeySelector( key="key_example", name="name_example", @@ -5499,10 +5169,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), }, ), - status=IoArgoprojEventsV1alpha1EventSourceStatus( - status=IoArgoprojEventsV1alpha1Status( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status( conditions=[ - IoArgoprojEventsV1alpha1Condition( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition( last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), message="message_example", reason="reason_example", @@ -5536,7 +5206,7 @@ Name | Type | Description | Notes ### Return type 
-[**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) ### Authorization diff --git a/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md b/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md index 630497a18728..b8e668464153 100644 --- a/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md +++ b/sdks/python/client/docs/EventsourceCreateEventSourceRequest.md @@ -4,7 +4,7 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**event_source** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**event_source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **namespace** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md b/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md index 3e1f9c6bbddf..f8e8f4d7d7ae 100644 --- a/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md +++ b/sdks/python/client/docs/EventsourceEventSourceWatchEvent.md @@ -4,7 +4,7 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**object** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**object** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **type** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any 
string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md b/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md index b6fb65c1a577..42de14fa2133 100644 --- a/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md +++ b/sdks/python/client/docs/EventsourceUpdateEventSourceRequest.md @@ -4,7 +4,7 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**event_source** | [**IoArgoprojEventsV1alpha1EventSource**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**event_source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **name** | **str** | | [optional] **namespace** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/GRPCAction.md b/sdks/python/client/docs/GRPCAction.md index 306d6237cc05..b4a1704bafd3 100644 --- a/sdks/python/client/docs/GRPCAction.md +++ b/sdks/python/client/docs/GRPCAction.md @@ -1,5 +1,6 @@ # GRPCAction +GRPCAction specifies an action involving a GRPC service. 
## Properties Name | Type | Description | Notes diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md index 409b019423d0..b9625de8dabc 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPConsumeConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AMQPConsumeConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md new file mode 100644 index 000000000000..37d06f5e29d8 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md @@ -0,0 +1,26 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**consume** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPConsumeConfig.md) | | [optional] +**exchange_declare** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md) | | [optional] +**exchange_name** | **str** | | [optional] +**exchange_type** | **str** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**json_body** | **bool** | | [optional] +**metadata** | **{str: (str,)}** | | [optional] +**queue_bind** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md) | | [optional] +**queue_declare** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md) | | [optional] +**routing_key** | **str** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **str** | | [optional] +**url_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md index f3bf375aa3a2..5d6ecc7ec85e 100644 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPExchangeDeclareConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md similarity index 87% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md index 4a2a26f3a896..e60ffd7418b2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AMQPQueueBindConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueBindConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md index 7ebd05a6b10e..35d3c5e34d15 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPQueueDeclareConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md similarity index 73% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md index 2e45fe3532ca..37962f0eeab2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AWSLambdaTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AWSLambdaTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger ## Properties @@ -7,8 +7,8 @@ Name | Type | Description | Notes **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **function_name** | **str** | FunctionName refers to the name of the function to invoke. | [optional] **invocation_type** | **str** | Choose from the following options. * RequestResponse (default) - Invoke the function synchronously. Keep the connection open until the function returns a response or times out. The API response includes the function response and additional data. * Event - Invoke the function asynchronously. Send events that fail multiple times to the function's dead-letter queue (if it's configured). The API response only includes a status code. * DryRun - Validate parameter values and verify that the user or role has permission to invoke the function. +optional | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **region** | **str** | | [optional] **role_arn** | **str** | | [optional] **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md index 22f4df50a6bc..e050defec5d8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Amount.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1Amount +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount Amount represent a numeric amount. 
diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md similarity index 55% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md index 521eff0b0d48..6ffecde0fbc2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ArgoWorkflowTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger ## Properties @@ -6,8 +6,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **args** | **[str]** | | [optional] **operation** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md new file mode 100644 index 000000000000..258444a918a0 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md @@ -0,0 +1,18 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**configmap** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] +**file** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md) | | [optional] +**git** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md) | | [optional] +**inline** | **str** | | [optional] +**resource** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md) | | [optional] +**s3** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md) | | [optional] +**url** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md similarity index 57% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md 
rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md index e974a38ad480..4360ab0ab9f2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AzureEventHubsTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger ## Properties @@ -6,8 +6,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **fqdn** | **str** | | [optional] **hub_name** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] **shared_access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **shared_access_key_name** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md similarity index 75% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md index 56bb6b723a1a..e7ed3569ddb1 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1AzureEventsHubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **fqdn** | **str** | | [optional] **hub_name** | **str** | | [optional] **metadata** | **{str: (str,)}** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md similarity index 77% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md rename to 
sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md index 28b04f2849d0..91f87b32c848 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1AzureQueueStorageEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource ## Properties @@ -7,7 +7,7 @@ Name | Type | Description | Notes **connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **decode_message** | **bool** | | [optional] **dlq** | **bool** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **queue_name** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md similarity index 66% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md index feb5ea655d5a..d45e9e36dbe7 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md @@ -1,17 +1,17 @@ -# IoArgoprojEventsV1alpha1AzureServiceBusEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource ## Properties Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- **connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **fully_qualified_namespace** | **str** | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **queue_name** | **str** | | [optional] **subscription_name** | **str** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **topic_name** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md new file mode 100644 index 000000000000..ff96108b6998 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md @@ -0,0 +1,18 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** 
| [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**queue_name** | **str** | | [optional] +**subscription_name** | **str** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic_name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md new file mode 100644 index 000000000000..fb8f2f193e74 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md @@ -0,0 +1,15 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**duration** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md) | | [optional] +**factor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md) | | [optional] +**jitter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount.md) | | [optional] +**steps** | **int** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | 
any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md index 5bbabe5c2265..c806799a114c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BasicAuth.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1BasicAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md similarity index 68% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md index ae1fa15c976a..24f279b6d64f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketAuth.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1BitbucketAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**basic** | [**IoArgoprojEventsV1alpha1BitbucketBasicAuth**](IoArgoprojEventsV1alpha1BitbucketBasicAuth.md) | | [optional] +**basic** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md) | | [optional] **oauth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md index 40784673659c..95a4eabfa992 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketBasicAuth.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1BitbucketBasicAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketBasicAuth ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md new file mode 100644 index 000000000000..5a016847b879 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md @@ -0,0 +1,21 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketAuth.md) | | [optional] +**delete_hook_on_finish** | **bool** | | [optional] +**events** | **[str]** | Events this webhook is 
subscribed to. | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**metadata** | **{str: (str,)}** | | [optional] +**owner** | **str** | | [optional] +**project_key** | **str** | | [optional] +**repositories** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md) | | [optional] +**repository_slug** | **str** | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md index 29cfa7e94f4c..4717775e29c7 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketRepository.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1BitbucketRepository +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketRepository ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md new file mode 100644 index 000000000000..b2afdffa4e1b --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md @@ -0,0 +1,27 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**access_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**bitbucketserver_base_url** | **str** | BitbucketServerBaseURL is the base URL for API requests to a custom endpoint. | [optional] +**check_interval** | **str** | | [optional] +**delete_hook_on_finish** | **bool** | | [optional] +**events** | **[str]** | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**metadata** | **{str: (str,)}** | | [optional] +**one_event_per_change** | **bool** | | [optional] +**project_key** | **str** | | [optional] +**projects** | **[str]** | | [optional] +**repositories** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md) | | [optional] +**repository_slug** | **str** | | [optional] +**skip_branch_refs_changed_on_open_pr** | **bool** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] +**webhook_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any 
string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md new file mode 100644 index 000000000000..62a941bd0ec2 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository.md @@ -0,0 +1,13 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerRepository + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**project_key** | **str** | ProjectKey is the key of project for which integration needs to set up. | [optional] +**repository_slug** | **str** | RepositorySlug is the slug of the repository for which integration needs to set up. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md similarity index 63% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md index 866b69ee7106..84132ef89437 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CalendarEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md @@ -1,14 +1,14 @@ -# IoArgoprojEventsV1alpha1CalendarEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **exclusion_dates** | **[str]** | ExclusionDates defines the list of DATE-TIME exceptions for recurring events. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **interval** | **str** | | [optional] **metadata** | **{str: (str,)}** | | [optional] -**persistence** | [**IoArgoprojEventsV1alpha1EventPersistence**](IoArgoprojEventsV1alpha1EventPersistence.md) | | [optional] +**persistence** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md) | | [optional] **schedule** | **str** | | [optional] **timezone** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md index aea20471dcea..780b7f94a665 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CatchupConfiguration.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1CatchupConfiguration +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md similarity index 93% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md rename to 
sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md index e99152449026..b54f271a34b5 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Condition.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1Condition +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md similarity index 87% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md index fb0076af3e7f..f23b7c4f896b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetByTime.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ConditionsResetByTime +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md new file mode 100644 index 000000000000..6a69b89e9d1b --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md @@ -0,0 +1,12 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**by_time** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime.md) | | [optional] +**any string name** | **bool, date, 
datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md index 3b1b1d92cfc3..8e3151b6675d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConfigMapPersistence.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ConfigMapPersistence +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md new file mode 100644 index 000000000000..f513c675766b --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md @@ -0,0 +1,17 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**env** | [**[EnvVar]**](EnvVar.md) | | [optional] +**env_from** | [**[EnvFromSource]**](EnvFromSource.md) | | [optional] +**image_pull_policy** | **str** | | [optional] +**resources** | [**ResourceRequirements**](ResourceRequirements.md) | | [optional] +**security_context** | [**SecurityContext**](SecurityContext.md) | | [optional] +**volume_mounts** | [**[VolumeMount]**](VolumeMount.md) | 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md similarity index 61% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md index b8338d9fc504..a8bca267526a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1CustomTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1CustomTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger CustomTrigger refers to the specification of the custom trigger. @@ -6,8 +6,8 @@ CustomTrigger refers to the specification of the custom trigger. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cert_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved custom trigger trigger object. | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **secure** | **bool** | | [optional] **server_name_override** | **str** | ServerNameOverride for the secure connection between sensor and custom trigger gRPC server. | [optional] **server_url** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md similarity index 95% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md index 44d16cb3af87..f3c625985f0e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1DataFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1DataFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md similarity index 79% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md index 60193ce7ab1b..3d227df06780 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmailTrigger.md +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EmailTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger EmailTrigger refers to the specification of the email notification trigger. @@ -8,7 +8,7 @@ Name | Type | Description | Notes **body** | **str** | | [optional] **_from** | **str** | | [optional] **host** | **str** | Host refers to the smtp host url to which email is send. | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] **port** | **int** | | [optional] **smtp_password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **subject** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md similarity index 60% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md index 550827d6bebb..63fb76442257 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EmitterEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EmitterEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource ## Properties @@ -7,12 +7,12 @@ Name | Type | Description | Notes **broker** | **str** | Broker URI to connect to. 
| [optional] **channel_key** | **str** | | [optional] **channel_name** | **str** | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md similarity index 95% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md index c2608917fa6e..b7b4295811be 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventContext.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EventContext +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md similarity index 64% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md index 57f84d47b760..2ef20f1ac038 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependency.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EventDependency +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency ## Properties @@ -6,10 +6,10 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_name** | **str** | | [optional] **event_source_name** | **str** | | [optional] -**filters** | [**IoArgoprojEventsV1alpha1EventDependencyFilter**](IoArgoprojEventsV1alpha1EventDependencyFilter.md) | | [optional] +**filters** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md) | | [optional] **filters_logical_operator** | **str** | FiltersLogicalOperator defines how different filters are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). 
| [optional] **name** | **str** | | [optional] -**transform** | [**IoArgoprojEventsV1alpha1EventDependencyTransformer**](IoArgoprojEventsV1alpha1EventDependencyTransformer.md) | | [optional] +**transform** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md similarity index 59% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md index cde7363bffdc..4914944129b0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter.md @@ -1,17 +1,17 @@ -# IoArgoprojEventsV1alpha1EventDependencyFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter EventDependencyFilter defines filters and constraints for a io.argoproj.workflow.v1alpha1. 
## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**context** | [**IoArgoprojEventsV1alpha1EventContext**](IoArgoprojEventsV1alpha1EventContext.md) | | [optional] -**data** | [**[IoArgoprojEventsV1alpha1DataFilter]**](IoArgoprojEventsV1alpha1DataFilter.md) | | [optional] +**context** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext.md) | | [optional] +**data** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter.md) | | [optional] **data_logical_operator** | **str** | DataLogicalOperator defines how multiple Data filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] **expr_logical_operator** | **str** | ExprLogicalOperator defines how multiple Exprs filters (if defined) are evaluated together. Available values: and (&&), or (||) Is optional and if left blank treated as and (&&). | [optional] -**exprs** | [**[IoArgoprojEventsV1alpha1ExprFilter]**](IoArgoprojEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] +**exprs** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md) | Exprs contains the list of expressions evaluated against the event payload. | [optional] **script** | **str** | Script refers to a Lua script evaluated to determine the validity of an io.argoproj.workflow.v1alpha1. 
| [optional] -**time** | [**IoArgoprojEventsV1alpha1TimeFilter**](IoArgoprojEventsV1alpha1TimeFilter.md) | | [optional] +**time** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md similarity index 87% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md index deeaf11d5e1b..66f2a739f361 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventDependencyTransformer.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EventDependencyTransformer +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md new file mode 100644 index 000000000000..b6e583c39bed --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence.md @@ -0,0 +1,13 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventPersistence + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**catchup** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CatchupConfiguration.md) | | [optional] +**config_map** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConfigMapPersistence.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md similarity index 55% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md index 5821251facad..9df953e05e56 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Sensor.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md @@ -1,12 +1,12 @@ -# IoArgoprojEventsV1alpha1Sensor +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**spec** | [**IoArgoprojEventsV1alpha1SensorSpec**](IoArgoprojEventsV1alpha1SensorSpec.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1SensorStatus**](IoArgoprojEventsV1alpha1SensorStatus.md) | | [optional] +**spec** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md) | | [optional] +**status** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md similarity index 87% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md index 9465351684e5..a53aa0c7764f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1EventSourceFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md similarity index 68% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md index c0125f170b40..fa3ec7e6a86b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorList.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1SensorList +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceList ## Properties Name | Type | Description | Notes ------------ | ------------- | 
------------- | ------------- -**items** | [**[IoArgoprojEventsV1alpha1Sensor]**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**items** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSource.md) | | [optional] **metadata** | [**ListMeta**](ListMeta.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md new file mode 100644 index 000000000000..6b24958cb67d --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec.md @@ -0,0 +1,46 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceSpec + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**amqp** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AMQPEventSource.md) | | [optional] +**azure_events_hub** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] +**azure_queue_storage** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureQueueStorageEventSource.md) | | [optional] +**azure_service_bus** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] +**bitbucket** | [**{str: 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketEventSource.md) | | [optional] +**bitbucketserver** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BitbucketServerEventSource.md) | | [optional] +**calendar** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CalendarEventSource.md) | | [optional] +**emitter** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmitterEventSource.md) | | [optional] +**event_bus_name** | **str** | | [optional] +**file** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md) | | [optional] +**generic** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md) | | [optional] +**gerrit** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md) | | [optional] +**github** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md) | | [optional] +**gitlab** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md) | | [optional] +**hdfs** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md) | | [optional] +**kafka** | [**{str: 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md) | | [optional] +**minio** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md) | | [optional] +**mqtt** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md) | | [optional] +**nats** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md) | | [optional] +**nsq** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md) | | [optional] +**pub_sub** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md) | | [optional] +**pulsar** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md) | | [optional] +**redis** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md) | | [optional] +**redis_stream** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md) | | [optional] +**replicas** | **int** | | [optional] +**resource** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md) | | [optional] +**service** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md) | | [optional] +**sftp** | [**{str: 
(GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md) | | [optional] +**slack** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md) | | [optional] +**sns** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md) | | [optional] +**sqs** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md) | | [optional] +**storage_grid** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md) | | [optional] +**stripe** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md) | | [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md) | | [optional] +**webhook** | [**{str: (GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource,)}**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md new file mode 100644 index 000000000000..213746246920 --- 
/dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus.md @@ -0,0 +1,12 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceStatus + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md similarity index 64% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md index be5739ec9754..b374ad59be18 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ExprFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter.md @@ -1,11 +1,11 @@ -# IoArgoprojEventsV1alpha1ExprFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **expr** | **str** | Expr refers to the expression that determines the outcome of the filter. | [optional] -**fields** | [**[IoArgoprojEventsV1alpha1PayloadField]**](IoArgoprojEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. 
| [optional] +**fields** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md) | Fields refers to set of keys that refer to the paths within event payload. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md index 1378d2e445a0..bf54b84990e0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileArtifact.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1FileArtifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md similarity index 60% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md index 4faab6841ac2..fc67d400dc17 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1FileEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1FileEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileEventSource 
FileEventSource describes an event-source for file related events. @@ -6,10 +6,10 @@ FileEventSource describes an event-source for file related events. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_type** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **polling** | **bool** | | [optional] -**watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] +**watch_path_config** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md similarity index 79% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md index ece5e692918b..e0062e72ca86 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GenericEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GenericEventSource +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GenericEventSource GenericEventSource refers to a generic event source. It can be used to implement a custom event source. @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **auth_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **config** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **insecure** | **bool** | Insecure determines the type of connection. | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md similarity index 59% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md index eebdd45b351f..dc056c599c36 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GerritEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource.md @@ -1,19 +1,19 @@ -# IoArgoprojEventsV1alpha1GerritEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GerritEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] **delete_hook_on_finish** | **bool** | | 
[optional] **events** | **[str]** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **gerrit_base_url** | **str** | | [optional] **hook_name** | **str** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **projects** | **[str]** | List of project namespace paths like \"whynowy/test\". | [optional] **ssl_verify** | **bool** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md similarity index 72% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md index bde5a82c5c59..e985d8164e1b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitArtifact.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GitArtifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact ## Properties @@ -6,11 +6,11 @@ Name | Type | Description | Notes ------------ | ------------- 
| ------------- | ------------- **branch** | **str** | | [optional] **clone_directory** | **str** | Directory to clone the repository. We clone complete directory because GitArtifact is not limited to any specific Git service providers. Hence we don't use any specific git provider client. | [optional] -**creds** | [**IoArgoprojEventsV1alpha1GitCreds**](IoArgoprojEventsV1alpha1GitCreds.md) | | [optional] +**creds** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md) | | [optional] **file_path** | **str** | | [optional] **insecure_ignore_host_key** | **bool** | | [optional] **ref** | **str** | | [optional] -**remote** | [**IoArgoprojEventsV1alpha1GitRemoteConfig**](IoArgoprojEventsV1alpha1GitRemoteConfig.md) | | [optional] +**remote** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md) | | [optional] **ssh_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tag** | **str** | | [optional] **url** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md similarity index 91% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md index 01cccc15d5b6..3fd4d00abead 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitCreds.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GitCreds +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md similarity index 91% rename from 
sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md index 1ceb72c6fc51..c012a55ebdbb 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitRemoteConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GitRemoteConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md index dee75a2bf9e8..b2a3ff7f4338 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubAppCreds.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GithubAppCreds +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md similarity index 57% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md index aae17bff0524..fd8bd51b91fe 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GithubEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GithubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubEventSource ## Properties @@ -9,8 +9,8 @@ Name | 
Type | Description | Notes **content_type** | **str** | | [optional] **delete_hook_on_finish** | **bool** | | [optional] **events** | **[str]** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**github_app** | [**IoArgoprojEventsV1alpha1GithubAppCreds**](IoArgoprojEventsV1alpha1GithubAppCreds.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**github_app** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GithubAppCreds.md) | | [optional] **github_base_url** | **str** | | [optional] **github_upload_url** | **str** | | [optional] **id** | **str** | | [optional] @@ -18,9 +18,9 @@ Name | Type | Description | Notes **metadata** | **{str: (str,)}** | | [optional] **organizations** | **[str]** | Organizations holds the names of organizations (used for organization level webhooks). Not required if Repositories is set. | [optional] **owner** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1OwnedRepositories]**](IoArgoprojEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. | [optional] +**repositories** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md) | Repositories holds the information of repositories, which uses repo owner as the key, and list of repo names as the value. Not required if Organizations is set. 
| [optional] **repository** | **str** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **webhook_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md similarity index 73% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md index d8e7a9627ea0..3bfd627a7b0d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1GitlabEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1GitlabEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitlabEventSource ## Properties @@ -8,14 +8,14 @@ Name | Type | Description | Notes **delete_hook_on_finish** | **bool** | | [optional] **enable_ssl_verification** | **bool** | | [optional] **events** | **[str]** | Events are gitlab event to listen to. Refer https://github.com/xanzy/go-gitlab/blob/bf34eca5d13a9f4c3f501d8a97b8ac226d55e4d9/projects.go#L794. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **gitlab_base_url** | **str** | | [optional] **groups** | **[str]** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **project_id** | **str** | | [optional] **projects** | **[str]** | | [optional] **secret_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md similarity index 77% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md index 9173487e0cbf..104092219289 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HDFSEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1HDFSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HDFSEventSource ## Properties @@ -6,7 +6,7 @@ Name | Type | Description | Notes ------------ 
| ------------- | ------------- | ------------- **addresses** | **[str]** | | [optional] **check_interval** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **hdfs_user** | **str** | HDFSUser is the user to access HDFS file system. It is ignored if either ccache or keytab is used. | [optional] **krb_c_cache_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **krb_config_config_map** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] @@ -16,7 +16,7 @@ Name | Type | Description | Notes **krb_username** | **str** | KrbUsername is the Kerberos username used with Kerberos keytab It must be set if keytab is used. | [optional] **metadata** | **{str: (str,)}** | | [optional] **type** | **str** | | [optional] -**watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] +**watch_path_config** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md new file mode 100644 index 000000000000..5aaca03864d5 --- /dev/null +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md @@ -0,0 +1,20 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**basic_auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**headers** | **{str: (str,)}** | | [optional] +**method** | **str** | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**secure_headers** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md) | | [optional] +**timeout** | **str** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **str** | URL refers to the URL to send HTTP request to. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md index f0a3aa991d09..de15be0a45c8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Int64OrString.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1Int64OrString +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md similarity index 81% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md index 45a4810d75ee..f0d931c45465 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Resource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource.md @@ -1,6 +1,6 @@ -# IoArgoprojEventsV1alpha1Resource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource -Resource represent arbitrary structured data. +K8SResource represent arbitrary structured data. 
## Properties Name | Type | Description | Notes diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md similarity index 71% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md index 4f5244055474..99c06cf36eb2 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1K8SResourcePolicy.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1K8SResourcePolicy +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] +**backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] **error_on_backoff_timeout** | **bool** | | [optional] **labels** | **{str: (str,)}** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md index 37bd57fc60ee..7aef598a9b7a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaConsumerGroup.md +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1KafkaConsumerGroup +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md similarity index 52% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md index c405c1022800..047bcf77719b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource.md @@ -1,19 +1,19 @@ -# IoArgoprojEventsV1alpha1KafkaEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config** | **str** | Yaml format Sarama config for Kafka connection. It follows the struct of sarama.Config. See https://github.com/IBM/sarama/blob/main/config.go e.g. 
consumer: fetch: min: 1 net: MaxOpenRequests: 5 +optional | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**consumer_group** | [**IoArgoprojEventsV1alpha1KafkaConsumerGroup**](IoArgoprojEventsV1alpha1KafkaConsumerGroup.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**consumer_group** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaConsumerGroup.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **limit_events_per_second** | **str** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **partition** | **str** | | [optional] -**sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**sasl** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **topic** | **str** | | [optional] **url** | **str** | | [optional] **version** | **str** | | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md new file mode 100644 index 
000000000000..c0c61219419d --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md @@ -0,0 +1,27 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger + +KafkaTrigger refers to the specification of the Kafka trigger. + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**compress** | **bool** | | [optional] +**flush_frequency** | **int** | | [optional] +**headers** | **{str: (str,)}** | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**partition** | **int** | | [optional] +**partitioning_key** | **str** | The partitioning key for the messages put on the Kafka topic. +optional. | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**required_acks** | **int** | RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional. 
| [optional] +**sasl** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md) | | [optional] +**schema_registry** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md) | | [optional] +**secure_headers** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **str** | | [optional] +**url** | **str** | URL of the Kafka broker, multiple URLs separated by comma. | [optional] +**version** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md index a8de5480dce0..a6f643c4d7c8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1LogTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1LogTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md new file mode 100644 index 000000000000..745fa813d698 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource.md @@ -0,0 +1,20 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1MQTTEventSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] +**client_id** | **str** | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**json_body** | **bool** | | [optional] +**metadata** | **{str: (str,)}** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**topic** | **str** | | [optional] +**url** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md index 
b7c578ba87af..1741338a1923 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Metadata.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1Metadata +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md similarity index 75% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md index 12620996759a..433f66e3bfb8 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSAuth.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1NATSAuth +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**basic** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] +**basic** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] **credential** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **nkey** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md new file mode 100644 index 000000000000..1f714a3ac28b --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource.md @@ -0,0 +1,20 @@ +# 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSEventsSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**json_body** | **bool** | | [optional] +**metadata** | **{str: (str,)}** | | [optional] +**queue** | **str** | | [optional] +**subject** | **str** | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md new file mode 100644 index 000000000000..589c80d5ffe3 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md @@ -0,0 +1,18 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger + +NATSTrigger refers to the specification of the NATS trigger. 
+ +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**subject** | **str** | Name of the subject to put message on. | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] +**url** | **str** | URL of the NATS cluster. | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md similarity index 55% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md index 9306f47b5dcb..9efadb29aae1 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NSQEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource.md @@ -1,16 +1,16 @@ -# IoArgoprojEventsV1alpha1NSQEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NSQEventSource ## Properties Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- **channel** | **str** | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **topic** | **str** | Topic to subscribe to. 
| [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md similarity index 62% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md index dec3f6409e66..34d5825430d1 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OpenWhiskTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1OpenWhiskTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger OpenWhiskTrigger refers to the specification of the OpenWhisk trigger. @@ -9,8 +9,8 @@ Name | Type | Description | Notes **auth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **host** | **str** | Host URL of the OpenWhisk. | [optional] **namespace** | **str** | Namespace for the action. Defaults to \"_\". +optional. | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] **version** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md index 4205b837d98f..923880f86174 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1OwnedRepositories.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1OwnedRepositories +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OwnedRepositories ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md similarity index 94% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md index 52c63a8edead..affc4a04535b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PayloadField.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField.md @@ -1,4 +1,4 @@ -# 
IoArgoprojEventsV1alpha1PayloadField +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField PayloadField binds a value at path within the event payload against a name. diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md similarity index 79% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md index e5608ed945e5..52738e34f5a0 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PubSubEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1PubSubEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PubSubEventSource PubSubEventSource refers to event-source for GCP PubSub related events. @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **credential_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **delete_subscription_on_finish** | **bool** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **project_id** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md similarity index 66% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md rename to 
sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md index 633cd68a9021..7d2c4050a012 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1PulsarEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarEventSource ## Properties @@ -7,11 +7,11 @@ Name | Type | Description | Notes **auth_athenz_params** | **{str: (str,)}** | | [optional] **auth_athenz_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **auth_token_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **tls_allow_insecure_connection** | **bool** | | [optional] **tls_trust_certs_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls_validate_hostname** | **bool** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md similarity index 52% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md index e1a343d48bbb..3afd4c6e4bde 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1PulsarTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1PulsarTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger PulsarTrigger refers to the specification of the Pulsar trigger. @@ -8,10 +8,10 @@ Name | Type | Description | Notes **auth_athenz_params** | **{str: (str,)}** | | [optional] **auth_athenz_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **auth_token_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. 
| [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**connection_backoff** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] +**payload** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **tls_allow_insecure_connection** | **bool** | | [optional] **tls_trust_certs_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **tls_validate_hostname** | **bool** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md index 9c1da4ce387c..4b66ea92400c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RateLimit.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1RateLimit +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md similarity index 67% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md index 7b0d7ef8c9a6..f7e3725aff6d 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1RedisEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisEventSource ## Properties @@ -6,13 +6,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **channels** | **[str]** | | [optional] **db** | **int** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **namespace** | **str** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md similarity index 69% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md index 3200d0d99e21..6b08f78cd612 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1RedisStreamEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1RedisStreamEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RedisStreamEventSource ## Properties @@ -6,13 +6,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **consumer_group** | **str** | | [optional] **db** | **int** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **host_address** | **str** | | [optional] **max_msg_count_per_read** | **int** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **streams** | **[str]** | Streams to look for entries. XREADGROUP is used on all streams using a single consumer group. 
| [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] +**tls** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md) | | [optional] **username** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md similarity index 78% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md index 77312a5274b9..467d9ae5e2b9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ResourceEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceEventSource ResourceEventSource refers to a event-source for K8s resource related events. @@ -6,7 +6,7 @@ ResourceEventSource refers to a event-source for K8s resource related events. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **event_types** | **[str]** | EventTypes is the list of event type to watch. Possible values are - ADD, UPDATE and DELETE. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1ResourceFilter**](IoArgoprojEventsV1alpha1ResourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md) | | [optional] **group_version_resource** | [**GroupVersionResource**](GroupVersionResource.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **namespace** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md similarity index 66% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md index f9f1ccec552b..b020f30ad2da 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ResourceFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ResourceFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ResourceFilter ## Properties @@ -6,8 +6,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **after_start** | **bool** | | [optional] **created_by** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. 
| [optional] -**fields** | [**[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] -**labels** | [**[IoArgoprojEventsV1alpha1Selector]**](IoArgoprojEventsV1alpha1Selector.md) | | [optional] +**fields** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md) | | [optional] +**labels** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md) | | [optional] **prefix** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md similarity index 70% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md index d55a738e5a70..f3ccdbbc088b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Artifact.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact.md @@ -1,15 +1,15 @@ -# IoArgoprojEventsV1alpha1S3Artifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**bucket** | [**IoArgoprojEventsV1alpha1S3Bucket**](IoArgoprojEventsV1alpha1S3Bucket.md) | | [optional] +**bucket** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md) | | [optional] **ca_certificate** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **endpoint** | **str** | | [optional] **events** | 
**[str]** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1S3Filter**](IoArgoprojEventsV1alpha1S3Filter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md) | | [optional] **insecure** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **region** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md index a1571f97912c..e648da484405 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Bucket.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1S3Bucket +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md index c9254327b1e6..3953721b9678 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1S3Filter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1S3Filter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md similarity index 91% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md 
rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md index 1924a4e99c90..fcbbdcd995e9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SASLConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SASLConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md similarity index 70% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md index f6bdf25a6782..4e98137a0bcf 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SFTPEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SFTPEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SFTPEventSource SFTPEventSource describes an event-source for sftp related events. 
@@ -7,13 +7,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **address** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **event_type** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **password** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **poll_interval_duration** | **str** | | [optional] **ssh_key_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **username** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**watch_path_config** | [**IoArgoprojEventsV1alpha1WatchPathConfig**](IoArgoprojEventsV1alpha1WatchPathConfig.md) | | [optional] +**watch_path_config** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md similarity index 68% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md index b0290d3e73f3..fe54c2ea4803 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SNSEventSource.md +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SNSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SNSEventSource ## Properties @@ -6,14 +6,14 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **endpoint** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **region** | **str** | | [optional] **role_arn** | **str** | | [optional] **secret_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **topic_arn** | **str** | | [optional] **validate_signature** | **bool** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md similarity index 83% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md rename to 
sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md index fe350f9f4a95..eff9448ceccb 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SQSEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SQSEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SQSEventSource ## Properties @@ -7,7 +7,7 @@ Name | Type | Description | Notes **access_key** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **dlq** | **bool** | | [optional] **endpoint** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **json_body** | **bool** | | [optional] **metadata** | **{str: (str,)}** | | [optional] **queue** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md similarity index 70% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md index 6e148cf15e0a..fda45f05254b 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SchemaRegistryConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1SchemaRegistryConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**auth** | 
[**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] +**auth** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth.md) | | [optional] **schema_id** | **int** | | [optional] **url** | **str** | Schema Registry URL. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md similarity index 67% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md index e71ac03b86d0..af869f63c442 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SecureHeader.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader.md @@ -1,11 +1,11 @@ -# IoArgoprojEventsV1alpha1SecureHeader +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] -**value_from** | [**IoArgoprojEventsV1alpha1ValueFromSource**](IoArgoprojEventsV1alpha1ValueFromSource.md) | | [optional] +**value_from** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md similarity index 91% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md index 40dd74f867f5..af6d5121421a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Selector.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1Selector +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Selector Selector represents conditional operation to select K8s objects. diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md similarity index 57% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md index d7ed376ebf36..84d54967fc75 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md @@ -1,12 +1,12 @@ -# IoArgoprojEventsV1alpha1EventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **metadata** | [**ObjectMeta**](ObjectMeta.md) | | [optional] -**spec** | [**IoArgoprojEventsV1alpha1EventSourceSpec**](IoArgoprojEventsV1alpha1EventSourceSpec.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1EventSourceStatus**](IoArgoprojEventsV1alpha1EventSourceStatus.md) | | [optional] +**spec** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md) | | [optional] +**status** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md similarity index 70% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md index 87fffdf0a007..4f697ce08000 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceList.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md @@ -1,10 +1,10 @@ -# IoArgoprojEventsV1alpha1EventSourceList +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**items** | [**[IoArgoprojEventsV1alpha1EventSource]**](IoArgoprojEventsV1alpha1EventSource.md) | | [optional] +**items** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **metadata** | [**ListMeta**](ListMeta.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md similarity index 55% rename from 
sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md index 597a071caf9d..d3c24c35c3f3 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorSpec.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec.md @@ -1,17 +1,17 @@ -# IoArgoprojEventsV1alpha1SensorSpec +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**dependencies** | [**[IoArgoprojEventsV1alpha1EventDependency]**](IoArgoprojEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. | [optional] +**dependencies** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency.md) | Dependencies is a list of the events that this sensor is dependent on. | [optional] **error_on_failed_round** | **bool** | ErrorOnFailedRound if set to true, marks sensor state as `error` if the previous trigger round fails. Once sensor state is set to `error`, no further triggers will be processed. | [optional] **event_bus_name** | **str** | | [optional] **logging_fields** | **{str: (str,)}** | | [optional] **replicas** | **int** | | [optional] **revision_history_limit** | **int** | | [optional] -**template** | [**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**triggers** | [**[IoArgoprojEventsV1alpha1Trigger]**](IoArgoprojEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. 
| [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md) | | [optional] +**triggers** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md) | Triggers is a list of the things that this sensor evokes. These are the outputs from this sensor. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md similarity index 70% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md index 4a0ffb5ac287..de1045305863 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SensorStatus.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus.md @@ -1,11 +1,11 @@ -# IoArgoprojEventsV1alpha1SensorStatus +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus SensorStatus contains information about the status of a sensor. 
## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md similarity index 72% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md index 172180919f34..a26c72ae3658 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Service.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service.md @@ -1,10 +1,11 @@ -# IoArgoprojEventsV1alpha1Service +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Service ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **cluster_ip** | **str** | | [optional] +**metadata** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md) | | [optional] **ports** | [**[ServicePort]**](ServicePort.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md similarity index 61% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md index a2d759744472..a9051ad8d50a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource.md @@ -1,14 +1,14 @@ -# IoArgoprojEventsV1alpha1SlackEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackEventSource ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **signing_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md index f6b117aa48ca..07c6fd5dfbda 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackSender.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SlackSender +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md index ab92f1436bfa..a9ba90e7682a 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackThread.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SlackThread +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md similarity index 57% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md index dd7319e691a2..7a2e9ae383ba 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1SlackTrigger.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1SlackTrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger 
SlackTrigger refers to the specification of the slack notification trigger. @@ -9,10 +9,10 @@ Name | Type | Description | Notes **blocks** | **str** | | [optional] **channel** | **str** | | [optional] **message** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**sender** | [**IoArgoprojEventsV1alpha1SlackSender**](IoArgoprojEventsV1alpha1SlackSender.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**sender** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender.md) | | [optional] **slack_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**thread** | [**IoArgoprojEventsV1alpha1SlackThread**](IoArgoprojEventsV1alpha1SlackThread.md) | | [optional] +**thread** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md similarity index 53% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md index 2d67821586bc..41a8a03947d9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StandardK8STrigger.md +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1StandardK8STrigger +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger ## Properties @@ -6,9 +6,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **live_object** | **bool** | | [optional] **operation** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved K8s trigger object. | [optional] **patch_strategy** | **str** | | [optional] -**source** | [**IoArgoprojEventsV1alpha1ArtifactLocation**](IoArgoprojEventsV1alpha1ArtifactLocation.md) | | [optional] +**source** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md similarity index 69% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md index 0c38640bbc90..6b943e177707 100644 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Status.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status.md @@ -1,11 +1,11 @@ -# IoArgoprojEventsV1alpha1Status +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status Status is a common structure which can be used for Status field. ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**conditions** | [**[IoArgoprojEventsV1alpha1Condition]**](IoArgoprojEventsV1alpha1Condition.md) | | [optional] +**conditions** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md index 7a24c62f85aa..24da47d1d8a5 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StatusPolicy.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1StatusPolicy +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md similarity index 67% rename from 
sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md index 7ecb7ca43a1d..a353cc82ec26 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1StorageGridEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridEventSource ## Properties @@ -8,11 +8,11 @@ Name | Type | Description | Notes **auth_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] **bucket** | **str** | Name of the bucket to register notifications for. | [optional] **events** | **[str]** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1StorageGridFilter**](IoArgoprojEventsV1alpha1StorageGridFilter.md) | | [optional] +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md) | | [optional] **metadata** | **{str: (str,)}** | | [optional] **region** | **str** | | [optional] **topic_arn** | **str** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md similarity index 88% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md index 4427ab20b072..d52dabaeb85f 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StorageGridFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1StorageGridFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StorageGridFilter ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md similarity index 73% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md index b23eb28bc1a6..9b4ffbec64db 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1StripeEventSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1StripeEventSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StripeEventSource ## Properties @@ -8,7 +8,7 @@ Name | Type | Description | Notes **create_webhook** | **bool** | | [optional] **event_filter** | **[str]** | | [optional] **metadata** | **{str: (str,)}** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] +**webhook** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value 
must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md similarity index 93% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md index 20810d5d7563..dde10f2540bf 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TLSConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1TLSConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig TLSConfig refers to TLS configuration for a client. diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md similarity index 73% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md index cbdbe9bc7ad7..1702a14537e9 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Template.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template.md @@ -1,13 +1,13 @@ -# IoArgoprojEventsV1alpha1Template +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **affinity** | [**Affinity**](Affinity.md) | | [optional] -**container** | [**Container**](Container.md) | | [optional] +**container** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container.md) | | [optional] **image_pull_secrets** | 
[**[LocalObjectReference]**](LocalObjectReference.md) | | [optional] -**metadata** | [**IoArgoprojEventsV1alpha1Metadata**](IoArgoprojEventsV1alpha1Metadata.md) | | [optional] +**metadata** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata.md) | | [optional] **node_selector** | **{str: (str,)}** | | [optional] **priority** | **int** | | [optional] **priority_class_name** | **str** | | [optional] diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md similarity index 94% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md index b16640181dcc..2a63de1d5c5e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TimeFilter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1TimeFilter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter TimeFilter describes a window in time. It filters out events that occur outside the time limits. In other words, only events that occur after Start and before Stop will pass this filter. 
diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md new file mode 100644 index 000000000000..1c2aaffd3b76 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md @@ -0,0 +1,18 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**at_least_once** | **bool** | | [optional] +**dlq_trigger** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger.md) | | [optional] +**parameters** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md) | | [optional] +**policy** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md) | | [optional] +**rate_limit** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit.md) | | [optional] +**retry_strategy** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff.md) | | [optional] +**template** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md similarity index 79% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md index 533abe24f57c..21433163cedb 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameter.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1TriggerParameter +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter ## Properties @@ -6,7 +6,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **dest** | **str** | Dest is the JSONPath of a resource key. A path is a series of keys separated by a dot. The colon character can be escaped with '.' The -1 key can be used to append a value to an existing array. See https://github.com/tidwall/sjson#path-syntax for more information about how this is used. | [optional] **operation** | **str** | Operation is what to do with the existing value at Dest, whether to 'prepend', 'overwrite', or 'append' it. 
| [optional] -**src** | [**IoArgoprojEventsV1alpha1TriggerParameterSource**](IoArgoprojEventsV1alpha1TriggerParameterSource.md) | | [optional] +**src** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md similarity index 96% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md index 92f928eba30b..0934282fe583 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerParameterSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1TriggerParameterSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md new file mode 100644 index 000000000000..66f9c5748a63 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy.md @@ -0,0 +1,13 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**k8s** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy.md) | | [optional] +**status** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md new file mode 100644 index 000000000000..103e731495db --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate.md @@ -0,0 +1,29 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate + +TriggerTemplate is the template that describes trigger specification. 
+ +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**argo_workflow** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger.md) | | [optional] +**aws_lambda** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger.md) | | [optional] +**azure_event_hubs** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger.md) | | [optional] +**azure_service_bus** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger.md) | | [optional] +**conditions** | **str** | | [optional] +**conditions_reset** | [**[GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria]**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria.md) | | [optional] +**custom** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger.md) | | [optional] +**email** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger.md) | | [optional] +**http** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger.md) | | [optional] +**k8s** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger.md) | | [optional] +**kafka** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger.md) | | [optional] +**log** | 
[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger.md) | | [optional] +**name** | **str** | Name is a unique name of the action to take. | [optional] +**nats** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger.md) | | [optional] +**open_whisk** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger.md) | | [optional] +**pulsar** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger.md) | | [optional] +**slack** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md index 48dd7f10d23d..eb9d2c5c0c8c 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1URLArtifact.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1URLArtifact +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact URLArtifact contains information about an artifact at an http endpoint. 
diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md similarity index 90% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md index 6feb2846fb32..82a00bf1c591 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ValueFromSource.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1ValueFromSource +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md similarity index 89% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md index 9594f1962997..2fefb56b6de1 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WatchPathConfig.md +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1WatchPathConfig +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WatchPathConfig ## Properties diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md similarity index 94% rename from sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md rename to sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md index 4dfce684b23d..74d749adfd3e 100644 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookContext.md +++ 
b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md @@ -1,4 +1,4 @@ -# IoArgoprojEventsV1alpha1WebhookContext +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext ## Properties diff --git a/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md new file mode 100644 index 000000000000..8465c2926096 --- /dev/null +++ b/sdks/python/client/docs/GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource.md @@ -0,0 +1,13 @@ +# GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookEventSource + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**filter** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventSourceFilter.md) | | [optional] +**webhook_context** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1WebhookContext.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/ImageVolumeSource.md b/sdks/python/client/docs/ImageVolumeSource.md new file mode 100644 index 000000000000..c0a0e774b90c --- /dev/null +++ b/sdks/python/client/docs/ImageVolumeSource.md @@ -0,0 +1,14 @@ +# ImageVolumeSource + +ImageVolumeSource represents a image volume resource. + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pull_policy** | **str** | Policy for pulling OCI objects. 
Possible values are: Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. | [optional] +**reference** | **str** | Required: Image or artifact reference to be used. Behaves in the same way as pod.spec.containers[*].image. Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. More info: https://kubernetes.io/docs/concepts/containers/images This field is optional to allow higher level config management to default or override container images in workload controllers like Deployments and StatefulSets. 
| [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md deleted file mode 100644 index 3a2a7b9c774b..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AMQPEventSource.md +++ /dev/null @@ -1,26 +0,0 @@ -# IoArgoprojEventsV1alpha1AMQPEventSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**consume** | [**IoArgoprojEventsV1alpha1AMQPConsumeConfig**](IoArgoprojEventsV1alpha1AMQPConsumeConfig.md) | | [optional] -**exchange_declare** | [**IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig**](IoArgoprojEventsV1alpha1AMQPExchangeDeclareConfig.md) | | [optional] -**exchange_name** | **str** | | [optional] -**exchange_type** | **str** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**queue_bind** | [**IoArgoprojEventsV1alpha1AMQPQueueBindConfig**](IoArgoprojEventsV1alpha1AMQPQueueBindConfig.md) | | [optional] -**queue_declare** | [**IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig**](IoArgoprojEventsV1alpha1AMQPQueueDeclareConfig.md) | | [optional] -**routing_key** | **str** | | [optional] -**tls** | 
[**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **str** | | [optional] -**url_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md deleted file mode 100644 index dfdff21498b5..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ArtifactLocation.md +++ /dev/null @@ -1,18 +0,0 @@ -# IoArgoprojEventsV1alpha1ArtifactLocation - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**configmap** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] -**file** | [**IoArgoprojEventsV1alpha1FileArtifact**](IoArgoprojEventsV1alpha1FileArtifact.md) | | [optional] -**git** | [**IoArgoprojEventsV1alpha1GitArtifact**](IoArgoprojEventsV1alpha1GitArtifact.md) | | [optional] -**inline** | **str** | | [optional] -**resource** | [**IoArgoprojEventsV1alpha1Resource**](IoArgoprojEventsV1alpha1Resource.md) | | [optional] -**s3** | [**IoArgoprojEventsV1alpha1S3Artifact**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] -**url** | [**IoArgoprojEventsV1alpha1URLArtifact**](IoArgoprojEventsV1alpha1URLArtifact.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md deleted file mode 100644 index 3ace72346914..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md +++ /dev/null @@ -1,18 +0,0 @@ -# IoArgoprojEventsV1alpha1AzureServiceBusTrigger - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**connection_string** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] -**queue_name** | **str** | | [optional] -**subscription_name** | **str** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic_name** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md deleted file mode 100644 index e164eab042bf..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Backoff.md +++ /dev/null @@ -1,15 +0,0 @@ -# IoArgoprojEventsV1alpha1Backoff - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**duration** | 
[**IoArgoprojEventsV1alpha1Int64OrString**](IoArgoprojEventsV1alpha1Int64OrString.md) | | [optional] -**factor** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] -**jitter** | [**IoArgoprojEventsV1alpha1Amount**](IoArgoprojEventsV1alpha1Amount.md) | | [optional] -**steps** | **int** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md deleted file mode 100644 index dc850e57011b..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketEventSource.md +++ /dev/null @@ -1,21 +0,0 @@ -# IoArgoprojEventsV1alpha1BitbucketEventSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BitbucketAuth**](IoArgoprojEventsV1alpha1BitbucketAuth.md) | | [optional] -**delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | Events this webhook is subscribed to. 
| [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**owner** | **str** | | [optional] -**project_key** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1BitbucketRepository]**](IoArgoprojEventsV1alpha1BitbucketRepository.md) | | [optional] -**repository_slug** | **str** | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md deleted file mode 100644 index 53eedc2725d6..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerEventSource.md +++ /dev/null @@ -1,23 +0,0 @@ -# IoArgoprojEventsV1alpha1BitbucketServerEventSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**access_token** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**bitbucketserver_base_url** | **str** | | [optional] -**delete_hook_on_finish** | **bool** | | [optional] -**events** | **[str]** | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**project_key** | **str** | | [optional] -**repositories** | [**[IoArgoprojEventsV1alpha1BitbucketServerRepository]**](IoArgoprojEventsV1alpha1BitbucketServerRepository.md) | | 
[optional] -**repository_slug** | **str** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**webhook** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**webhook_secret** | [**SecretKeySelector**](SecretKeySelector.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md deleted file mode 100644 index 86ab4e4b55cf..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1BitbucketServerRepository.md +++ /dev/null @@ -1,13 +0,0 @@ -# IoArgoprojEventsV1alpha1BitbucketServerRepository - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**project_key** | **str** | | [optional] -**repository_slug** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md deleted file mode 100644 index 1e721e1eb847..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1ConditionsResetCriteria.md +++ /dev/null @@ -1,12 +0,0 @@ -# 
IoArgoprojEventsV1alpha1ConditionsResetCriteria - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**by_time** | [**IoArgoprojEventsV1alpha1ConditionsResetByTime**](IoArgoprojEventsV1alpha1ConditionsResetByTime.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md deleted file mode 100644 index a7d85878c6f0..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventPersistence.md +++ /dev/null @@ -1,13 +0,0 @@ -# IoArgoprojEventsV1alpha1EventPersistence - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**catchup** | [**IoArgoprojEventsV1alpha1CatchupConfiguration**](IoArgoprojEventsV1alpha1CatchupConfiguration.md) | | [optional] -**config_map** | [**IoArgoprojEventsV1alpha1ConfigMapPersistence**](IoArgoprojEventsV1alpha1ConfigMapPersistence.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md deleted file mode 100644 index 060cb041f0a1..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceSpec.md +++ 
/dev/null @@ -1,46 +0,0 @@ -# IoArgoprojEventsV1alpha1EventSourceSpec - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**amqp** | [**{str: (IoArgoprojEventsV1alpha1AMQPEventSource,)}**](IoArgoprojEventsV1alpha1AMQPEventSource.md) | | [optional] -**azure_events_hub** | [**{str: (IoArgoprojEventsV1alpha1AzureEventsHubEventSource,)}**](IoArgoprojEventsV1alpha1AzureEventsHubEventSource.md) | | [optional] -**azure_queue_storage** | [**{str: (IoArgoprojEventsV1alpha1AzureQueueStorageEventSource,)}**](IoArgoprojEventsV1alpha1AzureQueueStorageEventSource.md) | | [optional] -**azure_service_bus** | [**{str: (IoArgoprojEventsV1alpha1AzureServiceBusEventSource,)}**](IoArgoprojEventsV1alpha1AzureServiceBusEventSource.md) | | [optional] -**bitbucket** | [**{str: (IoArgoprojEventsV1alpha1BitbucketEventSource,)}**](IoArgoprojEventsV1alpha1BitbucketEventSource.md) | | [optional] -**bitbucketserver** | [**{str: (IoArgoprojEventsV1alpha1BitbucketServerEventSource,)}**](IoArgoprojEventsV1alpha1BitbucketServerEventSource.md) | | [optional] -**calendar** | [**{str: (IoArgoprojEventsV1alpha1CalendarEventSource,)}**](IoArgoprojEventsV1alpha1CalendarEventSource.md) | | [optional] -**emitter** | [**{str: (IoArgoprojEventsV1alpha1EmitterEventSource,)}**](IoArgoprojEventsV1alpha1EmitterEventSource.md) | | [optional] -**event_bus_name** | **str** | | [optional] -**file** | [**{str: (IoArgoprojEventsV1alpha1FileEventSource,)}**](IoArgoprojEventsV1alpha1FileEventSource.md) | | [optional] -**generic** | [**{str: (IoArgoprojEventsV1alpha1GenericEventSource,)}**](IoArgoprojEventsV1alpha1GenericEventSource.md) | | [optional] -**gerrit** | [**{str: (IoArgoprojEventsV1alpha1GerritEventSource,)}**](IoArgoprojEventsV1alpha1GerritEventSource.md) | | [optional] -**github** | [**{str: (IoArgoprojEventsV1alpha1GithubEventSource,)}**](IoArgoprojEventsV1alpha1GithubEventSource.md) | | [optional] -**gitlab** | [**{str: 
(IoArgoprojEventsV1alpha1GitlabEventSource,)}**](IoArgoprojEventsV1alpha1GitlabEventSource.md) | | [optional] -**hdfs** | [**{str: (IoArgoprojEventsV1alpha1HDFSEventSource,)}**](IoArgoprojEventsV1alpha1HDFSEventSource.md) | | [optional] -**kafka** | [**{str: (IoArgoprojEventsV1alpha1KafkaEventSource,)}**](IoArgoprojEventsV1alpha1KafkaEventSource.md) | | [optional] -**minio** | [**{str: (IoArgoprojEventsV1alpha1S3Artifact,)}**](IoArgoprojEventsV1alpha1S3Artifact.md) | | [optional] -**mqtt** | [**{str: (IoArgoprojEventsV1alpha1MQTTEventSource,)}**](IoArgoprojEventsV1alpha1MQTTEventSource.md) | | [optional] -**nats** | [**{str: (IoArgoprojEventsV1alpha1NATSEventsSource,)}**](IoArgoprojEventsV1alpha1NATSEventsSource.md) | | [optional] -**nsq** | [**{str: (IoArgoprojEventsV1alpha1NSQEventSource,)}**](IoArgoprojEventsV1alpha1NSQEventSource.md) | | [optional] -**pub_sub** | [**{str: (IoArgoprojEventsV1alpha1PubSubEventSource,)}**](IoArgoprojEventsV1alpha1PubSubEventSource.md) | | [optional] -**pulsar** | [**{str: (IoArgoprojEventsV1alpha1PulsarEventSource,)}**](IoArgoprojEventsV1alpha1PulsarEventSource.md) | | [optional] -**redis** | [**{str: (IoArgoprojEventsV1alpha1RedisEventSource,)}**](IoArgoprojEventsV1alpha1RedisEventSource.md) | | [optional] -**redis_stream** | [**{str: (IoArgoprojEventsV1alpha1RedisStreamEventSource,)}**](IoArgoprojEventsV1alpha1RedisStreamEventSource.md) | | [optional] -**replicas** | **int** | | [optional] -**resource** | [**{str: (IoArgoprojEventsV1alpha1ResourceEventSource,)}**](IoArgoprojEventsV1alpha1ResourceEventSource.md) | | [optional] -**service** | [**IoArgoprojEventsV1alpha1Service**](IoArgoprojEventsV1alpha1Service.md) | | [optional] -**sftp** | [**{str: (IoArgoprojEventsV1alpha1SFTPEventSource,)}**](IoArgoprojEventsV1alpha1SFTPEventSource.md) | | [optional] -**slack** | [**{str: (IoArgoprojEventsV1alpha1SlackEventSource,)}**](IoArgoprojEventsV1alpha1SlackEventSource.md) | | [optional] -**sns** | [**{str: 
(IoArgoprojEventsV1alpha1SNSEventSource,)}**](IoArgoprojEventsV1alpha1SNSEventSource.md) | | [optional] -**sqs** | [**{str: (IoArgoprojEventsV1alpha1SQSEventSource,)}**](IoArgoprojEventsV1alpha1SQSEventSource.md) | | [optional] -**storage_grid** | [**{str: (IoArgoprojEventsV1alpha1StorageGridEventSource,)}**](IoArgoprojEventsV1alpha1StorageGridEventSource.md) | | [optional] -**stripe** | [**{str: (IoArgoprojEventsV1alpha1StripeEventSource,)}**](IoArgoprojEventsV1alpha1StripeEventSource.md) | | [optional] -**template** | [**IoArgoprojEventsV1alpha1Template**](IoArgoprojEventsV1alpha1Template.md) | | [optional] -**webhook** | [**{str: (IoArgoprojEventsV1alpha1WebhookEventSource,)}**](IoArgoprojEventsV1alpha1WebhookEventSource.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md deleted file mode 100644 index cc92d67a515a..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1EventSourceStatus.md +++ /dev/null @@ -1,12 +0,0 @@ -# IoArgoprojEventsV1alpha1EventSourceStatus - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**status** | [**IoArgoprojEventsV1alpha1Status**](IoArgoprojEventsV1alpha1Status.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md deleted file mode 100644 index 258bd5459386..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1HTTPTrigger.md +++ /dev/null @@ -1,20 +0,0 @@ -# IoArgoprojEventsV1alpha1HTTPTrigger - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**basic_auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**headers** | **{str: (str,)}** | | [optional] -**method** | **str** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of key-value extracted from event's payload that are applied to the HTTP trigger resource. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**secure_headers** | [**[IoArgoprojEventsV1alpha1SecureHeader]**](IoArgoprojEventsV1alpha1SecureHeader.md) | | [optional] -**timeout** | **str** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **str** | URL refers to the URL to send HTTP request to. 
| [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md deleted file mode 100644 index 6fe39a9583b2..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1KafkaTrigger.md +++ /dev/null @@ -1,25 +0,0 @@ -# IoArgoprojEventsV1alpha1KafkaTrigger - -KafkaTrigger refers to the specification of the Kafka trigger. - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**compress** | **bool** | | [optional] -**flush_frequency** | **int** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Parameters is the list of parameters that is applied to resolved Kafka trigger object. | [optional] -**partition** | **int** | | [optional] -**partitioning_key** | **str** | The partitioning key for the messages put on the Kafka topic. +optional. | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | Payload is the list of key-value extracted from an event payload to construct the request payload. | [optional] -**required_acks** | **int** | RequiredAcks used in producer to tell the broker how many replica acknowledgements Defaults to 1 (Only wait for the leader to ack). +optional. 
| [optional] -**sasl** | [**IoArgoprojEventsV1alpha1SASLConfig**](IoArgoprojEventsV1alpha1SASLConfig.md) | | [optional] -**schema_registry** | [**IoArgoprojEventsV1alpha1SchemaRegistryConfig**](IoArgoprojEventsV1alpha1SchemaRegistryConfig.md) | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **str** | | [optional] -**url** | **str** | URL of the Kafka broker, multiple URLs separated by comma. | [optional] -**version** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md deleted file mode 100644 index 5c9b08b700d9..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1MQTTEventSource.md +++ /dev/null @@ -1,20 +0,0 @@ -# IoArgoprojEventsV1alpha1MQTTEventSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1BasicAuth**](IoArgoprojEventsV1alpha1BasicAuth.md) | | [optional] -**client_id** | **str** | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**topic** | **str** | | [optional] -**url** | **str** | | [optional] -**any string 
name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md deleted file mode 100644 index 7b3ee4432c5d..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSEventsSource.md +++ /dev/null @@ -1,19 +0,0 @@ -# IoArgoprojEventsV1alpha1NATSEventsSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**auth** | [**IoArgoprojEventsV1alpha1NATSAuth**](IoArgoprojEventsV1alpha1NATSAuth.md) | | [optional] -**connection_backoff** | [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**json_body** | **bool** | | [optional] -**metadata** | **{str: (str,)}** | | [optional] -**subject** | **str** | | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **str** | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md deleted file mode 100644 index 9259dfaa33b9..000000000000 --- 
a/sdks/python/client/docs/IoArgoprojEventsV1alpha1NATSTrigger.md +++ /dev/null @@ -1,17 +0,0 @@ -# IoArgoprojEventsV1alpha1NATSTrigger - -NATSTrigger refers to the specification of the NATS trigger. - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**payload** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**subject** | **str** | Name of the subject to put message on. | [optional] -**tls** | [**IoArgoprojEventsV1alpha1TLSConfig**](IoArgoprojEventsV1alpha1TLSConfig.md) | | [optional] -**url** | **str** | URL of the NATS cluster. | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md deleted file mode 100644 index 958a9dd01cf1..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1Trigger.md +++ /dev/null @@ -1,17 +0,0 @@ -# IoArgoprojEventsV1alpha1Trigger - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**at_least_once** | **bool** | | [optional] -**parameters** | [**[IoArgoprojEventsV1alpha1TriggerParameter]**](IoArgoprojEventsV1alpha1TriggerParameter.md) | | [optional] -**policy** | [**IoArgoprojEventsV1alpha1TriggerPolicy**](IoArgoprojEventsV1alpha1TriggerPolicy.md) | | [optional] -**rate_limit** | [**IoArgoprojEventsV1alpha1RateLimit**](IoArgoprojEventsV1alpha1RateLimit.md) | | [optional] -**retry_strategy** 
| [**IoArgoprojEventsV1alpha1Backoff**](IoArgoprojEventsV1alpha1Backoff.md) | | [optional] -**template** | [**IoArgoprojEventsV1alpha1TriggerTemplate**](IoArgoprojEventsV1alpha1TriggerTemplate.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md deleted file mode 100644 index 486b013ab84d..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerPolicy.md +++ /dev/null @@ -1,13 +0,0 @@ -# IoArgoprojEventsV1alpha1TriggerPolicy - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**k8s** | [**IoArgoprojEventsV1alpha1K8SResourcePolicy**](IoArgoprojEventsV1alpha1K8SResourcePolicy.md) | | [optional] -**status** | [**IoArgoprojEventsV1alpha1StatusPolicy**](IoArgoprojEventsV1alpha1StatusPolicy.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md deleted file mode 100644 index 8fa1fb8a116c..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1TriggerTemplate.md +++ /dev/null @@ -1,29 +0,0 @@ -# IoArgoprojEventsV1alpha1TriggerTemplate - -TriggerTemplate is the template that describes trigger 
specification. - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**argo_workflow** | [**IoArgoprojEventsV1alpha1ArgoWorkflowTrigger**](IoArgoprojEventsV1alpha1ArgoWorkflowTrigger.md) | | [optional] -**aws_lambda** | [**IoArgoprojEventsV1alpha1AWSLambdaTrigger**](IoArgoprojEventsV1alpha1AWSLambdaTrigger.md) | | [optional] -**azure_event_hubs** | [**IoArgoprojEventsV1alpha1AzureEventHubsTrigger**](IoArgoprojEventsV1alpha1AzureEventHubsTrigger.md) | | [optional] -**azure_service_bus** | [**IoArgoprojEventsV1alpha1AzureServiceBusTrigger**](IoArgoprojEventsV1alpha1AzureServiceBusTrigger.md) | | [optional] -**conditions** | **str** | | [optional] -**conditions_reset** | [**[IoArgoprojEventsV1alpha1ConditionsResetCriteria]**](IoArgoprojEventsV1alpha1ConditionsResetCriteria.md) | | [optional] -**custom** | [**IoArgoprojEventsV1alpha1CustomTrigger**](IoArgoprojEventsV1alpha1CustomTrigger.md) | | [optional] -**email** | [**IoArgoprojEventsV1alpha1EmailTrigger**](IoArgoprojEventsV1alpha1EmailTrigger.md) | | [optional] -**http** | [**IoArgoprojEventsV1alpha1HTTPTrigger**](IoArgoprojEventsV1alpha1HTTPTrigger.md) | | [optional] -**k8s** | [**IoArgoprojEventsV1alpha1StandardK8STrigger**](IoArgoprojEventsV1alpha1StandardK8STrigger.md) | | [optional] -**kafka** | [**IoArgoprojEventsV1alpha1KafkaTrigger**](IoArgoprojEventsV1alpha1KafkaTrigger.md) | | [optional] -**log** | [**IoArgoprojEventsV1alpha1LogTrigger**](IoArgoprojEventsV1alpha1LogTrigger.md) | | [optional] -**name** | **str** | Name is a unique name of the action to take. 
| [optional] -**nats** | [**IoArgoprojEventsV1alpha1NATSTrigger**](IoArgoprojEventsV1alpha1NATSTrigger.md) | | [optional] -**open_whisk** | [**IoArgoprojEventsV1alpha1OpenWhiskTrigger**](IoArgoprojEventsV1alpha1OpenWhiskTrigger.md) | | [optional] -**pulsar** | [**IoArgoprojEventsV1alpha1PulsarTrigger**](IoArgoprojEventsV1alpha1PulsarTrigger.md) | | [optional] -**slack** | [**IoArgoprojEventsV1alpha1SlackTrigger**](IoArgoprojEventsV1alpha1SlackTrigger.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md b/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md deleted file mode 100644 index be3c1b1659a2..000000000000 --- a/sdks/python/client/docs/IoArgoprojEventsV1alpha1WebhookEventSource.md +++ /dev/null @@ -1,13 +0,0 @@ -# IoArgoprojEventsV1alpha1WebhookEventSource - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**filter** | [**IoArgoprojEventsV1alpha1EventSourceFilter**](IoArgoprojEventsV1alpha1EventSourceFilter.md) | | [optional] -**webhook_context** | [**IoArgoprojEventsV1alpha1WebhookContext**](IoArgoprojEventsV1alpha1WebhookContext.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/sdks/python/client/docs/PersistentVolumeClaimCondition.md 
b/sdks/python/client/docs/PersistentVolumeClaimCondition.md index feeed8a9f3ec..e98443931d8f 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimCondition.md +++ b/sdks/python/client/docs/PersistentVolumeClaimCondition.md @@ -5,8 +5,8 @@ PersistentVolumeClaimCondition contains details about state of pvc ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**status** | **str** | | -**type** | **str** | | +**status** | **str** | Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=state%20of%20pvc-,conditions.status,-(string)%2C%20required | +**type** | **str** | Type is the type of the condition. More info: https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#:~:text=set%20to%20%27ResizeStarted%27.-,PersistentVolumeClaimCondition,-contains%20details%20about | **last_probe_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **last_transition_time** | **datetime** | Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers. | [optional] **message** | **str** | message is the human-readable message indicating details about last transition. 
| [optional] diff --git a/sdks/python/client/docs/PersistentVolumeClaimSpec.md b/sdks/python/client/docs/PersistentVolumeClaimSpec.md index 12318842b292..849fd1b36e09 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimSpec.md +++ b/sdks/python/client/docs/PersistentVolumeClaimSpec.md @@ -11,7 +11,7 @@ Name | Type | Description | Notes **resources** | [**VolumeResourceRequirements**](VolumeResourceRequirements.md) | | [optional] **selector** | [**LabelSelector**](LabelSelector.md) | | [optional] **storage_class_name** | **str** | storageClassName is the name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 | [optional] -**volume_attributes_class_name** | **str** | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled. | [optional] +**volume_attributes_class_name** | **str** | volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. 
If specified, the CSI driver will create or update the volume with the attributes defined in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass will be applied to the claim but it's not allowed to reset this field to empty string once it is set. If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass will be set by the persistentvolume controller if it exists. If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource exists. More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default). | [optional] **volume_mode** | **str** | volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec. | [optional] **volume_name** | **str** | volumeName is the binding reference to the PersistentVolume backing this claim. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/PersistentVolumeClaimStatus.md b/sdks/python/client/docs/PersistentVolumeClaimStatus.md index f88c6ddb88e8..d44f987f6edc 100644 --- a/sdks/python/client/docs/PersistentVolumeClaimStatus.md +++ b/sdks/python/client/docs/PersistentVolumeClaimStatus.md @@ -10,7 +10,7 @@ Name | Type | Description | Notes **allocated_resources** | **{str: (str,)}** | allocatedResources tracks the resources allocated to a PVC including its capacity. Key names follow standard Kubernetes label syntax. 
Valid values are either: * Un-prefixed keys: - storage - the capacity of the volume. * Custom resources must use implementation-defined prefixed names such as \"example.com/my-custom-resource\" Apart from above values - keys that are unprefixed or have kubernetes.io prefix are considered reserved and hence may not be used. Capacity reported here may be larger than the actual capacity when a volume expansion operation is requested. For storage quota, the larger value from allocatedResources and PVC.spec.resources is used. If allocatedResources is not set, PVC.spec.resources alone is used for quota calculation. If a volume expansion capacity request is lowered, allocatedResources is only lowered if there are no expansion operations in progress and if the actual volume capacity is equal or lower than the requested capacity. A controller that receives PVC update with previously unknown resourceName should ignore the update for the purpose it was designed. For example - a controller that only is responsible for resizing capacity of the volume, should ignore PVC updates that change other valid resources associated with PVC. This is an alpha field and requires enabling RecoverVolumeExpansionFailure feature. | [optional] **capacity** | **{str: (str,)}** | capacity represents the actual resources of the underlying volume. | [optional] **conditions** | [**[PersistentVolumeClaimCondition]**](PersistentVolumeClaimCondition.md) | conditions is the current Condition of persistent volume claim. If underlying persistent volume is being resized then the Condition will be set to 'Resizing'. | [optional] -**current_volume_attributes_class_name** | **str** | currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is an alpha field and requires enabling VolumeAttributesClass feature. 
| [optional] +**current_volume_attributes_class_name** | **str** | currentVolumeAttributesClassName is the current name of the VolumeAttributesClass the PVC is using. When unset, there is no VolumeAttributeClass applied to this PersistentVolumeClaim This is a beta field and requires enabling VolumeAttributesClass feature (off by default). | [optional] **modify_volume_status** | [**ModifyVolumeStatus**](ModifyVolumeStatus.md) | | [optional] **phase** | **str** | phase represents the current phase of PersistentVolumeClaim. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/PodAffinityTerm.md b/sdks/python/client/docs/PodAffinityTerm.md index 71f0c6ecb309..3127328bac46 100644 --- a/sdks/python/client/docs/PodAffinityTerm.md +++ b/sdks/python/client/docs/PodAffinityTerm.md @@ -7,8 +7,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **topology_key** | **str** | This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. | **label_selector** | [**LabelSelector**](LabelSelector.md) | | [optional] -**match_label_keys** | **[str]** | MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. 
The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. | [optional] -**mismatch_label_keys** | **[str]** | MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. | [optional] +**match_label_keys** | **[str]** | MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both matchLabelKeys and labelSelector. Also, matchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). | [optional] +**mismatch_label_keys** | **[str]** | MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both mismatchLabelKeys and labelSelector. Also, mismatchLabelKeys cannot be set when labelSelector isn't set. This is a beta field and requires enabling MatchLabelKeysInPodAffinity feature gate (enabled by default). | [optional] **namespace_selector** | [**LabelSelector**](LabelSelector.md) | | [optional] **namespaces** | **[str]** | namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means \"this pod's namespace\". | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/PodDNSConfigOption.md b/sdks/python/client/docs/PodDNSConfigOption.md index 987dd82b2051..9cfe6d3dbd14 100644 --- a/sdks/python/client/docs/PodDNSConfigOption.md +++ b/sdks/python/client/docs/PodDNSConfigOption.md @@ -5,8 +5,8 @@ PodDNSConfigOption defines DNS resolver options of a pod. ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**name** | **str** | Required. | [optional] -**value** | **str** | | [optional] +**name** | **str** | Name is this DNS resolver option's name. Required. | [optional] +**value** | **str** | Value is this DNS resolver option's value. 
| [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/PodSecurityContext.md b/sdks/python/client/docs/PodSecurityContext.md index ec2eef1e604c..5f3ec0d9ab5d 100644 --- a/sdks/python/client/docs/PodSecurityContext.md +++ b/sdks/python/client/docs/PodSecurityContext.md @@ -11,9 +11,11 @@ Name | Type | Description | Notes **run_as_group** | **int** | The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows. | [optional] **run_as_non_root** | **bool** | Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. | [optional] **run_as_user** | **int** | The UID to run the entrypoint of the container process. Defaults to user specified in image metadata if unspecified. May also be set in SecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence for that container. Note that this field cannot be set when spec.os.name is windows. | [optional] +**se_linux_change_policy** | **str** | seLinuxChangePolicy defines how the container's SELinux label is applied to all volumes used by the Pod. 
It has no effect on nodes that do not support SELinux or to volumes does not support SELinux. Valid values are \"MountOption\" and \"Recursive\". \"Recursive\" means relabeling of all files on all Pod volumes by the container runtime. This may be slow for large volumes, but allows mixing privileged and unprivileged Pods sharing the same volume on the same node. \"MountOption\" mounts all eligible Pod volumes with `-o context` mount option. This requires all Pods that share the same volume to use the same SELinux label. It is not possible to share the same volume among privileged and unprivileged Pods. Eligible volumes are in-tree FibreChannel and iSCSI volumes, and all CSI volumes whose CSI driver announces SELinux support by setting spec.seLinuxMount: true in their CSIDriver instance. Other volumes are always re-labelled recursively. \"MountOption\" value is allowed only when SELinuxMount feature gate is enabled. If not specified and SELinuxMount feature gate is enabled, \"MountOption\" is used. If not specified and SELinuxMount feature gate is disabled, \"MountOption\" is used for ReadWriteOncePod volumes and \"Recursive\" for all other volumes. This field affects only Pods that have SELinux label set, either in PodSecurityContext or in SecurityContext of all containers. All Pods that use the same volume should use the same seLinuxChangePolicy, otherwise some pods can get stuck in ContainerCreating state. Note that this field cannot be set when spec.os.name is windows. | [optional] **se_linux_options** | [**SELinuxOptions**](SELinuxOptions.md) | | [optional] **seccomp_profile** | [**SeccompProfile**](SeccompProfile.md) | | [optional] -**supplemental_groups** | **[int]** | A list of groups applied to the first process run in each container, in addition to the container's primary GID, the fsGroup (if specified), and group memberships defined in the container image for the uid of the container process. If unspecified, no additional groups are added to any container. 
Note that group memberships defined in the container image for the uid of the container process are still effective, even if they are not included in this list. Note that this field cannot be set when spec.os.name is windows. | [optional] +**supplemental_groups** | **[int]** | A list of groups applied to the first process run in each container, in addition to the container's primary GID and fsGroup (if specified). If the SupplementalGroupsPolicy feature is enabled, the supplementalGroupsPolicy field determines whether these are in addition to or instead of any group memberships defined in the container image. If unspecified, no additional groups are added, though group memberships defined in the container image may still be used, depending on the supplementalGroupsPolicy field. Note that this field cannot be set when spec.os.name is windows. | [optional] +**supplemental_groups_policy** | **str** | Defines how supplemental groups of the first container processes are calculated. Valid values are \"Merge\" and \"Strict\". If not specified, \"Merge\" is used. (Alpha) Using the field requires the SupplementalGroupsPolicy feature gate to be enabled and the container runtime must implement support for this feature. Note that this field cannot be set when spec.os.name is windows. | [optional] **sysctls** | [**[Sysctl]**](Sysctl.md) | Sysctls hold a list of namespaced sysctls used for the pod. Pods with unsupported sysctls (by the container runtime) might fail to launch. Note that this field cannot be set when spec.os.name is windows. 
| [optional] **windows_options** | [**WindowsSecurityContextOptions**](WindowsSecurityContextOptions.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/ProjectedVolumeSource.md b/sdks/python/client/docs/ProjectedVolumeSource.md index 885e63456b66..bd62a9ce1914 100644 --- a/sdks/python/client/docs/ProjectedVolumeSource.md +++ b/sdks/python/client/docs/ProjectedVolumeSource.md @@ -6,7 +6,7 @@ Represents a projected volume source Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **default_mode** | **int** | defaultMode are the mode bits used to set permissions on created files by default. Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. | [optional] -**sources** | [**[VolumeProjection]**](VolumeProjection.md) | sources is the list of volume projections | [optional] +**sources** | [**[VolumeProjection]**](VolumeProjection.md) | sources is the list of volume projections. Each entry in this list handles one source. 
| [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/ResourceClaim.md b/sdks/python/client/docs/ResourceClaim.md index 6bc50fcf05db..c1bd0014c2f4 100644 --- a/sdks/python/client/docs/ResourceClaim.md +++ b/sdks/python/client/docs/ResourceClaim.md @@ -6,6 +6,7 @@ ResourceClaim references one entry in PodSpec.ResourceClaims. Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **name** | **str** | Name must match the name of one entry in pod.spec.resourceClaims of the Pod where this field is used. It makes that resource available inside a container. | +**request** | **str** | Request is the name chosen for a request in the referenced claim. If empty, everything from the claim is made available, otherwise only the result of this request. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SecurityContext.md b/sdks/python/client/docs/SecurityContext.md index 2fbe87da9e26..15fb56c82e46 100644 --- a/sdks/python/client/docs/SecurityContext.md +++ b/sdks/python/client/docs/SecurityContext.md @@ -9,7 +9,7 @@ Name | Type | Description | Notes **app_armor_profile** | [**AppArmorProfile**](AppArmorProfile.md) | | [optional] **capabilities** | [**Capabilities**](Capabilities.md) | | [optional] **privileged** | **bool** | Run container in privileged mode. 
Processes in privileged containers are essentially equivalent to root on the host. Defaults to false. Note that this field cannot be set when spec.os.name is windows. | [optional] -**proc_mount** | **str** | procMount denotes the type of proc mount to use for the containers. The default is DefaultProcMount which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows. | [optional] +**proc_mount** | **str** | procMount denotes the type of proc mount to use for the containers. The default value is Default which uses the container runtime defaults for readonly paths and masked paths. This requires the ProcMountType feature flag to be enabled. Note that this field cannot be set when spec.os.name is windows. | [optional] **read_only_root_filesystem** | **bool** | Whether this container has a read-only root filesystem. Default is false. Note that this field cannot be set when spec.os.name is windows. | [optional] **run_as_group** | **int** | The GID to run the entrypoint of the container process. Uses runtime default if unset. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. Note that this field cannot be set when spec.os.name is windows. | [optional] **run_as_non_root** | **bool** | Indicates that the container must run as a non-root user. If true, the Kubelet will validate the image at runtime to ensure that it does not run as UID 0 (root) and fail to start the container if it does. If unset or false, no such validation will be performed. May also be set in PodSecurityContext. If set in both SecurityContext and PodSecurityContext, the value specified in SecurityContext takes precedence. 
| [optional] diff --git a/sdks/python/client/docs/SensorCreateSensorRequest.md b/sdks/python/client/docs/SensorCreateSensorRequest.md index 972dcf8a3a83..f422a9b61c48 100644 --- a/sdks/python/client/docs/SensorCreateSensorRequest.md +++ b/sdks/python/client/docs/SensorCreateSensorRequest.md @@ -6,7 +6,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **create_options** | [**CreateOptions**](CreateOptions.md) | | [optional] **namespace** | **str** | | [optional] -**sensor** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**sensor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/SensorSensorWatchEvent.md b/sdks/python/client/docs/SensorSensorWatchEvent.md index 5b7bd03a4de0..a6eb7347cfa7 100644 --- a/sdks/python/client/docs/SensorSensorWatchEvent.md +++ b/sdks/python/client/docs/SensorSensorWatchEvent.md @@ -4,7 +4,7 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**object** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**object** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **type** | **str** | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/sdks/python/client/docs/SensorServiceApi.md 
b/sdks/python/client/docs/SensorServiceApi.md index 069c99c5f3b4..2f63d732b967 100644 --- a/sdks/python/client/docs/SensorServiceApi.md +++ b/sdks/python/client/docs/SensorServiceApi.md @@ -14,7 +14,7 @@ Method | HTTP request | Description # **create_sensor** -> IoArgoprojEventsV1alpha1Sensor create_sensor(namespace, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor create_sensor(namespace, body) @@ -27,7 +27,7 @@ import time import argo_workflows from argo_workflows.api import sensor_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor from argo_workflows.model.sensor_create_sensor_request import SensorCreateSensorRequest from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 @@ -61,7 +61,7 @@ with argo_workflows.ApiClient(configuration) as api_client: field_validation="field_validation_example", ), namespace="namespace_example", - sensor=IoArgoprojEventsV1alpha1Sensor( + sensor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor( metadata=ObjectMeta( annotations={ "key": "key_example", @@ -104,13 +104,13 @@ with argo_workflows.ApiClient(configuration) as api_client: self_link="self_link_example", uid="uid_example", ), - spec=IoArgoprojEventsV1alpha1SensorSpec( + spec=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec( dependencies=[ - IoArgoprojEventsV1alpha1EventDependency( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency( event_name="event_name_example", event_source_name="event_source_name_example", - filters=IoArgoprojEventsV1alpha1EventDependencyFilter( - context=IoArgoprojEventsV1alpha1EventContext( + filters=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter( + 
context=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext( datacontenttype="datacontenttype_example", id="id_example", source="source_example", @@ -120,7 +120,7 @@ with argo_workflows.ApiClient(configuration) as api_client: type="type_example", ), data=[ - IoArgoprojEventsV1alpha1DataFilter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter( comparator="comparator_example", path="path_example", template="template_example", @@ -133,10 +133,10 @@ with argo_workflows.ApiClient(configuration) as api_client: data_logical_operator="data_logical_operator_example", expr_logical_operator="expr_logical_operator_example", exprs=[ - IoArgoprojEventsV1alpha1ExprFilter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter( expr="expr_example", fields=[ - IoArgoprojEventsV1alpha1PayloadField( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField( name="name_example", path="path_example", ), @@ -144,14 +144,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], script="script_example", - time=IoArgoprojEventsV1alpha1TimeFilter( + time=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter( start="start_example", stop="stop_example", ), ), filters_logical_operator="filters_logical_operator_example", name="name_example", - transform=IoArgoprojEventsV1alpha1EventDependencyTransformer( + transform=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer( jq="jq_example", script="script_example", ), @@ -164,7 +164,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, replicas=1, revision_history_limit=1, - template=IoArgoprojEventsV1alpha1Template( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template( affinity=Affinity( node_affinity=NodeAffinity( preferred_during_scheduling_ignored_during_execution=[ @@ -396,13 +396,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), ), - container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], 
+ container=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container( env=[ EnvVar( name="name_example", @@ -443,148 +437,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - image="image_example", image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", - protocol="protocol_example", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - 
command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resize_policy=[ - ContainerResizePolicy( - resource_name="resource_name_example", - restart_policy="restart_policy_example", - ), - ], resources=ResourceRequirements( claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -594,7 +452,6 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - restart_policy="restart_policy_example", security_context=SecurityContext( allow_privilege_escalation=True, app_armor_profile=AppArmorProfile( @@ -632,50 +489,6 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_user_name="run_as_user_name_example", ), ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - 
VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], volume_mounts=[ VolumeMount( mount_path="mount_path_example", @@ -687,14 +500,13 @@ with argo_workflows.ApiClient(configuration) as api_client: sub_path_expr="sub_path_expr_example", ), ], - working_dir="working_dir_example", ), image_pull_secrets=[ LocalObjectReference( name="name_example", ), ], - metadata=IoArgoprojEventsV1alpha1Metadata( + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( annotations={ "key": "key_example", }, @@ -717,6 +529,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -730,6 +543,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -973,6 +787,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -1147,13 +965,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), triggers=[ - IoArgoprojEventsV1alpha1Trigger( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger( at_least_once=True, + dlq_trigger=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger(), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", 
data_key="data_key_example", @@ -1164,18 +983,18 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - policy=IoArgoprojEventsV1alpha1TriggerPolicy( - k8s=IoArgoprojEventsV1alpha1K8SResourcePolicy( - backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + policy=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy( + k8s=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy( + backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, @@ -1185,41 +1004,41 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - status=IoArgoprojEventsV1alpha1StatusPolicy( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy( allow=[ 1, ], ), ), - rate_limit=IoArgoprojEventsV1alpha1RateLimit( + rate_limit=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit( requests_per_unit=1, unit="unit_example", ), - retry_strategy=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + retry_strategy=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - template=IoArgoprojEventsV1alpha1TriggerTemplate( - 
argo_workflow=IoArgoprojEventsV1alpha1ArgoWorkflowTrigger( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate( + argo_workflow=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger( args=[ "args_example", ], operation="operation_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1230,19 +1049,19 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - source=IoArgoprojEventsV1alpha1ArtifactLocation( + source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation( configmap=ConfigMapKeySelector( key="key_example", name="name_example", optional=True, ), - file=IoArgoprojEventsV1alpha1FileArtifact( + file=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact( path="path_example", ), - git=IoArgoprojEventsV1alpha1GitArtifact( + git=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact( branch="branch_example", clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( + creds=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds( password=SecretKeySelector( key="key_example", name="name_example", @@ -1257,7 +1076,7 @@ with argo_workflows.ApiClient(configuration) as api_client: file_path="file_path_example", insecure_ignore_host_key=True, ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( + remote=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig( name="name_example", urls=[ "urls_example", @@ -1272,16 +1091,16 @@ with argo_workflows.ApiClient(configuration) as api_client: url="url_example", ), inline="inline_example", - 
resource=IoArgoprojEventsV1alpha1Resource( + resource=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource( value='YQ==', ), - s3=IoArgoprojEventsV1alpha1S3Artifact( + s3=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -1294,7 +1113,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -1309,13 +1128,13 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - url=IoArgoprojEventsV1alpha1URLArtifact( + url=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact( path="path_example", verify_cert=True, ), ), ), - aws_lambda=IoArgoprojEventsV1alpha1AWSLambdaTrigger( + aws_lambda=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger( access_key=SecretKeySelector( key="key_example", name="name_example", @@ -1324,10 +1143,10 @@ with argo_workflows.ApiClient(configuration) as api_client: function_name="function_name_example", invocation_type="invocation_type_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1339,10 +1158,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1361,14 +1180,14 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - azure_event_hubs=IoArgoprojEventsV1alpha1AzureEventHubsTrigger( + azure_event_hubs=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger( fqdn="fqdn_example", hub_name="hub_name_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1380,10 +1199,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1405,17 +1224,17 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - azure_service_bus=IoArgoprojEventsV1alpha1AzureServiceBusTrigger( + azure_service_bus=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger( connection_string=SecretKeySelector( key="key_example", name="name_example", optional=True, ), parameters=[ - 
IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1427,10 +1246,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1443,7 +1262,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], queue_name="queue_name_example", subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1465,24 +1284,24 @@ with argo_workflows.ApiClient(configuration) as api_client: ), conditions="conditions_example", conditions_reset=[ - IoArgoprojEventsV1alpha1ConditionsResetCriteria( - by_time=IoArgoprojEventsV1alpha1ConditionsResetByTime( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria( + by_time=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime( cron="cron_example", timezone="timezone_example", ), ), ], - custom=IoArgoprojEventsV1alpha1CustomTrigger( + custom=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger( cert_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + 
GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1494,10 +1313,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1515,15 +1334,15 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - email=IoArgoprojEventsV1alpha1EmailTrigger( + email=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger( body="body_example", _from="_from_example", host="host_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1546,8 +1365,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ], username="username_example", ), - http=IoArgoprojEventsV1alpha1HTTPTrigger( - basic_auth=IoArgoprojEventsV1alpha1BasicAuth( + http=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger( + basic_auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ 
-1564,10 +1383,10 @@ with argo_workflows.ApiClient(configuration) as api_client: }, method="method_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1579,10 +1398,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1594,9 +1413,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], secure_headers=[ - IoArgoprojEventsV1alpha1SecureHeader( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader( name="name_example", - value_from=IoArgoprojEventsV1alpha1ValueFromSource( + value_from=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource( config_map_key_ref=ConfigMapKeySelector( key="key_example", name="name_example", @@ -1611,7 +1430,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], timeout="timeout_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1631,14 +1450,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), url="url_example", ), - k8s=IoArgoprojEventsV1alpha1StandardK8STrigger( + 
k8s=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger( live_object=True, operation="operation_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1650,19 +1469,19 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], patch_strategy="patch_strategy_example", - source=IoArgoprojEventsV1alpha1ArtifactLocation( + source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation( configmap=ConfigMapKeySelector( key="key_example", name="name_example", optional=True, ), - file=IoArgoprojEventsV1alpha1FileArtifact( + file=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact( path="path_example", ), - git=IoArgoprojEventsV1alpha1GitArtifact( + git=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact( branch="branch_example", clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( + creds=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds( password=SecretKeySelector( key="key_example", name="name_example", @@ -1677,7 +1496,7 @@ with argo_workflows.ApiClient(configuration) as api_client: file_path="file_path_example", insecure_ignore_host_key=True, ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( + remote=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig( name="name_example", urls=[ "urls_example", @@ -1692,16 +1511,16 @@ with argo_workflows.ApiClient(configuration) as api_client: url="url_example", ), inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( + resource=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource( value='YQ==', ), - 
s3=IoArgoprojEventsV1alpha1S3Artifact( + s3=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -1714,7 +1533,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -1729,20 +1548,23 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - url=IoArgoprojEventsV1alpha1URLArtifact( + url=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact( path="path_example", verify_cert=True, ), ), ), - kafka=IoArgoprojEventsV1alpha1KafkaTrigger( + kafka=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger( compress=True, flush_frequency=1, + headers={ + "key": "key_example", + }, parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1756,10 +1578,10 @@ with argo_workflows.ApiClient(configuration) as api_client: partition=1, partitioning_key="partitioning_key_example", payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", 
context_template="context_template_example", data_key="data_key_example", @@ -1771,7 +1593,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], required_acks=1, - sasl=IoArgoprojEventsV1alpha1SASLConfig( + sasl=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig( mechanism="mechanism_example", password_secret=SecretKeySelector( key="key_example", @@ -1784,8 +1606,8 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - schema_registry=IoArgoprojEventsV1alpha1SchemaRegistryConfig( - auth=IoArgoprojEventsV1alpha1BasicAuth( + schema_registry=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -1800,7 +1622,24 @@ with argo_workflows.ApiClient(configuration) as api_client: schema_id=1, url="url_example", ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + secure_headers=[ + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader( + name="name_example", + value_from=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1822,16 +1661,45 @@ with argo_workflows.ApiClient(configuration) as api_client: url="url_example", version="version_example", ), - log=IoArgoprojEventsV1alpha1LogTrigger( + log=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger( interval_seconds="interval_seconds_example", ), name="name_example", - nats=IoArgoprojEventsV1alpha1NATSTrigger( + nats=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth( 
+ basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( + password=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + credential=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + nkey=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + token=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1843,10 +1711,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1858,7 +1726,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -1878,7 +1746,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), url="url_example", ), - open_whisk=IoArgoprojEventsV1alpha1OpenWhiskTrigger( + 
open_whisk=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger( action_name="action_name_example", auth_token=SecretKeySelector( key="key_example", @@ -1888,10 +1756,10 @@ with argo_workflows.ApiClient(configuration) as api_client: host="host_example", namespace="namespace_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1903,10 +1771,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1919,7 +1787,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], version="version_example", ), - pulsar=IoArgoprojEventsV1alpha1PulsarTrigger( + pulsar=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger( auth_athenz_params={ "key": "key_example", }, @@ -1933,25 +1801,25 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - 
factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1963,10 +1831,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -1977,7 +1845,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -2005,16 +1873,16 @@ with argo_workflows.ApiClient(configuration) as api_client: topic="topic_example", url="url_example", ), - slack=IoArgoprojEventsV1alpha1SlackTrigger( + slack=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger( attachments="attachments_example", blocks="blocks_example", channel="channel_example", message="message_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - 
src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -2025,7 +1893,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - sender=IoArgoprojEventsV1alpha1SlackSender( + sender=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender( icon="icon_example", username="username_example", ), @@ -2034,7 +1902,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - thread=IoArgoprojEventsV1alpha1SlackThread( + thread=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread( broadcast_message_to_channel=True, message_aggregation_key="message_aggregation_key_example", ), @@ -2043,10 +1911,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], ), - status=IoArgoprojEventsV1alpha1SensorStatus( - status=IoArgoprojEventsV1alpha1Status( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status( conditions=[ - IoArgoprojEventsV1alpha1Condition( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition( last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), message="message_example", reason="reason_example", @@ -2077,7 +1945,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization @@ -2144,6 +2012,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. 
An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
(optional) # example passing only required values which don't have defaults set try: @@ -2155,7 +2024,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_sensor(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_sensor(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling SensorServiceApi->delete_sensor: %s\n" % e) @@ -2174,6 +2043,7 @@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. 
| [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
| [optional] ### Return type @@ -2199,7 +2069,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **get_sensor** -> IoArgoprojEventsV1alpha1Sensor get_sensor(namespace, name) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor get_sensor(namespace, name) @@ -2212,7 +2082,7 @@ import time import argo_workflows from argo_workflows.api import sensor_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. 
@@ -2266,7 +2136,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization @@ -2288,7 +2158,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **list_sensors** -> IoArgoprojEventsV1alpha1SensorList list_sensors(namespace) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList list_sensors(namespace) @@ -2301,7 +2171,7 @@ import time import argo_workflows from argo_workflows.api import sensor_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor_list import IoArgoprojEventsV1alpha1SensorList +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor_list import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 # See configuration.py for a list of all supported configuration parameters. @@ -2371,7 +2241,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1SensorList**](IoArgoprojEventsV1alpha1SensorList.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorList.md) ### Authorization @@ -2440,9 +2310,10 @@ with argo_workflows.ApiClient(configuration) as api_client: pod_log_options_since_time_seconds = "podLogOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. 
(optional) pod_log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) pod_log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + pod_log_options_tail_lines = "podLogOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. (optional) pod_log_options_limit_bytes = "podLogOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) pod_log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. 
an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) + pod_log_options_stream = "podLogOptions.stream_example" # str | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. (optional) # example passing only required values which don't have defaults set try: @@ -2454,7 +2325,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.sensors_logs(namespace, name=name, trigger_name=trigger_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend) + api_response = api_instance.sensors_logs(namespace, name=name, trigger_name=trigger_name, grep=grep, pod_log_options_container=pod_log_options_container, pod_log_options_follow=pod_log_options_follow, pod_log_options_previous=pod_log_options_previous, pod_log_options_since_seconds=pod_log_options_since_seconds, pod_log_options_since_time_seconds=pod_log_options_since_time_seconds, pod_log_options_since_time_nanos=pod_log_options_since_time_nanos, 
pod_log_options_timestamps=pod_log_options_timestamps, pod_log_options_tail_lines=pod_log_options_tail_lines, pod_log_options_limit_bytes=pod_log_options_limit_bytes, pod_log_options_insecure_skip_tls_verify_backend=pod_log_options_insecure_skip_tls_verify_backend, pod_log_options_stream=pod_log_options_stream) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling SensorServiceApi->sensors_logs: %s\n" % e) @@ -2476,9 +2347,10 @@ Name | Type | Description | Notes **pod_log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **pod_log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **pod_log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **pod_log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **pod_log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. 
This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **pod_log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **pod_log_options_stream** | **str**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
| [optional] ### Return type @@ -2504,7 +2376,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **update_sensor** -> IoArgoprojEventsV1alpha1Sensor update_sensor(namespace, name, body) +> GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor update_sensor(namespace, name, body) @@ -2517,7 +2389,7 @@ import time import argo_workflows from argo_workflows.api import sensor_service_api from argo_workflows.model.grpc_gateway_runtime_error import GrpcGatewayRuntimeError -from argo_workflows.model.io_argoproj_events_v1alpha1_sensor import IoArgoprojEventsV1alpha1Sensor +from argo_workflows.model.github_com_argoproj_argo_events_pkg_apis_events_v1alpha1_sensor import GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor from argo_workflows.model.sensor_update_sensor_request import SensorUpdateSensorRequest from pprint import pprint # Defining the host is optional and defaults to http://localhost:2746 @@ -2546,7 +2418,7 @@ with argo_workflows.ApiClient(configuration) as api_client: body = SensorUpdateSensorRequest( name="name_example", namespace="namespace_example", - sensor=IoArgoprojEventsV1alpha1Sensor( + sensor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor( metadata=ObjectMeta( annotations={ "key": "key_example", @@ -2589,13 +2461,13 @@ with argo_workflows.ApiClient(configuration) as api_client: self_link="self_link_example", uid="uid_example", ), - spec=IoArgoprojEventsV1alpha1SensorSpec( + spec=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorSpec( dependencies=[ - IoArgoprojEventsV1alpha1EventDependency( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependency( event_name="event_name_example", event_source_name="event_source_name_example", - filters=IoArgoprojEventsV1alpha1EventDependencyFilter( - context=IoArgoprojEventsV1alpha1EventContext( + 
filters=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyFilter( + context=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventContext( datacontenttype="datacontenttype_example", id="id_example", source="source_example", @@ -2605,7 +2477,7 @@ with argo_workflows.ApiClient(configuration) as api_client: type="type_example", ), data=[ - IoArgoprojEventsV1alpha1DataFilter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1DataFilter( comparator="comparator_example", path="path_example", template="template_example", @@ -2618,10 +2490,10 @@ with argo_workflows.ApiClient(configuration) as api_client: data_logical_operator="data_logical_operator_example", expr_logical_operator="expr_logical_operator_example", exprs=[ - IoArgoprojEventsV1alpha1ExprFilter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ExprFilter( expr="expr_example", fields=[ - IoArgoprojEventsV1alpha1PayloadField( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PayloadField( name="name_example", path="path_example", ), @@ -2629,14 +2501,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], script="script_example", - time=IoArgoprojEventsV1alpha1TimeFilter( + time=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TimeFilter( start="start_example", stop="stop_example", ), ), filters_logical_operator="filters_logical_operator_example", name="name_example", - transform=IoArgoprojEventsV1alpha1EventDependencyTransformer( + transform=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EventDependencyTransformer( jq="jq_example", script="script_example", ), @@ -2649,7 +2521,7 @@ with argo_workflows.ApiClient(configuration) as api_client: }, replicas=1, revision_history_limit=1, - template=IoArgoprojEventsV1alpha1Template( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Template( affinity=Affinity( node_affinity=NodeAffinity( preferred_during_scheduling_ignored_during_execution=[ @@ -2881,13 +2753,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), ), - 
container=Container( - args=[ - "args_example", - ], - command=[ - "command_example", - ], + container=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Container( env=[ EnvVar( name="name_example", @@ -2928,148 +2794,12 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - image="image_example", image_pull_policy="image_pull_policy_example", - lifecycle=Lifecycle( - post_start=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - pre_stop=LifecycleHandler( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - sleep=SleepAction( - seconds=1, - ), - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - ), - ), - liveness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - name="name_example", - ports=[ - ContainerPort( - container_port=1, - host_ip="host_ip_example", - host_port=1, - name="name_example", 
- protocol="protocol_example", - ), - ], - readiness_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - resize_policy=[ - ContainerResizePolicy( - resource_name="resource_name_example", - restart_policy="restart_policy_example", - ), - ], resources=ResourceRequirements( claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3079,7 +2809,6 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - restart_policy="restart_policy_example", security_context=SecurityContext( allow_privilege_escalation=True, app_armor_profile=AppArmorProfile( @@ -3117,50 +2846,6 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_user_name="run_as_user_name_example", ), ), - startup_probe=Probe( - _exec=ExecAction( - command=[ - "command_example", - ], - ), - failure_threshold=1, - grpc=GRPCAction( - port=1, - service="service_example", - ), - http_get=HTTPGetAction( - host="host_example", - http_headers=[ - HTTPHeader( - name="name_example", - value="value_example", - ), - ], - path="path_example", - port="port_example", - scheme="scheme_example", - ), - initial_delay_seconds=1, - period_seconds=1, - success_threshold=1, - tcp_socket=TCPSocketAction( - host="host_example", - port="port_example", - ), - termination_grace_period_seconds=1, - timeout_seconds=1, - ), - stdin=True, - stdin_once=True, - termination_message_path="termination_message_path_example", - 
termination_message_policy="termination_message_policy_example", - tty=True, - volume_devices=[ - VolumeDevice( - device_path="device_path_example", - name="name_example", - ), - ], volume_mounts=[ VolumeMount( mount_path="mount_path_example", @@ -3172,14 +2857,13 @@ with argo_workflows.ApiClient(configuration) as api_client: sub_path_expr="sub_path_expr_example", ), ], - working_dir="working_dir_example", ), image_pull_secrets=[ LocalObjectReference( name="name_example", ), ], - metadata=IoArgoprojEventsV1alpha1Metadata( + metadata=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Metadata( annotations={ "key": "key_example", }, @@ -3202,6 +2886,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -3215,6 +2900,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -3458,6 +3144,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -3632,13 +3322,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ], ), triggers=[ - IoArgoprojEventsV1alpha1Trigger( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger( at_least_once=True, + dlq_trigger=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Trigger(), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + 
src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3649,18 +3340,18 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - policy=IoArgoprojEventsV1alpha1TriggerPolicy( - k8s=IoArgoprojEventsV1alpha1K8SResourcePolicy( - backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + policy=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerPolicy( + k8s=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResourcePolicy( + backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, @@ -3670,41 +3361,41 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - status=IoArgoprojEventsV1alpha1StatusPolicy( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StatusPolicy( allow=[ 1, ], ), ), - rate_limit=IoArgoprojEventsV1alpha1RateLimit( + rate_limit=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1RateLimit( requests_per_unit=1, unit="unit_example", ), - retry_strategy=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + retry_strategy=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( 
+ jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), - template=IoArgoprojEventsV1alpha1TriggerTemplate( - argo_workflow=IoArgoprojEventsV1alpha1ArgoWorkflowTrigger( + template=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerTemplate( + argo_workflow=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArgoWorkflowTrigger( args=[ "args_example", ], operation="operation_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3715,19 +3406,19 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - source=IoArgoprojEventsV1alpha1ArtifactLocation( + source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation( configmap=ConfigMapKeySelector( key="key_example", name="name_example", optional=True, ), - file=IoArgoprojEventsV1alpha1FileArtifact( + file=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact( path="path_example", ), - git=IoArgoprojEventsV1alpha1GitArtifact( + git=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact( branch="branch_example", clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( + creds=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds( password=SecretKeySelector( key="key_example", name="name_example", @@ -3742,7 +3433,7 @@ with argo_workflows.ApiClient(configuration) as api_client: file_path="file_path_example", insecure_ignore_host_key=True, ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( + remote=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig( name="name_example", urls=[ "urls_example", @@ -3757,16 +3448,16 @@ 
with argo_workflows.ApiClient(configuration) as api_client: url="url_example", ), inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( + resource=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource( value='YQ==', ), - s3=IoArgoprojEventsV1alpha1S3Artifact( + s3=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -3779,7 +3470,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -3794,13 +3485,13 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - url=IoArgoprojEventsV1alpha1URLArtifact( + url=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact( path="path_example", verify_cert=True, ), ), ), - aws_lambda=IoArgoprojEventsV1alpha1AWSLambdaTrigger( + aws_lambda=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AWSLambdaTrigger( access_key=SecretKeySelector( key="key_example", name="name_example", @@ -3809,10 +3500,10 @@ with argo_workflows.ApiClient(configuration) as api_client: function_name="function_name_example", invocation_type="invocation_type_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3824,10 +3515,10 @@ with argo_workflows.ApiClient(configuration) as api_client: 
), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3846,14 +3537,14 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - azure_event_hubs=IoArgoprojEventsV1alpha1AzureEventHubsTrigger( + azure_event_hubs=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureEventHubsTrigger( fqdn="fqdn_example", hub_name="hub_name_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3865,10 +3556,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3890,17 +3581,17 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - azure_service_bus=IoArgoprojEventsV1alpha1AzureServiceBusTrigger( + azure_service_bus=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1AzureServiceBusTrigger( connection_string=SecretKeySelector( key="key_example", name="name_example", 
optional=True, ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3912,10 +3603,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3928,7 +3619,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], queue_name="queue_name_example", subscription_name="subscription_name_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -3950,24 +3641,24 @@ with argo_workflows.ApiClient(configuration) as api_client: ), conditions="conditions_example", conditions_reset=[ - IoArgoprojEventsV1alpha1ConditionsResetCriteria( - by_time=IoArgoprojEventsV1alpha1ConditionsResetByTime( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetCriteria( + by_time=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ConditionsResetByTime( cron="cron_example", timezone="timezone_example", ), ), ], - custom=IoArgoprojEventsV1alpha1CustomTrigger( + custom=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1CustomTrigger( cert_secret=SecretKeySelector( key="key_example", name="name_example", optional=True, ), parameters=[ - 
IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -3979,10 +3670,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4000,15 +3691,15 @@ with argo_workflows.ApiClient(configuration) as api_client: "key": "key_example", }, ), - email=IoArgoprojEventsV1alpha1EmailTrigger( + email=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1EmailTrigger( body="body_example", _from="_from_example", host="host_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4031,8 +3722,8 @@ with argo_workflows.ApiClient(configuration) as api_client: ], username="username_example", ), - http=IoArgoprojEventsV1alpha1HTTPTrigger( - basic_auth=IoArgoprojEventsV1alpha1BasicAuth( + http=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1HTTPTrigger( + basic_auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( 
key="key_example", name="name_example", @@ -4049,10 +3740,10 @@ with argo_workflows.ApiClient(configuration) as api_client: }, method="method_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4064,10 +3755,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4079,9 +3770,9 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], secure_headers=[ - IoArgoprojEventsV1alpha1SecureHeader( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader( name="name_example", - value_from=IoArgoprojEventsV1alpha1ValueFromSource( + value_from=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource( config_map_key_ref=ConfigMapKeySelector( key="key_example", name="name_example", @@ -4096,7 +3787,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], timeout="timeout_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4116,14 +3807,14 @@ with argo_workflows.ApiClient(configuration) as api_client: ), url="url_example", ), - k8s=IoArgoprojEventsV1alpha1StandardK8STrigger( + 
k8s=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1StandardK8STrigger( live_object=True, operation="operation_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4135,19 +3826,19 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], patch_strategy="patch_strategy_example", - source=IoArgoprojEventsV1alpha1ArtifactLocation( + source=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ArtifactLocation( configmap=ConfigMapKeySelector( key="key_example", name="name_example", optional=True, ), - file=IoArgoprojEventsV1alpha1FileArtifact( + file=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1FileArtifact( path="path_example", ), - git=IoArgoprojEventsV1alpha1GitArtifact( + git=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitArtifact( branch="branch_example", clone_directory="clone_directory_example", - creds=IoArgoprojEventsV1alpha1GitCreds( + creds=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitCreds( password=SecretKeySelector( key="key_example", name="name_example", @@ -4162,7 +3853,7 @@ with argo_workflows.ApiClient(configuration) as api_client: file_path="file_path_example", insecure_ignore_host_key=True, ref="ref_example", - remote=IoArgoprojEventsV1alpha1GitRemoteConfig( + remote=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1GitRemoteConfig( name="name_example", urls=[ "urls_example", @@ -4177,16 +3868,16 @@ with argo_workflows.ApiClient(configuration) as api_client: url="url_example", ), inline="inline_example", - resource=IoArgoprojEventsV1alpha1Resource( + resource=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1K8SResource( value='YQ==', ), - 
s3=IoArgoprojEventsV1alpha1S3Artifact( + s3=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Artifact( access_key=SecretKeySelector( key="key_example", name="name_example", optional=True, ), - bucket=IoArgoprojEventsV1alpha1S3Bucket( + bucket=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Bucket( key="key_example", name="name_example", ), @@ -4199,7 +3890,7 @@ with argo_workflows.ApiClient(configuration) as api_client: events=[ "events_example", ], - filter=IoArgoprojEventsV1alpha1S3Filter( + filter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1S3Filter( prefix="prefix_example", suffix="suffix_example", ), @@ -4214,20 +3905,23 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - url=IoArgoprojEventsV1alpha1URLArtifact( + url=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1URLArtifact( path="path_example", verify_cert=True, ), ), ), - kafka=IoArgoprojEventsV1alpha1KafkaTrigger( + kafka=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1KafkaTrigger( compress=True, flush_frequency=1, + headers={ + "key": "key_example", + }, parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4241,10 +3935,10 @@ with argo_workflows.ApiClient(configuration) as api_client: partition=1, partitioning_key="partitioning_key_example", payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", 
context_template="context_template_example", data_key="data_key_example", @@ -4256,7 +3950,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], required_acks=1, - sasl=IoArgoprojEventsV1alpha1SASLConfig( + sasl=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SASLConfig( mechanism="mechanism_example", password_secret=SecretKeySelector( key="key_example", @@ -4269,8 +3963,8 @@ with argo_workflows.ApiClient(configuration) as api_client: optional=True, ), ), - schema_registry=IoArgoprojEventsV1alpha1SchemaRegistryConfig( - auth=IoArgoprojEventsV1alpha1BasicAuth( + schema_registry=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SchemaRegistryConfig( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( password=SecretKeySelector( key="key_example", name="name_example", @@ -4285,7 +3979,24 @@ with argo_workflows.ApiClient(configuration) as api_client: schema_id=1, url="url_example", ), - tls=IoArgoprojEventsV1alpha1TLSConfig( + secure_headers=[ + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SecureHeader( + name="name_example", + value_from=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1ValueFromSource( + config_map_key_ref=ConfigMapKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + secret_key_ref=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + ), + ], + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4307,16 +4018,45 @@ with argo_workflows.ApiClient(configuration) as api_client: url="url_example", version="version_example", ), - log=IoArgoprojEventsV1alpha1LogTrigger( + log=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1LogTrigger( interval_seconds="interval_seconds_example", ), name="name_example", - nats=IoArgoprojEventsV1alpha1NATSTrigger( + nats=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSTrigger( + auth=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1NATSAuth( 
+ basic=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1BasicAuth( + password=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + username=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), + credential=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + nkey=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + token=SecretKeySelector( + key="key_example", + name="name_example", + optional=True, + ), + ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4328,10 +4068,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4343,7 +4083,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], subject="subject_example", - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4363,7 +4103,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), url="url_example", ), - open_whisk=IoArgoprojEventsV1alpha1OpenWhiskTrigger( + 
open_whisk=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1OpenWhiskTrigger( action_name="action_name_example", auth_token=SecretKeySelector( key="key_example", @@ -4373,10 +4113,10 @@ with argo_workflows.ApiClient(configuration) as api_client: host="host_example", namespace="namespace_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4388,10 +4128,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4404,7 +4144,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ], version="version_example", ), - pulsar=IoArgoprojEventsV1alpha1PulsarTrigger( + pulsar=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1PulsarTrigger( auth_athenz_params={ "key": "key_example", }, @@ -4418,25 +4158,25 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - connection_backoff=IoArgoprojEventsV1alpha1Backoff( - duration=IoArgoprojEventsV1alpha1Int64OrString( + connection_backoff=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Backoff( + duration=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Int64OrString( int64_val="int64_val_example", str_val="str_val_example", type="type_example", ), - 
factor=IoArgoprojEventsV1alpha1Amount( + factor=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), - jitter=IoArgoprojEventsV1alpha1Amount( + jitter=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Amount( value='YQ==', ), steps=1, ), parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4448,10 +4188,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], payload=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4462,7 +4202,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - tls=IoArgoprojEventsV1alpha1TLSConfig( + tls=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TLSConfig( ca_cert_secret=SecretKeySelector( key="key_example", name="name_example", @@ -4490,16 +4230,16 @@ with argo_workflows.ApiClient(configuration) as api_client: topic="topic_example", url="url_example", ), - slack=IoArgoprojEventsV1alpha1SlackTrigger( + slack=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackTrigger( attachments="attachments_example", blocks="blocks_example", channel="channel_example", message="message_example", parameters=[ - IoArgoprojEventsV1alpha1TriggerParameter( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameter( dest="dest_example", operation="operation_example", - 
src=IoArgoprojEventsV1alpha1TriggerParameterSource( + src=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1TriggerParameterSource( context_key="context_key_example", context_template="context_template_example", data_key="data_key_example", @@ -4510,7 +4250,7 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ), ], - sender=IoArgoprojEventsV1alpha1SlackSender( + sender=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackSender( icon="icon_example", username="username_example", ), @@ -4519,7 +4259,7 @@ with argo_workflows.ApiClient(configuration) as api_client: name="name_example", optional=True, ), - thread=IoArgoprojEventsV1alpha1SlackThread( + thread=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SlackThread( broadcast_message_to_channel=True, message_aggregation_key="message_aggregation_key_example", ), @@ -4528,10 +4268,10 @@ with argo_workflows.ApiClient(configuration) as api_client: ), ], ), - status=IoArgoprojEventsV1alpha1SensorStatus( - status=IoArgoprojEventsV1alpha1Status( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1SensorStatus( + status=GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Status( conditions=[ - IoArgoprojEventsV1alpha1Condition( + GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Condition( last_transition_time=dateutil_parser('1970-01-01T00:00:00.00Z'), message="message_example", reason="reason_example", @@ -4563,7 +4303,7 @@ Name | Type | Description | Notes ### Return type -[**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) +[**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) ### Authorization diff --git a/sdks/python/client/docs/SensorUpdateSensorRequest.md b/sdks/python/client/docs/SensorUpdateSensorRequest.md index 4912ac800cfb..1fbdc3ed4f80 100644 --- a/sdks/python/client/docs/SensorUpdateSensorRequest.md +++ b/sdks/python/client/docs/SensorUpdateSensorRequest.md @@ -6,7 +6,7 @@ Name | Type | Description | Notes 
------------ | ------------- | ------------- | ------------- **name** | **str** | | [optional] **namespace** | **str** | | [optional] -**sensor** | [**IoArgoprojEventsV1alpha1Sensor**](IoArgoprojEventsV1alpha1Sensor.md) | | [optional] +**sensor** | [**GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor**](GithubComArgoprojArgoEventsPkgApisEventsV1alpha1Sensor.md) | | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/TypedObjectReference.md b/sdks/python/client/docs/TypedObjectReference.md index c6c53950ad30..b9e94d0d1858 100644 --- a/sdks/python/client/docs/TypedObjectReference.md +++ b/sdks/python/client/docs/TypedObjectReference.md @@ -1,5 +1,6 @@ # TypedObjectReference +TypedObjectReference contains enough information to let you locate the typed referenced object ## Properties Name | Type | Description | Notes diff --git a/sdks/python/client/docs/Volume.md b/sdks/python/client/docs/Volume.md index e2e0e30b6cbb..1781b76e9d12 100644 --- a/sdks/python/client/docs/Volume.md +++ b/sdks/python/client/docs/Volume.md @@ -23,6 +23,7 @@ Name | Type | Description | Notes **git_repo** | [**GitRepoVolumeSource**](GitRepoVolumeSource.md) | | [optional] **glusterfs** | [**GlusterfsVolumeSource**](GlusterfsVolumeSource.md) | | [optional] **host_path** | [**HostPathVolumeSource**](HostPathVolumeSource.md) | | [optional] +**image** | [**ImageVolumeSource**](ImageVolumeSource.md) | | [optional] **iscsi** | [**ISCSIVolumeSource**](ISCSIVolumeSource.md) | | [optional] **nfs** | [**NFSVolumeSource**](NFSVolumeSource.md) | | [optional] **persistent_volume_claim** | [**PersistentVolumeClaimVolumeSource**](PersistentVolumeClaimVolumeSource.md) | | [optional] 
diff --git a/sdks/python/client/docs/VolumeProjection.md b/sdks/python/client/docs/VolumeProjection.md index e88c1e4d8082..783ecfd6c859 100644 --- a/sdks/python/client/docs/VolumeProjection.md +++ b/sdks/python/client/docs/VolumeProjection.md @@ -1,6 +1,6 @@ # VolumeProjection -Projection that may be projected along with other supported volume types +Projection that may be projected along with other supported volume types. Exactly one of these fields must be set. ## Properties Name | Type | Description | Notes diff --git a/sdks/python/client/docs/WorkflowServiceApi.md b/sdks/python/client/docs/WorkflowServiceApi.md index cb35d223384f..dac6fd0385b3 100644 --- a/sdks/python/client/docs/WorkflowServiceApi.md +++ b/sdks/python/client/docs/WorkflowServiceApi.md @@ -1080,6 +1080,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -1093,6 +1094,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -1805,6 +1807,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -2103,6 +2106,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3314,6 +3318,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4500,6 +4505,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4615,6 +4621,7 @@ with 
argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -4628,6 +4635,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -4833,6 +4841,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -5824,6 +5833,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -6640,6 +6653,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -6938,6 +6952,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -8149,6 +8164,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9335,6 +9351,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9450,6 +9467,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -9463,6 +9481,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, 
], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -9668,6 +9687,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -10659,6 +10679,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11190,6 +11214,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -12644,6 +12672,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -13465,6 +13497,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -13763,6 +13796,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -14974,6 +15008,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16160,6 +16195,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16275,6 +16311,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, 
run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -16288,6 +16325,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -16493,6 +16531,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -17484,6 +17523,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -18620,6 +18663,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -18633,6 +18677,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -19345,6 +19390,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -19643,6 +19689,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -20854,6 +20901,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22040,6 +22088,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( 
name="name_example", + request="request_example", ), ], limits={ @@ -22155,6 +22204,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -22168,6 +22218,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -22373,6 +22424,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -23364,6 +23416,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -24180,6 +24236,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -24478,6 +24535,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -25689,6 +25747,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -26875,6 +26934,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -26990,6 +27050,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", 
role="role_example", @@ -27003,6 +27064,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -27208,6 +27270,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -28199,6 +28262,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -28730,6 +28797,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -29046,6 +29117,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. 
NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. (optional) force = True # bool | (optional) # example passing only required values which don't have defaults set @@ -29058,7 +29130,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, force=force) + api_response = api_instance.delete_workflow(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential, force=force) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling WorkflowServiceApi->delete_workflow: %s\n" % e) @@ -29077,6 +29149,7 @@ Name | Type | 
Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. 
Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. | [optional] **force** | **bool**| | [optional] ### Return type @@ -30241,6 +30314,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -30254,6 +30328,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -30966,6 +31041,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -31264,6 +31340,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32475,6 +32552,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33661,6 +33739,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33776,6 +33855,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -33789,6 +33869,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -33994,6 +34075,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + 
request="request_example", ), ], limits={ @@ -34985,6 +35067,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -35801,6 +35887,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -36099,6 +36186,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -37310,6 +37398,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -38496,6 +38585,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -38611,6 +38701,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -38624,6 +38715,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -38829,6 +38921,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -39820,6 +39913,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, 
chap_auth_session=True, @@ -40351,6 +40448,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -41805,6 +41906,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -42626,6 +42731,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -42924,6 +43030,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -44135,6 +44242,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -45321,6 +45429,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -45436,6 +45545,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -45449,6 +45559,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -45654,6 +45765,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ 
-46645,6 +46757,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -47781,6 +47897,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -47794,6 +47911,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -48506,6 +48624,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -48804,6 +48923,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -50015,6 +50135,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -51201,6 +51322,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -51316,6 +51438,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -51329,6 +51452,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -51534,6 +51658,7 
@@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -52525,6 +52650,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -53341,6 +53470,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -53639,6 +53769,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -54850,6 +54981,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -56036,6 +56168,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -56151,6 +56284,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -56164,6 +56298,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -56369,6 +56504,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -57360,6 +57496,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + 
pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -57891,6 +58031,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -58317,9 +58461,10 @@ with argo_workflows.ApiClient(configuration) as api_client: log_options_since_time_seconds = "logOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. 
(optional) log_options_limit_bytes = "logOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. (optional) log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) + log_options_stream = "logOptions.stream_example" # str | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. (optional) grep = "grep_example" # str | (optional) selector = "selector_example" # str | (optional) @@ -58335,7 +58480,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # and optional values try: # DEPRECATED: Cannot work via HTTP if podName is an empty string. Use WorkflowLogs. 
- api_response = api_instance.pod_logs(namespace, name, pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) + api_response = api_instance.pod_logs(namespace, name, pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, log_options_stream=log_options_stream, grep=grep, selector=selector) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling WorkflowServiceApi->pod_logs: %s\n" % e) @@ -58356,9 +58501,10 @@ Name | Type | Description | Notes **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. | [optional] **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. 
This field may be limited in precision depending on context. | [optional] **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. | [optional] + **log_options_stream** | **str**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. 
Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. | [optional] **grep** | **str**| | [optional] **selector** | **str**| | [optional] @@ -59362,9 +59508,10 @@ with argo_workflows.ApiClient(configuration) as api_client: log_options_since_time_seconds = "logOptions.sinceTime.seconds_example" # str | Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. (optional) log_options_since_time_nanos = 1 # int | Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. (optional) log_options_timestamps = True # bool | If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. (optional) - log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. (optional) + log_options_tail_lines = "logOptions.tailLines_example" # str | If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. (optional) log_options_limit_bytes = "logOptions.limitBytes_example" # str | If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. 
(optional) log_options_insecure_skip_tls_verify_backend = True # bool | insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. (optional) + log_options_stream = "logOptions.stream_example" # str | Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. 
(optional) grep = "grep_example" # str | (optional) selector = "selector_example" # str | (optional) @@ -59378,7 +59525,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.workflow_logs(namespace, name, pod_name=pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, grep=grep, selector=selector) + api_response = api_instance.workflow_logs(namespace, name, pod_name=pod_name, log_options_container=log_options_container, log_options_follow=log_options_follow, log_options_previous=log_options_previous, log_options_since_seconds=log_options_since_seconds, log_options_since_time_seconds=log_options_since_time_seconds, log_options_since_time_nanos=log_options_since_time_nanos, log_options_timestamps=log_options_timestamps, log_options_tail_lines=log_options_tail_lines, log_options_limit_bytes=log_options_limit_bytes, log_options_insecure_skip_tls_verify_backend=log_options_insecure_skip_tls_verify_backend, log_options_stream=log_options_stream, grep=grep, selector=selector) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling WorkflowServiceApi->workflow_logs: %s\n" % e) @@ -59399,9 +59546,10 @@ Name | Type | Description | Notes **log_options_since_time_seconds** | **str**| Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. 
| [optional] **log_options_since_time_nanos** | **int**| Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. This field may be limited in precision depending on context. | [optional] **log_options_timestamps** | **bool**| If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line of log output. Defaults to false. +optional. | [optional] - **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime +optional. | [optional] + **log_options_tail_lines** | **str**| If set, the number of lines from the end of the logs to show. If not specified, logs are shown from the creation of the container or sinceSeconds or sinceTime. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +optional. | [optional] **log_options_limit_bytes** | **str**| If set, the number of bytes to read from the server before terminating the log output. This may not display a complete final line of logging, and may return slightly more or slightly less than the specified limit. +optional. | [optional] **log_options_insecure_skip_tls_verify_backend** | **bool**| insecureSkipTLSVerifyBackend indicates that the apiserver should not confirm the validity of the serving certificate of the backend it is connecting to. This will make the HTTPS connection between the apiserver and the backend insecure. This means the apiserver cannot verify the log data it is receiving came from the real kubelet. If the kubelet is configured to verify the apiserver's TLS credentials, it does not mean the connection to the real kubelet is vulnerable to a man in the middle attack (e.g. an attacker could not intercept the actual log data coming from the real kubelet). +optional. 
| [optional] + **log_options_stream** | **str**| Specify which container log stream to return to the client. Acceptable values are \"All\", \"Stdout\" and \"Stderr\". If not specified, \"All\" is used, and both stdout and stderr are returned interleaved. Note that when \"TailLines\" is specified, \"Stream\" can only be set to nil or \"All\". +featureGate=PodLogsQuerySplitStreams +optional. | [optional] **grep** | **str**| | [optional] **selector** | **str**| | [optional] diff --git a/sdks/python/client/docs/WorkflowTemplateServiceApi.md b/sdks/python/client/docs/WorkflowTemplateServiceApi.md index 77331e14a6c2..e43c6eff3552 100644 --- a/sdks/python/client/docs/WorkflowTemplateServiceApi.md +++ b/sdks/python/client/docs/WorkflowTemplateServiceApi.md @@ -1067,6 +1067,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -1080,6 +1081,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -1792,6 +1794,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -2090,6 +2093,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -3301,6 +3305,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4487,6 +4492,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -4602,6 +4608,7 @@ with argo_workflows.ApiClient(configuration) as 
api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -4615,6 +4622,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -4820,6 +4828,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -5811,6 +5820,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -6627,6 +6640,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -6925,6 +6939,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -8136,6 +8151,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9322,6 +9338,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -9437,6 +9454,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -9450,6 +9468,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + 
supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -9655,6 +9674,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -10646,6 +10666,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11177,6 +11201,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -11455,6 +11483,7 @@ with argo_workflows.ApiClient(configuration) as api_client: delete_options_dry_run = [ "deleteOptions.dryRun_example", ] # [str] | When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. (optional) + delete_options_ignore_store_read_error_with_cluster_breaking_potential = True # bool | if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. 
WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. (optional) # example passing only required values which don't have defaults set try: @@ -11466,7 +11495,7 @@ with argo_workflows.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.delete_workflow_template(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run) + api_response = api_instance.delete_workflow_template(namespace, name, delete_options_grace_period_seconds=delete_options_grace_period_seconds, delete_options_preconditions_uid=delete_options_preconditions_uid, delete_options_preconditions_resource_version=delete_options_preconditions_resource_version, delete_options_orphan_dependents=delete_options_orphan_dependents, delete_options_propagation_policy=delete_options_propagation_policy, delete_options_dry_run=delete_options_dry_run, delete_options_ignore_store_read_error_with_cluster_breaking_potential=delete_options_ignore_store_read_error_with_cluster_breaking_potential) pprint(api_response) except argo_workflows.ApiException as e: print("Exception when calling WorkflowTemplateServiceApi->delete_workflow_template: %s\n" % e) @@ -11485,6 +11514,7 @@ Name | Type | Description | Notes **delete_options_orphan_dependents** | **bool**| Deprecated: please use the PropagationPolicy, this field will be deprecated in 
1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. +optional. | [optional] **delete_options_propagation_policy** | **str**| Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. +optional. | [optional] **delete_options_dry_run** | **[str]**| When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed +optional +listType=atomic. | [optional] + **delete_options_ignore_store_read_error_with_cluster_breaking_potential** | **bool**| if set to true, it will trigger an unsafe deletion of the resource in case the normal deletion flow fails with a corrupt object error. A resource is considered corrupt if it can not be retrieved from the underlying storage successfully because of a) its data can not be transformed e.g. decryption failure, or b) it fails to decode into an object. NOTE: unsafe deletion ignores finalizer constraints, skips precondition checks, and removes the object from the storage. WARNING: This may potentially break the cluster if the workload associated with the resource being unsafe-deleted relies on normal deletion flow. Use only if you REALLY know what you are doing. The default value is false, and the user must opt in to enable it +optional. 
| [optional] ### Return type @@ -12653,6 +12683,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -12666,6 +12697,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -13378,6 +13410,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -13676,6 +13709,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -14887,6 +14921,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16073,6 +16108,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -16188,6 +16224,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -16201,6 +16238,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -16406,6 +16444,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -17397,6 +17436,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", 
type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -18213,6 +18256,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -18511,6 +18555,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -19722,6 +19767,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -20908,6 +20954,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -21023,6 +21070,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -21036,6 +21084,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -21241,6 +21290,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -22232,6 +22282,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -22763,6 +22817,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + 
image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -24152,6 +24210,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -24165,6 +24224,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -24877,6 +24937,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -25175,6 +25236,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -26386,6 +26448,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27572,6 +27635,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -27687,6 +27751,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -27700,6 +27765,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -27905,6 +27971,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + 
request="request_example", ), ], limits={ @@ -28896,6 +28963,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, @@ -29712,6 +29783,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -30010,6 +30082,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -31221,6 +31294,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32407,6 +32481,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -32522,6 +32597,7 @@ with argo_workflows.ApiClient(configuration) as api_client: run_as_group=1, run_as_non_root=True, run_as_user=1, + se_linux_change_policy="se_linux_change_policy_example", se_linux_options=SELinuxOptions( level="level_example", role="role_example", @@ -32535,6 +32611,7 @@ with argo_workflows.ApiClient(configuration) as api_client: supplemental_groups=[ 1, ], + supplemental_groups_policy="supplemental_groups_policy_example", sysctls=[ Sysctl( name="name_example", @@ -32740,6 +32817,7 @@ with argo_workflows.ApiClient(configuration) as api_client: claims=[ ResourceClaim( name="name_example", + request="request_example", ), ], limits={ @@ -33731,6 +33809,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, 
chap_auth_session=True, @@ -34262,6 +34344,10 @@ with argo_workflows.ApiClient(configuration) as api_client: path="path_example", type="type_example", ), + image=ImageVolumeSource( + pull_policy="pull_policy_example", + reference="reference_example", + ), iscsi=ISCSIVolumeSource( chap_auth_discovery=True, chap_auth_session=True, diff --git a/server/artifacts/artifact_server.go b/server/artifacts/artifact_server.go index 711c54cfc1db..e92fca0f19d8 100644 --- a/server/artifacts/artifact_server.go +++ b/server/artifacts/artifact_server.go @@ -155,10 +155,7 @@ func (a *ArtifactServer) GetArtifactFile(w http.ResponseWriter, r *http.Request) return } - isInput := false - if direction == Inputs { - isInput = true - } + isInput := direction == Inputs artifact, driver, err := a.getArtifactAndDriver(ctx, nodeId, artifactName, isInput, wf, fileName) if err != nil { @@ -217,7 +214,7 @@ func (a *ArtifactServer) getArtifactFromPath(artifact *wfv1.Artifact, driver com dirs := map[string]bool{} // to de-dupe sub-dirs - _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", "..", ".."))) + _, _ = fmt.Fprintf(w, "
  • %s
  • \n", "..", "..") for _, object := range objects { @@ -226,11 +223,11 @@ func (a *ArtifactServer) getArtifactFromPath(artifact *wfv1.Artifact, driver com // if dir is empty string, we are in the root dir if dir == "" { - _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", file, file))) + _, _ = fmt.Fprintf(w, "
  • %s
  • \n", file, file) } else if dirs[dir] { continue } else { - _, _ = w.Write([]byte(fmt.Sprintf("
  • %s
  • \n", dir, dir))) + _, _ = fmt.Fprintf(w, "
  • %s
  • \n", dir, dir) dirs[dir] = true } } diff --git a/server/artifacts/artifact_server_test.go b/server/artifacts/artifact_server_test.go index e3a6fa920091..6db611236a30 100644 --- a/server/artifacts/artifact_server_test.go +++ b/server/artifacts/artifact_server_test.go @@ -79,7 +79,7 @@ var bucketsOfKeys = map[string][]string{ func (a *fakeArtifactDriver) OpenStream(artifact *wfv1.Artifact) (io.ReadCloser, error) { //fmt.Printf("deletethis: artifact=%+v\n", artifact) - key, err := artifact.ArtifactLocation.GetKey() + key, err := artifact.GetKey() if err != nil { return nil, err } @@ -477,7 +477,7 @@ func TestArtifactServer_GetArtifactFile(t *testing.T) { // verify that the files are contained in the listing we got back assert.Len(t, tt.directoryFiles, strings.Count(string(all), "
  • ")) for _, file := range tt.directoryFiles { - assert.True(t, strings.Contains(string(all), file)) + assert.Contains(t, string(all), file) } } else { assert.Equal(t, "my-data", string(all)) diff --git a/server/auth/authorizing_server_stream.go b/server/auth/authorizing_server_stream.go index 7d3c07010512..86aafc3ce9ae 100644 --- a/server/auth/authorizing_server_stream.go +++ b/server/auth/authorizing_server_stream.go @@ -35,7 +35,7 @@ func (l *authorizingServerStream) RecvMsg(m interface{}) error { if err != nil { return err } - ctx, err := l.Gatekeeper.ContextWithRequest(l.ctx, m) + ctx, err := l.ContextWithRequest(l.ctx, m) if err != nil { return err } diff --git a/server/auth/gatekeeper.go b/server/auth/gatekeeper.go index a7797800c292..54827d0663a0 100644 --- a/server/auth/gatekeeper.go +++ b/server/auth/gatekeeper.go @@ -10,8 +10,7 @@ import ( "github.com/argoproj/argo-workflows/v3/util/secrets" - eventsource "github.com/argoproj/argo-events/pkg/client/eventsource/clientset/versioned" - sensor "github.com/argoproj/argo-events/pkg/client/sensor/clientset/versioned" + events "github.com/argoproj/argo-events/pkg/client/clientset/versioned" log "github.com/sirupsen/logrus" "google.golang.org/grpc" "google.golang.org/grpc/codes" @@ -38,12 +37,11 @@ import ( type ContextKey string const ( - DynamicKey ContextKey = "dynamic.Interface" - WfKey ContextKey = "workflow.Interface" - SensorKey ContextKey = "sensor.Interface" - EventSourceKey ContextKey = "eventsource.Interface" - KubeKey ContextKey = "kubernetes.Interface" - ClaimsKey ContextKey = "types.Claims" + DynamicKey ContextKey = "dynamic.Interface" + WfKey ContextKey = "workflow.Interface" + EventsKey ContextKey = "events.Interface" + KubeKey ContextKey = "kubernetes.Interface" + ClaimsKey ContextKey = "types.Claims" ) //go:generate mockery --name=Gatekeeper @@ -112,8 +110,7 @@ func (s *gatekeeper) ContextWithRequest(ctx context.Context, req interface{}) (c } ctx = context.WithValue(ctx, DynamicKey, 
clients.Dynamic) ctx = context.WithValue(ctx, WfKey, clients.Workflow) - ctx = context.WithValue(ctx, EventSourceKey, clients.EventSource) - ctx = context.WithValue(ctx, SensorKey, clients.Sensor) + ctx = context.WithValue(ctx, EventsKey, clients.Events) ctx = context.WithValue(ctx, KubeKey, clients.Kubernetes) ctx = context.WithValue(ctx, ClaimsKey, claims) return ctx, nil @@ -131,12 +128,8 @@ func GetWfClient(ctx context.Context) workflow.Interface { return ctx.Value(WfKey).(workflow.Interface) } -func GetEventSourceClient(ctx context.Context) eventsource.Interface { - return ctx.Value(EventSourceKey).(eventsource.Interface) -} - -func GetSensorClient(ctx context.Context) sensor.Interface { - return ctx.Value(SensorKey).(sensor.Interface) +func GetEventsClient(ctx context.Context) events.Interface { + return ctx.Value(EventsKey).(events.Interface) } func GetKubeClient(ctx context.Context) kubernetes.Interface { @@ -351,24 +344,19 @@ func DefaultClientForAuthorization(authorization string, config *rest.Config) (* if err != nil { return nil, nil, fmt.Errorf("failure to create workflow client: %w", err) } - eventSourceClient, err := eventsource.NewForConfig(restConfig) - if err != nil { - return nil, nil, fmt.Errorf("failure to create event source client: %w", err) - } - sensorClient, err := sensor.NewForConfig(restConfig) + eventsClient, err := events.NewForConfig(restConfig) if err != nil { - return nil, nil, fmt.Errorf("failure to create sensor client: %w", err) + return nil, nil, fmt.Errorf("failure to create events client: %w", err) } kubeClient, err := kubernetes.NewForConfig(restConfig) if err != nil { return nil, nil, fmt.Errorf("failure to create kubernetes client: %w", err) } return restConfig, &servertypes.Clients{ - Dynamic: dynamicClient, - Workflow: wfClient, - Sensor: sensorClient, - EventSource: eventSourceClient, - Kubernetes: kubeClient, + Dynamic: dynamicClient, + Workflow: wfClient, + Events: eventsClient, + Kubernetes: kubeClient, }, nil } diff 
--git a/server/auth/mocks/Gatekeeper.go b/server/auth/mocks/Gatekeeper.go index a5f25ff32023..a5f35a0df9f0 100644 --- a/server/auth/mocks/Gatekeeper.go +++ b/server/auth/mocks/Gatekeeper.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -75,7 +75,7 @@ func (_m *Gatekeeper) ContextWithRequest(ctx context.Context, req interface{}) ( return r0, r1 } -// StreamServerInterceptor provides a mock function with given fields: +// StreamServerInterceptor provides a mock function with no fields func (_m *Gatekeeper) StreamServerInterceptor() grpc.StreamServerInterceptor { ret := _m.Called() @@ -95,7 +95,7 @@ func (_m *Gatekeeper) StreamServerInterceptor() grpc.StreamServerInterceptor { return r0 } -// UnaryServerInterceptor provides a mock function with given fields: +// UnaryServerInterceptor provides a mock function with no fields func (_m *Gatekeeper) UnaryServerInterceptor() grpc.UnaryServerInterceptor { ret := _m.Called() diff --git a/server/auth/sso/mocks/Interface.go b/server/auth/sso/mocks/Interface.go index 1bfab2251c75..9ef7c87d82c4 100644 --- a/server/auth/sso/mocks/Interface.go +++ b/server/auth/sso/mocks/Interface.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -55,7 +55,7 @@ func (_m *Interface) HandleRedirect(writer http.ResponseWriter, request *http.Re _m.Called(writer, request) } -// IsRBACEnabled provides a mock function with given fields: +// IsRBACEnabled provides a mock function with no fields func (_m *Interface) IsRBACEnabled() bool { ret := _m.Called() diff --git a/server/auth/sso/sso.go b/server/auth/sso/sso.go index 386d11d450f8..6f072b8eb452 100644 --- a/server/auth/sso/sso.go +++ b/server/auth/sso/sso.go @@ -351,7 +351,7 @@ func (s *sso) HandleCallback(w http.ResponseWriter, r *http.Request) { if strings.HasPrefix(cookie.Value, prefix) { redirect = cookie.Value } - http.Redirect(w, r, redirect, 302) + http.Redirect(w, r, redirect, http.StatusFound) } // authorize verifies a bearer token and pulls user information form the claims. diff --git a/server/auth/sso/sso_test.go b/server/auth/sso/sso_test.go index 217266061f4c..780daa5cd53b 100644 --- a/server/auth/sso/sso_test.go +++ b/server/auth/sso/sso_test.go @@ -70,7 +70,7 @@ func TestLoadSsoClientIdFromSecret(t *testing.T) { assert.Equal(t, "sso-client-id-value", ssoObject.config.ClientID) assert.Equal(t, "sso-client-secret-value", ssoObject.config.ClientSecret) assert.Equal(t, "argo_groups", ssoObject.customClaimName) - assert.Equal(t, "", config.IssuerAlias) + assert.Empty(t, config.IssuerAlias) assert.Equal(t, 10*time.Hour, ssoObject.expiry) } diff --git a/server/auth/types/claims.go b/server/auth/types/claims.go index ecab3d54637f..9d170b80d8ae 100644 --- a/server/auth/types/claims.go +++ b/server/auth/types/claims.go @@ -38,7 +38,7 @@ func init() { // json.Unmarshal to mash every claim into a custom map func (c *Claims) UnmarshalJSON(data []byte) error { type claimAlias Claims - var localClaim claimAlias = claimAlias(*c) + var localClaim = claimAlias(*c) // Populate the claims struct as much as possible err := json.Unmarshal(data, &localClaim) diff --git a/server/auth/webhook/interceptor_test.go b/server/auth/webhook/interceptor_test.go 
index cf8af53f6a92..5026ad943cee 100644 --- a/server/auth/webhook/interceptor_test.go +++ b/server/auth/webhook/interceptor_test.go @@ -36,7 +36,7 @@ func TestInterceptor(t *testing.T) { assert.Empty(t, r.Header["Authorization"]) // we check the status code here - because we get a 403 assert.Equal(t, 403, w.Code) - assert.Equal(t, `{"message": "failed to process webhook request"}`, w.Body.String()) + assert.JSONEq(t, `{"message": "failed to process webhook request"}`, w.Body.String()) }) t.Run("NoDiscriminator", func(t *testing.T) { r, _ := intercept("POST", "/api/v1/events/my-ns/", nil) diff --git a/server/event/dispatch/operation_test.go b/server/event/dispatch/operation_test.go index ca37c5c22522..8169a441ae74 100644 --- a/server/event/dispatch/operation_test.go +++ b/server/event/dispatch/operation_test.go @@ -395,7 +395,7 @@ func Test_populateWorkflowMetadata(t *testing.T) { } assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow name expression: unexpected token Operator(\"..\") (1:10)\n | payload.......foo[.numeric]\n | .........^", <-recorder.Events) - assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow label \"invalidLabel\" expression: cannot use pointer accessor outside closure (1:6)\n | foo...bar\n | .....^", <-recorder.Events) + assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow label \"invalidLabel\" expression: unexpected token Operator(\".\") (1:6)\n | foo...bar\n | .....^", <-recorder.Events) assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow annotation \"invalidAnnotation\" expression: expected name (1:6)\n | foo.[..]bar\n | .....^", <-recorder.Events) assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: workflow name expression must evaluate to a string, not a float64", <-recorder.Events) assert.Equal(t, "Warning 
WorkflowEventBindingError failed to dispatch event: workflow name expression must evaluate to a string, not a bool", <-recorder.Events) diff --git a/server/eventsource/event_source_server.go b/server/eventsource/event_source_server.go index f70d009af7ae..9fefd6d3df14 100644 --- a/server/eventsource/event_source_server.go +++ b/server/eventsource/event_source_server.go @@ -5,7 +5,7 @@ import ( "encoding/json" "io" - esv1 "github.com/argoproj/argo-events/pkg/apis/eventsource/v1alpha1" + eventsv1a1 "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1" "google.golang.org/grpc/codes" corev1 "k8s.io/api/core/v1" apierr "k8s.io/apimachinery/pkg/api/errors" @@ -20,8 +20,8 @@ import ( type eventSourceServer struct{} -func (e *eventSourceServer) CreateEventSource(ctx context.Context, in *eventsourcepkg.CreateEventSourceRequest) (*esv1.EventSource, error) { - client := auth.GetEventSourceClient(ctx) +func (e *eventSourceServer) CreateEventSource(ctx context.Context, in *eventsourcepkg.CreateEventSourceRequest) (*eventsv1a1.EventSource, error) { + client := auth.GetEventsClient(ctx) es, err := client.ArgoprojV1alpha1().EventSources(in.Namespace).Create(ctx, in.EventSource, metav1.CreateOptions{}) if err != nil { @@ -30,8 +30,8 @@ func (e *eventSourceServer) CreateEventSource(ctx context.Context, in *eventsour return es, nil } -func (e *eventSourceServer) GetEventSource(ctx context.Context, in *eventsourcepkg.GetEventSourceRequest) (*esv1.EventSource, error) { - client := auth.GetEventSourceClient(ctx) +func (e *eventSourceServer) GetEventSource(ctx context.Context, in *eventsourcepkg.GetEventSourceRequest) (*eventsv1a1.EventSource, error) { + client := auth.GetEventsClient(ctx) es, err := client.ArgoprojV1alpha1().EventSources(in.Namespace).Get(ctx, in.Name, metav1.GetOptions{}) if err != nil { @@ -41,7 +41,7 @@ func (e *eventSourceServer) GetEventSource(ctx context.Context, in *eventsourcep } func (e *eventSourceServer) DeleteEventSource(ctx context.Context, in 
*eventsourcepkg.DeleteEventSourceRequest) (*eventsourcepkg.EventSourceDeletedResponse, error) { - client := auth.GetEventSourceClient(ctx) + client := auth.GetEventsClient(ctx) err := client.ArgoprojV1alpha1().EventSources(in.Namespace).Delete(ctx, in.Name, metav1.DeleteOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -49,8 +49,8 @@ func (e *eventSourceServer) DeleteEventSource(ctx context.Context, in *eventsour return &eventsourcepkg.EventSourceDeletedResponse{}, nil } -func (e *eventSourceServer) UpdateEventSource(ctx context.Context, in *eventsourcepkg.UpdateEventSourceRequest) (*esv1.EventSource, error) { - client := auth.GetEventSourceClient(ctx) +func (e *eventSourceServer) UpdateEventSource(ctx context.Context, in *eventsourcepkg.UpdateEventSourceRequest) (*eventsv1a1.EventSource, error) { + client := auth.GetEventsClient(ctx) es, err := client.ArgoprojV1alpha1().EventSources(in.Namespace).Update(ctx, in.EventSource, metav1.UpdateOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -58,8 +58,8 @@ func (e *eventSourceServer) UpdateEventSource(ctx context.Context, in *eventsour return es, nil } -func (e *eventSourceServer) ListEventSources(ctx context.Context, in *eventsourcepkg.ListEventSourcesRequest) (*esv1.EventSourceList, error) { - client := auth.GetEventSourceClient(ctx) +func (e *eventSourceServer) ListEventSources(ctx context.Context, in *eventsourcepkg.ListEventSourcesRequest) (*eventsv1a1.EventSourceList, error) { + client := auth.GetEventsClient(ctx) list, err := client.ArgoprojV1alpha1().EventSources(in.Namespace).List(ctx, metav1.ListOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -108,7 +108,7 @@ func (e *eventSourceServer) WatchEventSources(in *eventsourcepkg.ListEventSource if in.ListOptions != nil { listOptions = *in.ListOptions } - eventSourceInterface := auth.GetEventSourceClient(ctx).ArgoprojV1alpha1().EventSources(in.Namespace) + 
eventSourceInterface := auth.GetEventsClient(ctx).ArgoprojV1alpha1().EventSources(in.Namespace) watcher, err := eventSourceInterface.Watch(ctx, listOptions) if err != nil { return sutils.ToStatusError(err, codes.Internal) @@ -121,7 +121,7 @@ func (e *eventSourceServer) WatchEventSources(in *eventsourcepkg.ListEventSource if !open { return sutils.ToStatusError(io.EOF, codes.ResourceExhausted) } - es, ok := event.Object.(*esv1.EventSource) + es, ok := event.Object.(*eventsv1a1.EventSource) if !ok { return sutils.ToStatusError(apierr.FromObject(event.Object), codes.Internal) } diff --git a/server/info/info_server_test.go b/server/info/info_server_test.go index 5b747223ef89..a4cb6ada60db 100644 --- a/server/info/info_server_test.go +++ b/server/info/info_server_test.go @@ -53,9 +53,9 @@ func Test_infoServer_GetInfo(t *testing.T) { i := &infoServer{} info, err := i.GetInfo(context.TODO(), nil) require.NoError(t, err) - assert.Equal(t, "", info.ManagedNamespace) + assert.Empty(t, info.ManagedNamespace) assert.Empty(t, info.Links) assert.Empty(t, info.Columns) - assert.Equal(t, "", info.NavColor) + assert.Empty(t, info.NavColor) }) } diff --git a/server/sensor/sensor_server.go b/server/sensor/sensor_server.go index 77f273984a6e..759f62bc8831 100644 --- a/server/sensor/sensor_server.go +++ b/server/sensor/sensor_server.go @@ -5,7 +5,7 @@ import ( "encoding/json" "io" - sv1 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1" + eventsv1a1 "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1" "google.golang.org/grpc/codes" corev1 "k8s.io/api/core/v1" apierr "k8s.io/apimachinery/pkg/api/errors" @@ -19,8 +19,8 @@ import ( type sensorServer struct{} -func (s *sensorServer) ListSensors(ctx context.Context, in *sensorpkg.ListSensorsRequest) (*sv1.SensorList, error) { - client := auth.GetSensorClient(ctx) +func (s *sensorServer) ListSensors(ctx context.Context, in *sensorpkg.ListSensorsRequest) (*eventsv1a1.SensorList, error) { + client := auth.GetEventsClient(ctx) 
list, err := client.ArgoprojV1alpha1().Sensors(in.Namespace).List(ctx, metav1.ListOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -28,8 +28,8 @@ func (s *sensorServer) ListSensors(ctx context.Context, in *sensorpkg.ListSensor return list, nil } -func (s *sensorServer) GetSensor(ctx context.Context, in *sensorpkg.GetSensorRequest) (*sv1.Sensor, error) { - client := auth.GetSensorClient(ctx) +func (s *sensorServer) GetSensor(ctx context.Context, in *sensorpkg.GetSensorRequest) (*eventsv1a1.Sensor, error) { + client := auth.GetEventsClient(ctx) sensor, err := client.ArgoprojV1alpha1().Sensors(in.Namespace).Get(ctx, in.Name, metav1.GetOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -37,8 +37,8 @@ func (s *sensorServer) GetSensor(ctx context.Context, in *sensorpkg.GetSensorReq return sensor, nil } -func (s *sensorServer) CreateSensor(ctx context.Context, in *sensorpkg.CreateSensorRequest) (*sv1.Sensor, error) { - client := auth.GetSensorClient(ctx) +func (s *sensorServer) CreateSensor(ctx context.Context, in *sensorpkg.CreateSensorRequest) (*eventsv1a1.Sensor, error) { + client := auth.GetEventsClient(ctx) sensor, err := client.ArgoprojV1alpha1().Sensors(in.Namespace).Create(ctx, in.Sensor, metav1.CreateOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -46,8 +46,8 @@ func (s *sensorServer) CreateSensor(ctx context.Context, in *sensorpkg.CreateSen return sensor, nil } -func (s *sensorServer) UpdateSensor(ctx context.Context, in *sensorpkg.UpdateSensorRequest) (*sv1.Sensor, error) { - client := auth.GetSensorClient(ctx) +func (s *sensorServer) UpdateSensor(ctx context.Context, in *sensorpkg.UpdateSensorRequest) (*eventsv1a1.Sensor, error) { + client := auth.GetEventsClient(ctx) sensor, err := client.ArgoprojV1alpha1().Sensors(in.Namespace).Update(ctx, in.Sensor, metav1.UpdateOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) @@ -56,7 
+56,7 @@ func (s *sensorServer) UpdateSensor(ctx context.Context, in *sensorpkg.UpdateSen } func (s *sensorServer) DeleteSensor(ctx context.Context, in *sensorpkg.DeleteSensorRequest) (*sensorpkg.DeleteSensorResponse, error) { - client := auth.GetSensorClient(ctx) + client := auth.GetEventsClient(ctx) if err := client.ArgoprojV1alpha1().Sensors(in.Namespace).Delete(ctx, in.Name, metav1.DeleteOptions{}); err != nil { return nil, sutils.ToStatusError(err, codes.Internal) } @@ -101,7 +101,7 @@ func (s *sensorServer) WatchSensors(in *sensorpkg.ListSensorsRequest, srv sensor if in.ListOptions != nil { listOptions = *in.ListOptions } - eventSourceInterface := auth.GetSensorClient(ctx).ArgoprojV1alpha1().Sensors(in.Namespace) + eventSourceInterface := auth.GetEventsClient(ctx).ArgoprojV1alpha1().Sensors(in.Namespace) watcher, err := eventSourceInterface.Watch(ctx, listOptions) if err != nil { return sutils.ToStatusError(err, codes.Internal) @@ -114,7 +114,7 @@ func (s *sensorServer) WatchSensors(in *sensorpkg.ListSensorsRequest, srv sensor if !open { return sutils.ToStatusError(io.EOF, codes.ResourceExhausted) } - es, ok := event.Object.(*sv1.Sensor) + es, ok := event.Object.(*eventsv1a1.Sensor) if !ok { return sutils.ToStatusError(apierr.FromObject(event.Object), codes.Internal) } diff --git a/server/sensor/sensor_server_test.go b/server/sensor/sensor_server_test.go index 2175c04bd014..8774d4b265cb 100644 --- a/server/sensor/sensor_server_test.go +++ b/server/sensor/sensor_server_test.go @@ -5,8 +5,8 @@ import ( "fmt" "testing" - sv1 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1" - "github.com/argoproj/argo-events/pkg/client/sensor/clientset/versioned/typed/sensor/v1alpha1" + eventsv1a1 "github.com/argoproj/argo-events/pkg/apis/events/v1alpha1" + "github.com/argoproj/argo-events/pkg/client/clientset/versioned/typed/events/v1alpha1" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -24,9 +24,9 @@ func 
(m *MockSensorClient) ArgoprojV1alpha1Sensor() v1alpha1.SensorInterface { return nil } -func (m *MockSensorClient) List(ctx context.Context, opts metav1.ListOptions) (*sv1.SensorList, error) { - sensorList := &sv1.SensorList{ - Items: []sv1.Sensor{ +func (m *MockSensorClient) List(ctx context.Context, opts metav1.ListOptions) (*eventsv1a1.SensorList, error) { + sensorList := &eventsv1a1.SensorList{ + Items: []eventsv1a1.Sensor{ {ObjectMeta: metav1.ObjectMeta{Name: "sensor1"}}, {ObjectMeta: metav1.ObjectMeta{Name: "sensor2"}}, }, @@ -38,7 +38,7 @@ type mockSensorServer struct { sensorClient v1alpha1.SensorInterface } -func (s *mockSensorServer) ListSensors(ctx context.Context, req *sensorpkg.ListSensorsRequest) (*sv1.SensorList, error) { +func (s *mockSensorServer) ListSensors(ctx context.Context, req *sensorpkg.ListSensorsRequest) (*eventsv1a1.SensorList, error) { if s.sensorClient == nil { return nil, fmt.Errorf("sensor client is not set") } @@ -56,7 +56,7 @@ func TestListSensors(t *testing.T) { mockClient := &MockSensorClient{ctrl: ctrl} - ctx := context.WithValue(context.Background(), auth.SensorKey, mockClient) + ctx := context.WithValue(context.Background(), auth.EventsKey, mockClient) server := &mockSensorServer{ sensorClient: mockClient.ArgoprojV1alpha1Sensor(), @@ -80,7 +80,7 @@ func TestListSensors_SensorClientNotSet(t *testing.T) { mockClient := &MockSensorClient{ctrl: ctrl} - ctx := context.WithValue(context.Background(), auth.SensorKey, mockClient) + ctx := context.WithValue(context.Background(), auth.EventsKey, mockClient) server := &mockSensorServer{ sensorClient: mockClient.ArgoprojV1alpha1Sensor(), diff --git a/server/types/clients.go b/server/types/clients.go index d9033d82371d..6d560bcf1151 100644 --- a/server/types/clients.go +++ b/server/types/clients.go @@ -1,8 +1,7 @@ package types import ( - eventsource "github.com/argoproj/argo-events/pkg/client/eventsource/clientset/versioned" - sensor 
"github.com/argoproj/argo-events/pkg/client/sensor/clientset/versioned" + events "github.com/argoproj/argo-events/pkg/client/clientset/versioned" "k8s.io/client-go/dynamic" "k8s.io/client-go/kubernetes" @@ -10,9 +9,8 @@ import ( ) type Clients struct { - Dynamic dynamic.Interface - Workflow workflow.Interface - Sensor sensor.Interface - EventSource eventsource.Interface - Kubernetes kubernetes.Interface + Dynamic dynamic.Interface + Workflow workflow.Interface + Events events.Interface + Kubernetes kubernetes.Interface } diff --git a/server/workflow/workflow_server.go b/server/workflow/workflow_server.go index e575dc0e0ea1..59f2cd844522 100644 --- a/server/workflow/workflow_server.go +++ b/server/workflow/workflow_server.go @@ -751,7 +751,7 @@ func getLatestWorkflow(ctx context.Context, wfClient versioned.Interface, namesp } latest := wfList.Items[0] for _, wf := range wfList.Items { - if latest.ObjectMeta.CreationTimestamp.Before(&wf.ObjectMeta.CreationTimestamp) { + if latest.CreationTimestamp.Before(&wf.CreationTimestamp) { latest = wf } } diff --git a/server/workflowarchive/archived_workflow_server.go b/server/workflowarchive/archived_workflow_server.go index 980352dd5cfa..67f7149852b3 100644 --- a/server/workflowarchive/archived_workflow_server.go +++ b/server/workflowarchive/archived_workflow_server.go @@ -245,8 +245,8 @@ func (w *archivedWorkflowServer) RetryArchivedWorkflow(ctx context.Context, req return nil, sutils.ToStatusError(err, codes.Internal) } - wf.ObjectMeta.ResourceVersion = "" - wf.ObjectMeta.UID = "" + wf.ResourceVersion = "" + wf.UID = "" result, err := wfClient.ArgoprojV1alpha1().Workflows(req.Namespace).Create(ctx, wf, metav1.CreateOptions{}) if err != nil { return nil, sutils.ToStatusError(err, codes.Internal) diff --git a/server/workflowarchive/archived_workflow_server_test.go b/server/workflowarchive/archived_workflow_server_test.go index 3f66a5919c09..0de85ec901e5 100644 --- a/server/workflowarchive/archived_workflow_server_test.go +++ 
b/server/workflowarchive/archived_workflow_server_test.go @@ -21,7 +21,6 @@ import ( "github.com/argoproj/argo-workflows/v3/persist/sqldb/mocks" workflowarchivepkg "github.com/argoproj/argo-workflows/v3/pkg/apiclient/workflowarchive" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" argofake "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned/fake" "github.com/argoproj/argo-workflows/v3/server/auth" sutils "github.com/argoproj/argo-workflows/v3/server/utils" @@ -54,31 +53,31 @@ func Test_archivedWorkflowServer(t *testing.T) { }, nil }) // two pages of results for limit 1 - repo.On("ListWorkflows", sutils.ListOptions{Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}, {}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Limit: 2, Offset: 1}).Return(wfv1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}, {}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Limit: 2, Offset: 1}).Return(v1alpha1.Workflows{{}}, nil) minStartAt, _ := time.Parse(time.RFC3339, "2020-01-01T00:00:00Z") maxStartAt, _ := time.Parse(time.RFC3339, "2020-01-02T00:00:00Z") createdTime := metav1.Time{Time: time.Now().UTC()} finishedTime := metav1.Time{Time: createdTime.Add(time.Second * 2)} - repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "", NamePrefix: "", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Namespace: 
"", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0, ShowRemainingItemCount: true}).Return(wfv1.Workflows{{}}, nil) - repo.On("ListWorkflows", sutils.ListOptions{Namespace: "user-ns", Name: "", NamePrefix: "", MinStartedAt: time.Time{}, MaxStartedAt: time.Time{}, Limit: 2, Offset: 0}).Return(wfv1.Workflows{{}, {}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "", NamePrefix: "", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0, ShowRemainingItemCount: true}).Return(v1alpha1.Workflows{{}}, nil) + repo.On("ListWorkflows", sutils.ListOptions{Namespace: "user-ns", Name: "", NamePrefix: "", MinStartedAt: time.Time{}, MaxStartedAt: time.Time{}, Limit: 2, Offset: 0}).Return(v1alpha1.Workflows{{}, {}}, nil) repo.On("CountWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, 
Offset: 0}).Return(int64(5), nil) repo.On("CountWorkflows", sutils.ListOptions{Namespace: "", Name: "my-name", NamePrefix: "my-", MinStartedAt: minStartAt, MaxStartedAt: maxStartAt, Limit: 2, Offset: 0, ShowRemainingItemCount: true}).Return(int64(5), nil) repo.On("GetWorkflow", "", "", "").Return(nil, nil) - repo.On("GetWorkflow", "my-uid", "", "").Return(&wfv1.Workflow{ + repo.On("GetWorkflow", "my-uid", "", "").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "my-name"}, - Spec: wfv1.WorkflowSpec{ + Spec: v1alpha1.WorkflowSpec{ Entrypoint: "my-entrypoint", - Templates: []wfv1.Template{ + Templates: []v1alpha1.Template{ {Name: "my-entrypoint", Container: &apiv1.Container{}}, }, }, }, nil) - repo.On("GetWorkflow", "failed-uid", "", "").Return(&wfv1.Workflow{ + repo.On("GetWorkflow", "failed-uid", "", "").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{ Name: "failed-wf", Labels: map[string]string{ @@ -86,44 +85,44 @@ func Test_archivedWorkflowServer(t *testing.T) { common.LabelKeyWorkflowArchivingStatus: "Pending", }, }, - Status: wfv1.WorkflowStatus{ - Phase: wfv1.WorkflowFailed, + Status: v1alpha1.WorkflowStatus{ + Phase: v1alpha1.WorkflowFailed, StartedAt: createdTime, FinishedAt: finishedTime, - Nodes: map[string]wfv1.NodeStatus{ - "failed-node": {Name: "failed-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: wfv1.NodeFailed, Message: "failed"}, - "succeeded-node": {Name: "succeeded-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: wfv1.NodeSucceeded, Message: "succeeded"}}, + Nodes: map[string]v1alpha1.NodeStatus{ + "failed-node": {Name: "failed-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: v1alpha1.NodeFailed, Message: "failed"}, + "succeeded-node": {Name: "succeeded-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: v1alpha1.NodeSucceeded, Message: "succeeded"}}, }, }, nil) - repo.On("GetWorkflow", "resubmit-uid", "", "").Return(&wfv1.Workflow{ + repo.On("GetWorkflow", 
"resubmit-uid", "", "").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "resubmit-wf"}, - Spec: wfv1.WorkflowSpec{ + Spec: v1alpha1.WorkflowSpec{ Entrypoint: "my-entrypoint", - Templates: []wfv1.Template{ + Templates: []v1alpha1.Template{ {Name: "my-entrypoint", Container: &apiv1.Container{Image: "docker/whalesay:latest"}}, }, }, }, nil) wfClient.AddReactor("create", "workflows", func(action k8stesting.Action) (handled bool, ret runtime.Object, err error) { - return true, &wfv1.Workflow{ + return true, &v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "my-name-resubmitted"}, }, nil }) repo.On("DeleteWorkflow", "my-uid").Return(nil) - repo.On("ListWorkflowsLabelKeys").Return(&wfv1.LabelKeys{ + repo.On("ListWorkflowsLabelKeys").Return(&v1alpha1.LabelKeys{ Items: []string{"foo", "bar"}, }, nil) - repo.On("ListWorkflowsLabelValues", "my-key").Return(&wfv1.LabelValues{ + repo.On("ListWorkflowsLabelValues", "my-key").Return(&v1alpha1.LabelValues{ Items: []string{"my-key=foo", "my-key=bar"}, }, nil) - repo.On("RetryWorkflow", "failed-uid").Return(&wfv1.Workflow{ + repo.On("RetryWorkflow", "failed-uid").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "failed-wf"}, }, nil) - repo.On("ResubmitWorkflow", "my-uid").Return(&wfv1.Workflow{ + repo.On("ResubmitWorkflow", "my-uid").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "my-name"}, - Spec: wfv1.WorkflowSpec{ + Spec: v1alpha1.WorkflowSpec{ Entrypoint: "my-entrypoint", - Templates: []wfv1.Template{ + Templates: []v1alpha1.Template{ {Name: "my-entrypoint", Container: &apiv1.Container{}}, }, }, @@ -162,7 +161,7 @@ func Test_archivedWorkflowServer(t *testing.T) { resp, err = w.ListArchivedWorkflows(ctx, &workflowarchivepkg.ListArchivedWorkflowsRequest{ListOptions: &metav1.ListOptions{FieldSelector: "metadata.name=my-name,spec.startedAt>2020-01-01T00:00:00Z,spec.startedAt<2020-01-02T00:00:00Z,ext.showRemainingItemCount=true", Limit: 1}, NamePrefix: "my-"}) require.NoError(t, 
err) assert.Len(t, resp.Items, 1) - assert.Equal(t, int64(4), *resp.ListMeta.RemainingItemCount) + assert.Equal(t, int64(4), *resp.RemainingItemCount) assert.Empty(t, resp.Continue) /////// Currently, for the purpose of backward compatibility, namespace is supported both as its own query parameter and as part of the field selector /////// need to test both @@ -199,11 +198,11 @@ func Test_archivedWorkflowServer(t *testing.T) { require.NoError(t, err) assert.NotNil(t, wf) - repo.On("GetWorkflow", "", "my-ns", "my-name").Return(&wfv1.Workflow{ + repo.On("GetWorkflow", "", "my-ns", "my-name").Return(&v1alpha1.Workflow{ ObjectMeta: metav1.ObjectMeta{Name: "my-name", Namespace: "my-ns"}, - Spec: wfv1.WorkflowSpec{ + Spec: v1alpha1.WorkflowSpec{ Entrypoint: "my-entrypoint", - Templates: []wfv1.Template{ + Templates: []v1alpha1.Template{ {Name: "my-entrypoint", Container: &apiv1.Container{}}, }, }, diff --git a/server/workflowtemplate/workflow_template_server.go b/server/workflowtemplate/workflow_template_server.go index 90fe7040f2f6..3100dacc6cd4 100644 --- a/server/workflowtemplate/workflow_template_server.go +++ b/server/workflowtemplate/workflow_template_server.go @@ -108,7 +108,7 @@ func cursorPaginationByResourceVersion(items []v1alpha1.WorkflowTemplate, resour // For the next pagination, the resourceVersion of the last item is set in the Continue field. 
if limit != 0 && len(wfList.Items) == int(limit) { lastIndex := len(wfList.Items) - 1 - wfList.ListMeta.Continue = wfList.Items[lastIndex].ResourceVersion + wfList.Continue = wfList.Items[lastIndex].ResourceVersion } } @@ -143,7 +143,7 @@ func (wts *WorkflowTemplateServer) ListWorkflowTemplates(ctx context.Context, re var items []v1alpha1.WorkflowTemplate if req.NamePattern != "" { for _, item := range wfList.Items { - if strings.Contains(item.ObjectMeta.Name, req.NamePattern) { + if strings.Contains(item.Name, req.NamePattern) { items = append(items, item) } } diff --git a/test/e2e/cli_test.go b/test/e2e/cli_test.go index bfe327950138..0ed9cbf30521 100644 --- a/test/e2e/cli_test.go +++ b/test/e2e/cli_test.go @@ -1405,14 +1405,14 @@ func (s *CLISuite) TestCronCommands() { s.Run("Create Name Override", func() { s.Given().RunCli([]string{"cron", "create", "cron/basic.yaml", "--name", "basic-cron-wf-overridden-name", "-l", "workflows.argoproj.io/test=true"}, func(t *testing.T, output string, err error) { require.NoError(t, err) - assert.Contains(t, strings.Replace(output, " ", "", -1), "Name:basic-cron-wf-overridden-name") + assert.Contains(t, strings.ReplaceAll(output, " ", ""), "Name:basic-cron-wf-overridden-name") }) }) s.Run("Create GenerateName Override", func() { s.Given().RunCli([]string{"cron", "create", "cron/basic.yaml", "--generate-name", "basic-cron-wf-overridden-generate-name-", "-l", "workflows.argoproj.io/test=true"}, func(t *testing.T, output string, err error) { require.NoError(t, err) - assert.Contains(t, strings.Replace(output, " ", "", -1), "Name:basic-cron-wf-overridden-generate-name-") + assert.Contains(t, strings.ReplaceAll(output, " ", ""), "Name:basic-cron-wf-overridden-generate-name-") }) }) diff --git a/test/e2e/cron_test.go b/test/e2e/cron_test.go index 3e51d6ef618b..d194d724e2c2 100644 --- a/test/e2e/cron_test.go +++ b/test/e2e/cron_test.go @@ -29,7 +29,7 @@ type CronSuite struct { func (s *CronSuite) SetupSuite() { s.E2ESuite.SetupSuite() 
// Since tests run in parallel, delete all cron resources before the test suite is run - s.E2ESuite.DeleteResources() + s.DeleteResources() } func (s *CronSuite) BeforeTest(suiteName, testName string) { @@ -37,7 +37,7 @@ func (s *CronSuite) BeforeTest(suiteName, testName string) { } func (s *CronSuite) TearDownSuite() { - s.E2ESuite.DeleteResources() + s.DeleteResources() s.E2ESuite.TearDownSuite() } @@ -71,7 +71,7 @@ spec: Then(). ExpectCron(func(t *testing.T, cronWf *wfv1.CronWorkflow) { assert.Equal(t, cronWf.Spec.GetScheduleWithTimezoneString(), cronWf.GetLatestSchedule()) - assert.True(t, cronWf.Status.LastScheduledTime.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, cronWf.Status.LastScheduledTime.After(time.Now().Add(-1*time.Minute))) }) }) s.Run("TestBasicTimezone", func() { @@ -109,7 +109,7 @@ spec: Then(). ExpectCron(func(t *testing.T, cronWf *wfv1.CronWorkflow) { assert.Equal(t, cronWf.Spec.GetScheduleWithTimezoneString(), cronWf.GetLatestSchedule()) - assert.True(t, cronWf.Status.LastScheduledTime.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, cronWf.Status.LastScheduledTime.After(time.Now().Add(-1*time.Minute))) }) }) s.Run("TestSuspend", func() { @@ -272,7 +272,7 @@ spec: ExpectCron(func(t *testing.T, cronWf *wfv1.CronWorkflow) { assert.Len(t, cronWf.Status.Active, 1) require.NotNil(t, cronWf.Status.LastScheduledTime) - assert.True(t, cronWf.Status.LastScheduledTime.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, cronWf.Status.LastScheduledTime.After(time.Now().Add(-1*time.Minute))) }) }) s.Run("TestSuccessfulJobHistoryLimit", func() { @@ -306,7 +306,7 @@ spec: Then(). 
ExpectWorkflowList(listOptions, func(t *testing.T, wfList *wfv1.WorkflowList) { assert.Len(t, wfList.Items, 1) - assert.True(t, wfList.Items[0].Status.FinishedAt.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, wfList.Items[0].Status.FinishedAt.After(time.Now().Add(-1*time.Minute))) }) }) s.Run("TestFailedJobHistoryLimit", func() { @@ -341,7 +341,7 @@ spec: Then(). ExpectWorkflowList(listOptions, func(t *testing.T, wfList *wfv1.WorkflowList) { assert.Len(t, wfList.Items, 1) - assert.True(t, wfList.Items[0].Status.FinishedAt.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, wfList.Items[0].Status.FinishedAt.After(time.Now().Add(-1*time.Minute))) }) }) s.Run("TestStoppingConditionWithSucceeded", func() { @@ -447,7 +447,7 @@ spec: Then(). ExpectCron(func(t *testing.T, cronWf *wfv1.CronWorkflow) { assert.Equal(t, cronWf.Spec.GetScheduleWithTimezoneString(), cronWf.GetLatestSchedule()) - assert.True(t, cronWf.Status.LastScheduledTime.Time.After(time.Now().Add(-1*time.Minute))) + assert.True(t, cronWf.Status.LastScheduledTime.After(time.Now().Add(-1*time.Minute))) }) }) } diff --git a/test/e2e/executor_plugins_test.go b/test/e2e/executor_plugins_test.go index 2ac7c052b2c2..246ae4ce3f13 100644 --- a/test/e2e/executor_plugins_test.go +++ b/test/e2e/executor_plugins_test.go @@ -9,7 +9,6 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" apiv1 "k8s.io/api/core/v1" - v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/utils/ptr" @@ -42,7 +41,7 @@ func (s *ExecutorPluginsSuite) TestTemplateExecutor() { assert.Equal(t, &apiv1.PodSecurityContext{ RunAsUser: ptr.To(int64(8737)), RunAsNonRoot: ptr.To(true), - SeccompProfile: &v1.SeccompProfile{Type: "RuntimeDefault"}, + SeccompProfile: &apiv1.SeccompProfile{Type: "RuntimeDefault"}, }, spec.SecurityContext) require.Len(t, spec.Volumes, 4) assert.Contains(t, spec.Volumes[0].Name, "kube-api-access-") @@ -72,7 +71,7 @@ func (s *ExecutorPluginsSuite) 
TestTemplateExecutor() { ReadOnlyRootFilesystem: ptr.To(true), Privileged: ptr.To(false), Capabilities: &apiv1.Capabilities{Drop: []apiv1.Capability{"ALL"}}, - SeccompProfile: &v1.SeccompProfile{Type: "RuntimeDefault"}, + SeccompProfile: &apiv1.SeccompProfile{Type: "RuntimeDefault"}, }, agent.SecurityContext) } }). diff --git a/test/e2e/expectedfailures/serviceaccount-insufficient-permissions.yaml b/test/e2e/expectedfailures/serviceaccount-insufficient-permissions.yaml new file mode 100644 index 000000000000..1b9df0227d0d --- /dev/null +++ b/test/e2e/expectedfailures/serviceaccount-insufficient-permissions.yaml @@ -0,0 +1,13 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: serviceaccount-insufficient-permissions +spec: + # This ServiceAccount is only intended for use in webhooks, and doesn't have + # the permissions needed by the executor. + serviceAccountName: github.com + entrypoint: main + templates: + - name: main + container: + image: argoproj/argosay:v2 \ No newline at end of file diff --git a/test/e2e/fixtures/then.go b/test/e2e/fixtures/then.go index 4470072c040c..8513a3b5c72f 100644 --- a/test/e2e/fixtures/then.go +++ b/test/e2e/fixtures/then.go @@ -3,6 +3,7 @@ package fixtures import ( "context" "fmt" + "io" "reflect" "testing" "time" @@ -200,7 +201,7 @@ func (t *Then) ExpectPVCDeleted() *Then { return t default: num := len(t.wf.Status.PersistentVolumeClaims) - pvcClient := t.kubeClient.CoreV1().PersistentVolumeClaims(t.wf.ObjectMeta.Namespace) + pvcClient := t.kubeClient.CoreV1().PersistentVolumeClaims(t.wf.Namespace) for _, p := range t.wf.Status.PersistentVolumeClaims { _, err := pvcClient.Get(ctx, p.PersistentVolumeClaim.ClaimName, metav1.GetOptions{}) if err == nil { @@ -264,6 +265,25 @@ func (t *Then) ExpectPods(f func(t *testing.T, pods []apiv1.Pod)) *Then { return t } +func (t *Then) ExpectContainerLogs(container string, f func(t *testing.T, logs string)) *Then { + t.t.Helper() + + stream, err := 
t.kubeClient.CoreV1().Pods(t.wf.Namespace).GetLogs(t.wf.Name, &apiv1.PodLogOptions{Container: container}).Stream(context.Background()) + if err != nil { + t.t.Fatal(err) + } + + defer stream.Close() + logBytes, err := io.ReadAll(stream) + if err != nil { + t.t.Fatal(err) + } + + f(t.t, string(logBytes)) + + return t +} + func (t *Then) ExpectWorkflowTaskSet(block func(t *testing.T, wfts *wfv1.WorkflowTaskSet)) *Then { t.t.Helper() ctx := context.Background() diff --git a/test/e2e/functional_test.go b/test/e2e/functional_test.go index 44c51a3227c1..0987c0495b0f 100644 --- a/test/e2e/functional_test.go +++ b/test/e2e/functional_test.go @@ -12,7 +12,6 @@ import ( "github.com/stretchr/testify/suite" apiv1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow" @@ -897,9 +896,10 @@ spec: ExpectWorkflowNode(wfv1.SucceededPodNode, func(t *testing.T, n *wfv1.NodeStatus, p *apiv1.Pod) { assert.Equal(t, int64(5), *p.Spec.TerminationGracePeriodSeconds) for _, c := range p.Spec.Containers { - if c.Name == "main" { + switch c.Name { + case "main": assert.Equal(t, "100m", c.Resources.Limits.Cpu().String()) - } else if c.Name == "wait" { + case "wait": assert.Equal(t, "101m", c.Resources.Limits.Cpu().String()) } } @@ -1374,7 +1374,7 @@ func (s *FunctionalSuite) TestTerminateWorkflowWhileOnExitHandlerRunning() { ShutdownWorkflow(wfv1.ShutdownStrategyTerminate). WaitForWorkflow(fixtures.ToBeFailed). Then(). 
- ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *wfv1.WorkflowStatus) { + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { for _, node := range status.Nodes { if node.Type == wfv1.NodeTypeStepGroup || node.Type == wfv1.NodeTypeSteps { assert.Equal(t, wfv1.NodeFailed, node.Phase) @@ -1393,7 +1393,7 @@ func (s *FunctionalSuite) TestWorkflowExitHandlerCrashEnsureNodeIsPresent() { WaitForWorkflow(fixtures.ToBeRunning). WaitForWorkflow(fixtures.ToBeFailed). Then(). - ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *wfv1.WorkflowStatus) { + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { var hasExitNode bool var exitNodeName string @@ -1424,7 +1424,7 @@ func (s *FunctionalSuite) TestWorkflowParallelismStepFailFast() { WaitForWorkflow(fixtures.ToBeRunning). WaitForWorkflow(fixtures.ToBeFailed). Then(). - ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *wfv1.WorkflowStatus) { + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { assert.Equal(t, "template has failed or errored children and failFast enabled", status.Message) assert.Equal(t, wfv1.NodeFailed, status.Nodes.FindByDisplayName("[0]").Phase) assert.Equal(t, wfv1.NodeFailed, status.Nodes.FindByDisplayName("step1").Phase) @@ -1440,9 +1440,26 @@ func (s *FunctionalSuite) TestWorkflowParallelismDAGFailFast() { WaitForWorkflow(fixtures.ToBeRunning). WaitForWorkflow(fixtures.ToBeFailed). Then(). 
- ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *wfv1.WorkflowStatus) { + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { assert.Equal(t, "template has failed or errored children and failFast enabled", status.Message) assert.Equal(t, wfv1.NodeFailed, status.Nodes.FindByDisplayName("task1").Phase) assert.Equal(t, wfv1.NodeSucceeded, status.Nodes.FindByDisplayName("task2").Phase) }) } + +func (s *FunctionalSuite) TestWorkflowInvalidServiceAccountError() { + s.Given(). + Workflow("@expectedfailures/serviceaccount-insufficient-permissions.yaml"). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeErrored). + Then(). + ExpectContainerLogs("main", func(t *testing.T, logs string) { + assert.Contains(t, logs, "hello argo") + }). + ExpectContainerLogs("wait", func(t *testing.T, logs string) { + assert.Contains(t, logs, "Error: workflowtaskresults.argoproj.io is forbidden: User \"system:serviceaccount:argo:github.com\" cannot create resource") + // Shouldn't have print help text + assert.NotContains(t, logs, "Usage:") + }) +} diff --git a/test/e2e/retry_test.go b/test/e2e/retry_test.go index f55caf262b89..db2324b0ea3d 100644 --- a/test/e2e/retry_test.go +++ b/test/e2e/retry_test.go @@ -3,8 +3,6 @@ package e2e import ( - "context" - "io" "strings" "testing" "time" @@ -15,7 +13,6 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" ) @@ -47,8 +44,8 @@ spec: SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeFailed). Then(). 
- ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowPhase("Failed"), status.Phase) assert.Equal(t, "No more retries left", status.Message) assert.Equal(t, v1alpha1.Progress("0/1"), status.Progress) }). @@ -90,8 +87,8 @@ spec: SubmitWorkflow(). WaitForWorkflow(time.Second * 90). Then(). - ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowPhase("Failed"), status.Phase) assert.LessOrEqual(t, len(status.Nodes), 10) }) s.Given(). @@ -116,15 +113,13 @@ spec: SubmitWorkflow(). WaitForWorkflow(time.Second * 90). Then(). - ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowPhase("Failed"), status.Phase) + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowPhase("Failed"), status.Phase) assert.LessOrEqual(t, len(status.Nodes), 10) }) } func (s *RetryTestSuite) TestWorkflowTemplateWithRetryStrategyInContainerSet() { - var name string - var ns string s.Given(). WorkflowTemplate("@testdata/workflow-template-with-containerset.yaml"). Workflow(` @@ -139,58 +134,28 @@ spec: SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeFailed). Then(). - ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowFailed, status.Phase) + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowFailed, status.Phase) }). 
- ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { - return status.Name == "workflow-template-containerset" - }, func(t *testing.T, status *v1alpha1.NodeStatus, pod *apiv1.Pod) { - name = pod.GetName() - ns = pod.GetNamespace() + // Success, no need retry + ExpectContainerLogs("c1", func(t *testing.T, logs string) { + count := strings.Count(logs, "capturing logs") + assert.Equal(t, 1, count) + assert.Contains(t, logs, "hi") + }). + // Command err. No retry logic is entered. + ExpectContainerLogs("c2", func(t *testing.T, logs string) { + count := strings.Count(logs, "capturing logs") + assert.Equal(t, 0, count) + assert.Contains(t, logs, "executable file not found in $PATH") + }). + // Retry when err. + ExpectContainerLogs("c3", func(t *testing.T, logs string) { + count := strings.Count(logs, "capturing logs") + assert.Equal(t, 2, count) + countFailureInfo := strings.Count(logs, "intentional failure") + assert.Equal(t, 2, countFailureInfo) }) - // Success, no need retry - s.Run("ContainerLogs", func() { - ctx := context.Background() - podLogOptions := &apiv1.PodLogOptions{Container: "c1"} - stream, err := s.KubeClient.CoreV1().Pods(ns).GetLogs(name, podLogOptions).Stream(ctx) - s.Require().NoError(err) - defer stream.Close() - logBytes, err := io.ReadAll(stream) - s.Require().NoError(err) - output := string(logBytes) - count := strings.Count(output, "capturing logs") - s.Equal(1, count) - s.Contains(output, "hi") - }) - // Command err. No retry logic is entered. 
- s.Run("ContainerLogs", func() { - ctx := context.Background() - podLogOptions := &apiv1.PodLogOptions{Container: "c2"} - stream, err := s.KubeClient.CoreV1().Pods(ns).GetLogs(name, podLogOptions).Stream(ctx) - s.Require().NoError(err) - defer stream.Close() - logBytes, err := io.ReadAll(stream) - s.Require().NoError(err) - output := string(logBytes) - count := strings.Count(output, "capturing logs") - s.Equal(0, count) - s.Contains(output, "executable file not found in $PATH") - }) - // Retry when err. - s.Run("ContainerLogs", func() { - ctx := context.Background() - podLogOptions := &apiv1.PodLogOptions{Container: "c3"} - stream, err := s.KubeClient.CoreV1().Pods(ns).GetLogs(name, podLogOptions).Stream(ctx) - s.Require().NoError(err) - defer stream.Close() - logBytes, err := io.ReadAll(stream) - s.Require().NoError(err) - output := string(logBytes) - count := strings.Count(output, "capturing logs") - s.Equal(2, count) - countFailureInfo := strings.Count(output, "intentional failure") - s.Equal(2, countFailureInfo) - }) } func (s *RetryTestSuite) TestRetryNodeAntiAffinity() { @@ -217,13 +182,13 @@ spec: WaitForWorkflow(fixtures.ToHaveFailedPod). Wait(5 * time.Second). Then(). 
- ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - if status.Phase == wfv1.WorkflowFailed { + ExpectWorkflow(func(t *testing.T, _ *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + if status.Phase == v1alpha1.WorkflowFailed { nodeStatus := status.Nodes.FindByDisplayName("test-nodeantiaffinity-strategy(0)") nodeStatusRetry := status.Nodes.FindByDisplayName("test-nodeantiaffinity-strategy(1)") assert.NotEqual(t, nodeStatus.HostNodeName, nodeStatusRetry.HostNodeName) } - if status.Phase == wfv1.WorkflowRunning { + if status.Phase == v1alpha1.WorkflowRunning { nodeStatus := status.Nodes.FindByDisplayName("test-nodeantiaffinity-strategy(0)") nodeStatusRetry := status.Nodes.FindByDisplayName("test-nodeantiaffinity-strategy(1)") assert.Contains(t, nodeStatusRetry.Message, "didn't match Pod's node affinity/selector") diff --git a/test/e2e/workflow_test.go b/test/e2e/workflow_test.go index 51c194614d5b..b45cfed84d0c 100644 --- a/test/e2e/workflow_test.go +++ b/test/e2e/workflow_test.go @@ -13,7 +13,6 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" "github.com/argoproj/argo-workflows/v3/workflow/common" ) @@ -66,11 +65,11 @@ spec: SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeFailed, time.Minute*11). Then(). 
- ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowFailed, status.Phase) + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowFailed, status.Phase) for _, node := range status.Nodes { - if node.Type == wfv1.NodeTypePod { - assert.Equal(t, wfv1.NodeFailed, node.Phase) + if node.Type == v1alpha1.NodeTypePod { + assert.Equal(t, v1alpha1.NodeFailed, node.Phase) assert.Contains(t, node.Message, "Pod was active on the node longer than the specified deadline") } } @@ -147,8 +146,8 @@ spec: SubmitWorkflow(). WaitForWorkflow(fixtures.ToBeCompleted, time.Minute*1). Then(). - ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { - assert.Equal(t, wfv1.WorkflowSucceeded, status.Phase) + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, v1alpha1.WorkflowSucceeded, status.Phase) }). 
ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { return strings.Contains(status.Name, "a") diff --git a/ui/src/cron-workflows/cron-workflow-editor.tsx b/ui/src/cron-workflows/cron-workflow-editor.tsx index fcad11038366..8326191768d9 100644 --- a/ui/src/cron-workflows/cron-workflow-editor.tsx +++ b/ui/src/cron-workflows/cron-workflow-editor.tsx @@ -7,7 +7,7 @@ import {WorkflowParametersEditor} from '../shared/components/editors/workflow-pa import {ObjectEditor} from '../shared/components/object-editor'; import type {Lang} from '../shared/components/object-parser'; import {CronWorkflow} from '../shared/models'; -import {CronWorkflowSpecEditor} from './cron-workflow-spec-editior'; +import {CronWorkflowSpecEditor} from './cron-workflow-spec-editor'; import {CronWorkflowStatusViewer} from './cron-workflow-status-viewer'; export function CronWorkflowEditor({ diff --git a/ui/src/cron-workflows/cron-workflow-list.tsx b/ui/src/cron-workflows/cron-workflow-list.tsx index 4ff8ed3ff647..cf7e357c48ad 100644 --- a/ui/src/cron-workflows/cron-workflow-list.tsx +++ b/ui/src/cron-workflows/cron-workflow-list.tsx @@ -169,28 +169,20 @@ export function CronWorkflowList({match, location, history}: RouteComponentProps
    {w.metadata.namespace}
    {w.spec.timezone}
    - {w.spec.schedule - ? w.spec.schedule - : w.spec.schedules.map(schedule => ( - <> - {schedule} -
    - - ))} + {w.spec.schedules.map(schedule => ( + <> + {schedule} +
    + + ))}
    - {w.spec.schedule ? ( - - ) : ( + {w.spec.schedules.map(schedule => ( <> - {w.spec.schedules.map(schedule => ( - <> - -
    - - ))} + +
    - )} + ))}
    @@ -223,10 +215,6 @@ export function CronWorkflowList({match, location, history}: RouteComponentProps } function getSpecNextScheduledTime(spec: CronWorkflowSpec): Date { - if (spec.schedule) { - return getNextScheduledTime(spec.schedule, spec.timezone); - } - let out: Date; spec.schedules.forEach(schedule => { const next = getNextScheduledTime(schedule, spec.timezone); diff --git a/ui/src/cron-workflows/cron-workflow-spec-editior.tsx b/ui/src/cron-workflows/cron-workflow-spec-editor.tsx similarity index 84% rename from ui/src/cron-workflows/cron-workflow-spec-editior.tsx rename to ui/src/cron-workflows/cron-workflow-spec-editor.tsx index 3168285e5d1c..53d1fa6ad6b1 100644 --- a/ui/src/cron-workflows/cron-workflow-spec-editior.tsx +++ b/ui/src/cron-workflows/cron-workflow-spec-editor.tsx @@ -14,22 +14,12 @@ export function CronWorkflowSpecEditor({onChange, spec}: {spec: CronWorkflowSpec
    Schedules
    - {(spec.schedule ?? '') != '' ? ( + {spec.schedules.map((schedule, index) => ( <> - onChange({...spec, schedule})} /> - + onChange({...spec, schedules: updateScheduleAtIndex(spec.schedules, index, newSchedule)})} /> + - ) : ( - spec.schedules.map((schedule, index) => ( - <> - onChange({...spec, schedules: updateScheduleAtIndex(spec.schedules, index, newSchedule)})} - /> - - - )) - )} + ))}
    diff --git a/ui/src/cron-workflows/cron-workflow-status-viewer.tsx b/ui/src/cron-workflows/cron-workflow-status-viewer.tsx index d3cc9a36194a..c53fd092f896 100644 --- a/ui/src/cron-workflows/cron-workflow-status-viewer.tsx +++ b/ui/src/cron-workflows/cron-workflow-status-viewer.tsx @@ -19,22 +19,12 @@ export function CronWorkflowStatusViewer({spec, status}: {spec: CronWorkflowSpec {title: 'Active', value: status.active ? getCronWorkflowActiveWorkflowList(status.active) : No Workflows Active}, { title: 'Schedules', - value: ( + value: spec.schedules.map(schedule => ( <> - {(spec.schedule ?? '') != '' ? ( - <> - {spec.schedule} - - ) : ( - spec.schedules.map(schedule => ( - <> - {schedule} -
    - - )) - )} + {schedule} +
    - ) + )) }, {title: 'Last Scheduled Time', value: }, {title: 'Conditions', value: } diff --git a/ui/src/shared/components/graph/dfs-sorter.ts b/ui/src/shared/components/graph/dfs-sorter.ts index aa3d4c518163..8009815f2b12 100644 --- a/ui/src/shared/components/graph/dfs-sorter.ts +++ b/ui/src/shared/components/graph/dfs-sorter.ts @@ -3,7 +3,8 @@ import {Graph, Node} from './types'; export class DfsSorter { private graph: Graph; private sorted: Node[] = []; - private discovered: Set = new Set(); + private discovered: Set = new Set(); // Fully visited + private visiting: Set = new Set(); // Currently in DFS call stack constructor(g: Graph) { this.graph = g; @@ -19,7 +20,14 @@ export class DfsSorter { if (this.discovered.has(n)) { return; } + if (this.visiting.has(n)) { + // Cycle detected, prevent infinite recursion + return; + } + + this.visiting.add(n); this.graph.outgoingEdges(n).forEach(outgoing => this.visit(outgoing)); + this.visiting.delete(n); this.discovered.add(n); this.sorted.push(n); } diff --git a/ui/src/shared/examples.ts b/ui/src/shared/examples.ts index bc16d140e634..17919801c57e 100644 --- a/ui/src/shared/examples.ts +++ b/ui/src/shared/examples.ts @@ -84,7 +84,7 @@ export const exampleCronWorkflow = (namespace: string): CronWorkflow => ({ }, spec: { workflowMetadata: {labels}, - schedule: '* * * * *', + schedules: ['* * * * *'], workflowSpec: { entrypoint, arguments: argumentz, diff --git a/ui/src/shared/models/cron-workflows.ts b/ui/src/shared/models/cron-workflows.ts index 8e940fb88fc8..a39b3fba524d 100644 --- a/ui/src/shared/models/cron-workflows.ts +++ b/ui/src/shared/models/cron-workflows.ts @@ -15,7 +15,6 @@ export type ConcurrencyPolicy = 'Allow' | 'Forbid' | 'Replace'; export interface CronWorkflowSpec { workflowSpec: WorkflowSpec; workflowMetadata?: kubernetes.ObjectMeta; - schedule: string; schedules?: string[]; concurrencyPolicy?: ConcurrencyPolicy; suspend?: boolean; diff --git a/ui/src/shared/services/cron-workflow-service.test.ts 
b/ui/src/shared/services/cron-workflow-service.test.ts new file mode 100644 index 000000000000..2be919d520b5 --- /dev/null +++ b/ui/src/shared/services/cron-workflow-service.test.ts @@ -0,0 +1,113 @@ +import {exampleCronWorkflow} from '../examples'; +import {CronWorkflowService} from './cron-workflow-service'; +import requests from './requests'; + +jest.mock('./requests'); + +describe('cron workflow service', () => { + describe('create', () => { + test('with valid CronWorkflow', async () => { + const cronWf = exampleCronWorkflow('ns'); + const request = {send: jest.fn().mockResolvedValue({body: cronWf})}; + jest.spyOn(requests, 'post').mockReturnValue(request as any); + + const result = await CronWorkflowService.create(cronWf, cronWf.metadata.namespace); + + expect(result).toStrictEqual(cronWf); + expect(requests.post).toHaveBeenCalledWith('api/v1/cron-workflows/ns'); + }); + }); + + describe('list', () => { + test('with no results', async () => { + jest.spyOn(requests, 'get').mockResolvedValue({body: {}} as any); + + const result = await CronWorkflowService.list('ns'); + + expect(result).toStrictEqual([]); + expect(requests.get).toHaveBeenCalledWith('api/v1/cron-workflows/ns?'); + }); + + test('with multiple results', async () => { + const items = [exampleCronWorkflow('ns'), exampleCronWorkflow('ns')]; + jest.spyOn(requests, 'get').mockResolvedValue({body: {items}} as any); + + const result = await CronWorkflowService.list('ns', ['foo', 'bar']); + + expect(result).toStrictEqual(items); + expect(requests.get).toHaveBeenCalledWith('api/v1/cron-workflows/ns?listOptions.labelSelector=foo,bar'); + }); + }); + + describe('get', () => { + test('with valid CronWorkflow', async () => { + const cronWf = exampleCronWorkflow('ns'); + jest.spyOn(requests, 'get').mockResolvedValue({body: cronWf} as any); + + const result = await CronWorkflowService.get(cronWf.metadata.name, 'ns'); + + expect(result).toStrictEqual(cronWf); + 
expect(requests.get).toHaveBeenCalledWith(`api/v1/cron-workflows/ns/${cronWf.metadata.name}`); + }); + + test('with old CronWorkflow using "schedule"', async () => { + const cronWf = exampleCronWorkflow('otherns') as any; + cronWf.spec.schedule = '* * * * *'; + delete cronWf.spec.schedules; + jest.spyOn(requests, 'get').mockResolvedValue({body: cronWf} as any); + + const result = await CronWorkflowService.get(cronWf.metadata.name, 'otherns'); + + expect(result.spec.schedules).toEqual(['* * * * *']); + expect(requests.get).toHaveBeenCalledWith(`api/v1/cron-workflows/otherns/${cronWf.metadata.name}`); + }); + + test('with invalid CronWorkflow missing "schedules"', async () => { + const cronWf = exampleCronWorkflow('otherns'); + delete cronWf.spec.schedules; + jest.spyOn(requests, 'get').mockResolvedValue({body: cronWf} as any); + + const result = await CronWorkflowService.get(cronWf.metadata.name, 'otherns'); + + expect(result.spec.schedules).toEqual([]); + expect(requests.get).toHaveBeenCalledWith(`api/v1/cron-workflows/otherns/${cronWf.metadata.name}`); + }); + }); + + describe('update', () => { + test('with valid CronWorkflow', async () => { + const cronWf = exampleCronWorkflow('ns'); + const request = {send: jest.fn().mockResolvedValue({body: cronWf})}; + jest.spyOn(requests, 'put').mockReturnValue(request as any); + + const result = await CronWorkflowService.update(cronWf, cronWf.metadata.name, cronWf.metadata.namespace); + + expect(result).toStrictEqual(cronWf); + expect(requests.put).toHaveBeenCalledWith(`api/v1/cron-workflows/ns/${cronWf.metadata.name}`); + }); + }); + + describe('suspend', () => { + test('with valid CronWorkflow', async () => { + const cronWf = exampleCronWorkflow('ns'); + jest.spyOn(requests, 'put').mockResolvedValue({body: cronWf} as any); + + const result = await CronWorkflowService.suspend(cronWf.metadata.name, 'ns'); + + expect(result).toStrictEqual(cronWf); + 
expect(requests.put).toHaveBeenCalledWith(`api/v1/cron-workflows/ns/${cronWf.metadata.name}/suspend`); + }); + }); + + describe('resume', () => { + test('with valid CronWorkflow', async () => { + const cronWf = exampleCronWorkflow('ns'); + jest.spyOn(requests, 'put').mockResolvedValue({body: cronWf} as any); + + const result = await CronWorkflowService.resume(cronWf.metadata.name, 'ns'); + + expect(result).toStrictEqual(cronWf); + expect(requests.put).toHaveBeenCalledWith(`api/v1/cron-workflows/ns/${cronWf.metadata.name}/resume`); + }); + }); +}); diff --git a/ui/src/shared/services/cron-workflow-service.ts b/ui/src/shared/services/cron-workflow-service.ts index dc82880c7995..ef9c80c67271 100644 --- a/ui/src/shared/services/cron-workflow-service.ts +++ b/ui/src/shared/services/cron-workflow-service.ts @@ -2,30 +2,44 @@ import {CronWorkflow, CronWorkflowList} from '../models'; import requests from './requests'; import {queryParams} from './utils'; +// Handle CronWorkflows using the deprecated "schedule" field by automatically +// migrating them to use "schedules". +// Also, gracefully handle invalid CronWorkflows that are missing both +// "schedule" and "schedules". +function normalizeSchedules(cronWorkflow: any): CronWorkflow { + cronWorkflow.spec.schedules ??= []; + // TODO: Delete this once we drop support for "schedule" + if ((cronWorkflow.spec.schedule ?? 
'') != '') { + cronWorkflow.spec.schedules.push(cronWorkflow.spec.schedule); + delete cronWorkflow.spec.schedule; + } + return cronWorkflow as CronWorkflow; +} + export const CronWorkflowService = { create(cronWorkflow: CronWorkflow, namespace: string) { return requests .post(`api/v1/cron-workflows/${namespace}`) .send({cronWorkflow}) - .then(res => res.body as CronWorkflow); + .then(res => normalizeSchedules(res.body)); }, list(namespace: string, labels: string[] = []) { return requests .get(`api/v1/cron-workflows/${namespace}?${queryParams({labels}).join('&')}`) .then(res => res.body as CronWorkflowList) - .then(list => list.items || []); + .then(list => (list.items || []).map(normalizeSchedules)); }, get(name: string, namespace: string) { - return requests.get(`api/v1/cron-workflows/${namespace}/${name}`).then(res => res.body as CronWorkflow); + return requests.get(`api/v1/cron-workflows/${namespace}/${name}`).then(res => normalizeSchedules(res.body)); }, update(cronWorkflow: CronWorkflow, name: string, namespace: string) { return requests .put(`api/v1/cron-workflows/${namespace}/${name}`) .send({cronWorkflow}) - .then(res => res.body as CronWorkflow); + .then(res => normalizeSchedules(res.body)); }, delete(name: string, namespace: string) { @@ -33,10 +47,10 @@ export const CronWorkflowService = { }, suspend(name: string, namespace: string) { - return requests.put(`api/v1/cron-workflows/${namespace}/${name}/suspend`).then(res => res.body as CronWorkflow); + return requests.put(`api/v1/cron-workflows/${namespace}/${name}/suspend`).then(res => normalizeSchedules(res.body)); }, resume(name: string, namespace: string) { - return requests.put(`api/v1/cron-workflows/${namespace}/${name}/resume`).then(res => res.body as CronWorkflow); + return requests.put(`api/v1/cron-workflows/${namespace}/${name}/resume`).then(res => normalizeSchedules(res.body)); } }; diff --git a/util/json/fix.go b/util/json/fix.go index 6a7ee41ef451..7f95aa1d79c4 100644 --- a/util/json/fix.go +++ 
b/util/json/fix.go @@ -4,8 +4,8 @@ import "strings" func Fix(s string) string { // https://stackoverflow.com/questions/28595664/how-to-stop-json-marshal-from-escaping-and/28596225 - s = strings.Replace(s, "\\u003c", "<", -1) - s = strings.Replace(s, "\\u003e", ">", -1) - s = strings.Replace(s, "\\u0026", "&", -1) + s = strings.ReplaceAll(s, "\\u003c", "<") + s = strings.ReplaceAll(s, "\\u003e", ">") + s = strings.ReplaceAll(s, "\\u0026", "&") return s } diff --git a/util/printer/workflow-printer.go b/util/printer/workflow-printer.go index 69b92c92cf5c..45bbd3a7bafb 100644 --- a/util/printer/workflow-printer.go +++ b/util/printer/workflow-printer.go @@ -31,7 +31,7 @@ func PrintWorkflows(workflows wfv1.Workflows, out io.Writer, opts PrintOpts) err printCostOptimizationNudges(workflows, out) case "name": for _, wf := range workflows { - _, _ = fmt.Fprintln(out, wf.ObjectMeta.Name) + _, _ = fmt.Fprintln(out, wf.Name) } case "json": output, err := json.MarshalIndent(workflows, "", " ") @@ -74,17 +74,17 @@ func printTable(wfList []wfv1.Workflow, out io.Writer, opts PrintOpts) { _, _ = fmt.Fprint(w, "\n") } for _, wf := range wfList { - ageStr := humanize.RelativeDurationShort(wf.ObjectMeta.CreationTimestamp.Time, time.Now()) + ageStr := humanize.RelativeDurationShort(wf.CreationTimestamp.Time, time.Now()) durationStr := humanize.RelativeDurationShort(wf.Status.StartedAt.Time, wf.Status.FinishedAt.Time) messageStr := wf.Status.Message if opts.Namespace { - _, _ = fmt.Fprintf(w, "%s\t", wf.ObjectMeta.Namespace) + _, _ = fmt.Fprintf(w, "%s\t", wf.Namespace) } var priority int if wf.Spec.Priority != nil { priority = int(*wf.Spec.Priority) } - _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%d\t%s", wf.ObjectMeta.Name, WorkflowStatus(&wf), ageStr, durationStr, priority, messageStr) + _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%d\t%s", wf.Name, WorkflowStatus(&wf), ageStr, durationStr, priority, messageStr) if opts.Output == "wide" { pending, running, completed := 
countPendingRunningCompletedNodes(&wf) _, _ = fmt.Fprintf(w, "\t%d/%d/%d", pending, running, completed) @@ -186,7 +186,7 @@ func WorkflowStatus(wf *wfv1.Workflow) string { return "Failed (Terminated)" } case wfv1.WorkflowUnknown, wfv1.WorkflowPending: - if !wf.ObjectMeta.CreationTimestamp.IsZero() { + if !wf.CreationTimestamp.IsZero() { return "Pending" } return "Unknown" diff --git a/util/resource/summary.go b/util/resource/summary.go index aa176cf1dd4c..4bf3da9be448 100644 --- a/util/resource/summary.go +++ b/util/resource/summary.go @@ -15,7 +15,7 @@ type Summary struct { func (s Summary) age() time.Duration { if s.ContainerState.Terminated != nil { - return s.ContainerState.Terminated.FinishedAt.Time.Sub(s.ContainerState.Terminated.StartedAt.Time) + return s.ContainerState.Terminated.FinishedAt.Sub(s.ContainerState.Terminated.StartedAt.Time) } else { return 0 } diff --git a/util/telemetry/exporter_prometheus.go b/util/telemetry/exporter_prometheus.go index 1ed45a90c91e..43d0b8437730 100644 --- a/util/telemetry/exporter_prometheus.go +++ b/util/telemetry/exporter_prometheus.go @@ -57,7 +57,7 @@ func (m *Metrics) RunPrometheusServer(ctx context.Context, isDummy bool) { if !m.config.Enabled { return } - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) 
name := "" mux := http.NewServeMux() diff --git a/util/template/expression_template.go b/util/template/expression_template.go index 8ea6bf248d61..59ce6dd0555b 100644 --- a/util/template/expression_template.go +++ b/util/template/expression_template.go @@ -27,7 +27,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ err := json.Unmarshal([]byte(fmt.Sprintf(`"%s"`, expression)), &unmarshalledExpression) if err != nil && allowUnresolved { log.WithError(err).Debug("unresolved is allowed ") - return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) + return fmt.Fprintf(w, "{{%s%s}}", kindExpression, expression) } if err != nil { return 0, fmt.Errorf("failed to unmarshall JSON expression: %w", err) @@ -37,7 +37,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ // this is to make sure expressions like `sprig.int(retries)` don't get resolved to 0 when `retries` don't exist in the env // See https://github.com/argoproj/argo-workflows/issues/5388 log.WithError(err).Debug("Retries are present and unresolved is allowed") - return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) + return fmt.Fprintf(w, "{{%s%s}}", kindExpression, expression) } // This is to make sure expressions which contains `workflow.status` and `work.failures` don't get resolved to nil @@ -48,7 +48,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ if ((hasWorkflowStatus(unmarshalledExpression) && !hasVarInEnv(env, "workflow.status")) || (hasWorkflowFailures(unmarshalledExpression) && !hasVarInEnv(env, "workflow.failures"))) && allowUnresolved { - return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) + return fmt.Fprintf(w, "{{%s%s}}", kindExpression, expression) } program, err := expr.Compile(unmarshalledExpression, expr.Env(env)) @@ -62,7 +62,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ if (err != nil 
|| result == nil) && allowUnresolved { // result is also un-resolved, and any error can be unresolved log.WithError(err).Debug("Result and error are unresolved") - return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) + return fmt.Fprintf(w, "{{%s%s}}", kindExpression, expression) } if err != nil { return 0, fmt.Errorf("failed to evaluate expression: %w", err) @@ -73,7 +73,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ resultMarshaled, err := json.Marshal(result) if (err != nil || resultMarshaled == nil) && allowUnresolved { log.WithError(err).Debug("resultMarshaled is nil and unresolved is allowed ") - return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) + return fmt.Fprintf(w, "{{%s%s}}", kindExpression, expression) } if err != nil { return 0, fmt.Errorf("failed to marshal evaluated expression: %w", err) diff --git a/util/template/simple_template.go b/util/template/simple_template.go index 0ef23bb99912..a596dcff8fc6 100644 --- a/util/template/simple_template.go +++ b/util/template/simple_template.go @@ -30,7 +30,7 @@ func simpleReplace(w io.Writer, tag string, replaceMap map[string]interface{}, a if allowUnresolved { // just write the same string back log.WithError(errors.InternalError("unresolved")).Debug("unresolved is allowed ") - return w.Write([]byte(fmt.Sprintf("{{%s}}", tag))) + return fmt.Fprintf(w, "{{%s}}", tag) } return 0, errors.Errorf(errors.CodeBadRequest, "failed to resolve {{%s}}", tag) } diff --git a/util/template/template.go b/util/template/template.go index b356e69d72e8..5210b2021578 100644 --- a/util/template/template.go +++ b/util/template/template.go @@ -32,7 +32,7 @@ type impl struct { func (t *impl) Replace(replaceMap map[string]interface{}, allowUnresolved bool) (string, error) { replacedTmpl := &bytes.Buffer{} - _, err := t.Template.ExecuteFunc(replacedTmpl, func(w io.Writer, tag string) (int, error) { + _, err := t.ExecuteFunc(replacedTmpl, func(w 
io.Writer, tag string) (int, error) { kind, expression := parseTag(tag) switch kind { case kindExpression: diff --git a/util/util_test.go b/util/util_test.go index 1d0c3b1d7e23..fc43a4b1763a 100644 --- a/util/util_test.go +++ b/util/util_test.go @@ -62,7 +62,7 @@ func TestRecoverWorkflowNameFromSelectorString(t *testing.T) { }) } name := RecoverWorkflowNameFromSelectorStringIfAny("whatever=whalesay") - assert.Equal(t, "", name) + assert.Empty(t, name) assert.NotPanics(t, func() { _ = RecoverWorkflowNameFromSelectorStringIfAny("whatever") }) diff --git a/workflow/artifactrepositories/mocks/Interface.go b/workflow/artifactrepositories/mocks/Interface.go index a48227e0f801..dce40dbc5463 100644 --- a/workflow/artifactrepositories/mocks/Interface.go +++ b/workflow/artifactrepositories/mocks/Interface.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks diff --git a/workflow/artifacts/azure/azure_test.go b/workflow/artifacts/azure/azure_test.go index a6fbe66f6584..3f5e6429ff3e 100644 --- a/workflow/artifacts/azure/azure_test.go +++ b/workflow/artifacts/azure/azure_test.go @@ -48,7 +48,7 @@ func TestDetermineAccountName(t *testing.T) { require.NoError(t, err) accountName, err := determineAccountName(u) require.Error(t, err) - assert.Equal(t, "", accountName) + assert.Empty(t, accountName) } } @@ -66,7 +66,7 @@ func TestArtifactDriver_WithServiceKey_DownloadDirectory_Subdir(t *testing.T) { require.NoError(t, err) _, err = containerClient.Create(context.Background(), nil) var responseError *azcore.ResponseError - if err != nil && !(errors.As(err, &responseError) && responseError.ErrorCode == "ContainerAlreadyExists") { + if err != nil && (!errors.As(err, &responseError) || responseError.ErrorCode != "ContainerAlreadyExists") { require.NoError(t, err) } diff --git a/workflow/artifacts/common/load_to_stream_test.go b/workflow/artifacts/common/load_to_stream_test.go index 
48fd5bcfd220..61782f65bdff 100644 --- a/workflow/artifacts/common/load_to_stream_test.go +++ b/workflow/artifacts/common/load_to_stream_test.go @@ -116,7 +116,7 @@ func TestLoadToStream(t *testing.T) { if err != nil { panic(err) } - assert.Equal(t, len(filesBefore), len(filesAfter)) + assert.Len(t, filesAfter, len(filesBefore)) } else { require.Error(t, err) assert.Equal(t, tc.errMsg, err.Error()) diff --git a/workflow/artifacts/s3/s3_test.go b/workflow/artifacts/s3/s3_test.go index ebb1a8bdcb2f..a85932893a3a 100644 --- a/workflow/artifacts/s3/s3_test.go +++ b/workflow/artifacts/s3/s3_test.go @@ -137,7 +137,7 @@ func TestOpenStreamS3Artifact(t *testing.T) { "Success": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art.tar.gz", }, }, @@ -163,7 +163,7 @@ func TestOpenStreamS3Artifact(t *testing.T) { "No such key": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -180,7 +180,7 @@ func TestOpenStreamS3Artifact(t *testing.T) { "Is Directory": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -197,7 +197,7 @@ func TestOpenStreamS3Artifact(t *testing.T) { "Test Directory Failed": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -257,7 +257,7 @@ func TestLoadS3Artifact(t *testing.T) { "Success": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art.tar.gz", }, }, @@ -285,7 +285,7 @@ func TestLoadS3Artifact(t *testing.T) { "No such key": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -303,7 +303,7 @@ func TestLoadS3Artifact(t *testing.T) { "Is Directory": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + 
"my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -321,7 +321,7 @@ func TestLoadS3Artifact(t *testing.T) { "Get File Other Transient Error": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -339,7 +339,7 @@ func TestLoadS3Artifact(t *testing.T) { "Test Directory Failed": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -360,7 +360,7 @@ func TestLoadS3Artifact(t *testing.T) { "Get Directory Failed": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art-2.tar.gz", }, }, @@ -397,7 +397,7 @@ func TestLoadS3Artifact(t *testing.T) { if err != nil { assert.Equal(t, tc.errMsg, err.Error()) } else { - assert.Equal(t, "", tc.errMsg) + assert.Empty(t, tc.errMsg) } }) } @@ -434,7 +434,7 @@ func TestSaveS3Artifact(t *testing.T) { "Success as Directory": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{}, + "my-bucket": {}, }, map[string]error{}), bucket: "my-bucket", @@ -531,7 +531,7 @@ func TestSaveS3Artifact(t *testing.T) { if err != nil { assert.Equal(t, tc.errMsg, err.Error()) } else { - assert.Equal(t, "", tc.errMsg) + assert.Empty(t, tc.errMsg) } }) } @@ -550,7 +550,7 @@ func TestListObjects(t *testing.T) { "Found objects": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder/hello-art.tar.gz", }, }, @@ -563,7 +563,7 @@ func TestListObjects(t *testing.T) { "Empty directory": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder", }, }, @@ -576,7 +576,7 @@ func TestListObjects(t *testing.T) { "Non-existent directory": { s3client: newMockS3Client( map[string][]string{ - "my-bucket": []string{ + "my-bucket": { "/folder", }, }, diff --git a/workflow/common/common.go b/workflow/common/common.go index 42b241b2a5be..1838972cc49a 100644 --- 
a/workflow/common/common.go +++ b/workflow/common/common.go @@ -52,6 +52,9 @@ const ( // AnnotationKeyArtifactGCStrategy is listed as an annotation on the Artifact GC Pod to identify // the strategy whose artifacts are being deleted AnnotationKeyArtifactGCStrategy = workflow.WorkflowFullName + "/artifact-gc-strategy" + // AnnotationKeyPodGCStrategy is listed as an annotation on the Pod + // the strategy for the pod, in case the pod is orphaned from its workflow + AnnotationKeyPodGCStrategy = workflow.WorkflowFullName + "/pod-gc-strategy" // LabelKeyControllerInstanceID is the label the controller will carry forward to workflows/pod labels // for the purposes of workflow segregation diff --git a/workflow/common/convert.go b/workflow/common/convert.go index 0abc2ee37d12..60b22e708174 100644 --- a/workflow/common/convert.go +++ b/workflow/common/convert.go @@ -30,7 +30,7 @@ func ConvertCronWorkflowToWorkflow(cronWf *wfv1.CronWorkflow) *wfv1.Workflow { } func ConvertCronWorkflowToWorkflowWithProperties(cronWf *wfv1.CronWorkflow, name string, scheduledTime time.Time) *wfv1.Workflow { - cronWfLabels := cronWf.ObjectMeta.GetLabels() + cronWfLabels := cronWf.GetLabels() wfLabels := make(map[string]string) for _, k := range labelsToPropagate { v, ok := cronWfLabels[k] @@ -79,14 +79,14 @@ func toWorkflow(cronWf wfv1.CronWorkflow, objectMeta metav1.ObjectMeta) *wfv1.Wo wf := &wfv1.Workflow{ TypeMeta: metav1.TypeMeta{ Kind: workflow.WorkflowKind, - APIVersion: cronWf.TypeMeta.APIVersion, + APIVersion: cronWf.APIVersion, }, ObjectMeta: objectMeta, Spec: cronWf.Spec.WorkflowSpec, } - if instanceId, ok := cronWf.ObjectMeta.GetLabels()[LabelKeyControllerInstanceID]; ok { - wf.ObjectMeta.GetLabels()[LabelKeyControllerInstanceID] = instanceId + if instanceId, ok := cronWf.GetLabels()[LabelKeyControllerInstanceID]; ok { + wf.GetLabels()[LabelKeyControllerInstanceID] = instanceId } wf.Labels[LabelKeyCronWorkflow] = cronWf.Name diff --git a/workflow/controller/agent.go 
b/workflow/controller/agent.go index de484272e802..ba60cc69093d 100644 --- a/workflow/controller/agent.go +++ b/workflow/controller/agent.go @@ -10,7 +10,6 @@ import ( apierr "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/client-go/tools/cache" "k8s.io/utils/ptr" "github.com/argoproj/argo-workflows/v3/errors" @@ -68,7 +67,7 @@ func assessAgentPodStatus(pod *apiv1.Pod) (wfv1.NodePhase, string) { message = pod.Status.Message default: newPhase = wfv1.NodeError - message = fmt.Sprintf("Unexpected pod phase for %s: %s", pod.ObjectMeta.Name, pod.Status.Phase) + message = fmt.Sprintf("Unexpected pod phase for %s: %s", pod.Name, pod.Status.Phase) } return newPhase, message } @@ -113,16 +112,12 @@ func (woc *wfOperationCtx) createAgentPod(ctx context.Context) (*apiv1.Pod, erro podName := woc.getAgentPodName() log := woc.log.WithField("podName", podName) - obj, exists, err := woc.controller.podInformer.GetStore().Get(cache.ExplicitKey(woc.wf.Namespace + "/" + podName)) + pod, err := woc.controller.PodController.GetPod(woc.wf.Namespace, podName) if err != nil { return nil, fmt.Errorf("failed to get pod from informer store: %w", err) } - if exists { - existing, ok := obj.(*apiv1.Pod) - if ok { - log.WithField("podPhase", existing.Status.Phase).Debug("Skipped pod creation: already exists") - return existing, nil - } + if pod != nil { + return pod, nil } certVolume, certVolumeMount, err := woc.getCertVolumeMount(ctx, common.CACertificatesVolumeMountName) @@ -197,10 +192,10 @@ func (woc *wfOperationCtx) createAgentPod(ctx context.Context) (*apiv1.Pod, erro agentMainCtr.Name = common.MainContainerName agentMainCtr.Args = append([]string{"agent", "main"}, woc.getExecutorLogOpts()...) 
- pod := &apiv1.Pod{ + pod = &apiv1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: podName, - Namespace: woc.wf.ObjectMeta.Namespace, + Namespace: woc.wf.Namespace, Labels: map[string]string{ common.LabelKeyWorkflow: woc.wf.Name, // Allows filtering by pods related to specific workflow common.LabelKeyCompleted: "false", // Allows filtering by incomplete workflow pods @@ -244,7 +239,7 @@ func (woc *wfOperationCtx) createAgentPod(ctx context.Context) (*apiv1.Pod, erro } if woc.controller.Config.InstanceID != "" { - pod.ObjectMeta.Labels[common.LabelKeyControllerInstanceID] = woc.controller.Config.InstanceID + pod.Labels[common.LabelKeyControllerInstanceID] = woc.controller.Config.InstanceID } log.Debug("Creating Agent pod") diff --git a/workflow/controller/agent_test.go b/workflow/controller/agent_test.go index f816c772dbcc..2b1b0b882fec 100644 --- a/workflow/controller/agent_test.go +++ b/workflow/controller/agent_test.go @@ -136,7 +136,7 @@ status: for _, pod := range pods.Items { assert.NotNil(t, pod) assert.True(t, strings.HasSuffix(pod.Name, "-agent")) - assert.Equal(t, "testID", pod.ObjectMeta.Labels[common.LabelKeyControllerInstanceID]) + assert.Equal(t, "testID", pod.Labels[common.LabelKeyControllerInstanceID]) assert.Equal(t, "virtual-node", pod.Spec.NodeName) } }) @@ -149,7 +149,7 @@ func TestAssessAgentPodStatus(t *testing.T) { } nodeStatus, msg := assessAgentPodStatus(pod1) assert.Equal(t, wfv1.NodeFailed, nodeStatus) - assert.Equal(t, "", msg) + assert.Empty(t, msg) }) t.Run("Running", func(t *testing.T) { pod1 := &apiv1.Pod{ @@ -158,7 +158,7 @@ func TestAssessAgentPodStatus(t *testing.T) { nodeStatus, msg := assessAgentPodStatus(pod1) assert.Equal(t, wfv1.NodePhase(""), nodeStatus) - assert.Equal(t, "", msg) + assert.Empty(t, msg) }) t.Run("Success", func(t *testing.T) { pod1 := &apiv1.Pod{ @@ -166,7 +166,7 @@ func TestAssessAgentPodStatus(t *testing.T) { } nodeStatus, msg := assessAgentPodStatus(pod1) assert.Equal(t, wfv1.NodePhase(""), nodeStatus) - 
assert.Equal(t, "", msg) + assert.Empty(t, msg) }) } diff --git a/workflow/controller/artifact_gc.go b/workflow/controller/artifact_gc.go index c485458a8fec..6736018b2fbe 100644 --- a/workflow/controller/artifact_gc.go +++ b/workflow/controller/artifact_gc.go @@ -478,10 +478,10 @@ func (woc *wfOperationCtx) createArtifactGCPod(ctx context.Context, strategy wfv pod.Spec.ServiceAccountName = podInfo.serviceAccount } for label, labelVal := range podInfo.podMetadata.Labels { - pod.ObjectMeta.Labels[label] = labelVal + pod.Labels[label] = labelVal } for annotation, annotationVal := range podInfo.podMetadata.Annotations { - pod.ObjectMeta.Annotations[annotation] = annotationVal + pod.Annotations[annotation] = annotationVal } if v := woc.controller.Config.InstanceID; v != "" { @@ -503,7 +503,7 @@ func (woc *wfOperationCtx) createArtifactGCPod(ctx context.Context, strategy wfv // go through any GC pods that are already running and may have completed func (woc *wfOperationCtx) processArtifactGCCompletion(ctx context.Context) error { // check if any previous Artifact GC Pods completed - pods, err := woc.controller.podInformer.GetIndexer().ByIndex(indexes.WorkflowIndex, woc.wf.GetNamespace()+"/"+woc.wf.GetName()) + pods, err := woc.controller.PodController.GetPodsByIndex(indexes.WorkflowIndex, woc.wf.GetNamespace()+"/"+woc.wf.GetName()) if err != nil { return fmt.Errorf("failed to get pods from informer: %w", err) } diff --git a/workflow/controller/cache_test.go b/workflow/controller/cache_test.go index 4a4aaf224417..9dd3cb27a4f9 100644 --- a/workflow/controller/cache_test.go +++ b/workflow/controller/cache_test.go @@ -59,7 +59,7 @@ func TestConfigMapCacheLoadHit(t *testing.T) { entry, err := c.Load(ctx, "hi-there-world") require.NoError(t, err) - assert.True(t, entry.LastHitTimestamp.Time.After(entry.CreationTimestamp.Time)) + assert.True(t, entry.LastHitTimestamp.After(entry.CreationTimestamp.Time)) outputs := entry.Outputs require.NoError(t, err) @@ -82,7 +82,7 @@ func 
TestConfigMapCacheLoadMiss(t *testing.T) { } func TestConfigMapCacheSave(t *testing.T) { - var MockParamValue string = "Hello world" + var MockParamValue = "Hello world" MockParam := wfv1.Parameter{ Name: "hello", Value: wfv1.AnyStringPtr(MockParamValue), diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index 04ea4c30b934..097ed0828bf8 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -8,7 +8,6 @@ import ( "slices" "strconv" gosync "sync" - "syscall" "time" "github.com/upper/db/v4" @@ -23,7 +22,6 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/labels" - "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/selection" "k8s.io/apimachinery/pkg/types" runtimeutil "k8s.io/apimachinery/pkg/util/runtime" @@ -32,7 +30,6 @@ import ( "k8s.io/client-go/dynamic" v1 "k8s.io/client-go/informers/core/v1" "k8s.io/client-go/kubernetes" - typedv1 "k8s.io/client-go/kubernetes/typed/core/v1" "k8s.io/client-go/rest" "k8s.io/client-go/tools/cache" apiwatch "k8s.io/client-go/tools/watch" @@ -50,9 +47,7 @@ import ( authutil "github.com/argoproj/argo-workflows/v3/util/auth" wfctx "github.com/argoproj/argo-workflows/v3/util/context" "github.com/argoproj/argo-workflows/v3/util/deprecation" - "github.com/argoproj/argo-workflows/v3/util/diff" "github.com/argoproj/argo-workflows/v3/util/env" - errorsutil "github.com/argoproj/argo-workflows/v3/util/errors" "github.com/argoproj/argo-workflows/v3/util/telemetry" "github.com/argoproj/argo-workflows/v3/workflow/artifactrepositories" "github.com/argoproj/argo-workflows/v3/workflow/common" @@ -67,7 +62,6 @@ import ( "github.com/argoproj/argo-workflows/v3/workflow/gccontroller" "github.com/argoproj/argo-workflows/v3/workflow/hydrator" "github.com/argoproj/argo-workflows/v3/workflow/metrics" - "github.com/argoproj/argo-workflows/v3/workflow/signal" 
"github.com/argoproj/argo-workflows/v3/workflow/sync" "github.com/argoproj/argo-workflows/v3/workflow/util" plugin "github.com/argoproj/argo-workflows/v3/workflow/util/plugins" @@ -124,11 +118,11 @@ type WorkflowController struct { wfInformer cache.SharedIndexInformer wftmplInformer wfextvv1alpha1.WorkflowTemplateInformer cwftmplInformer wfextvv1alpha1.ClusterWorkflowTemplateInformer - podInformer cache.SharedIndexInformer + PodController *pod.Controller // Currently public for woc to access, but would rather an accessor configMapInformer cache.SharedIndexInformer - wfQueue workqueue.RateLimitingInterface - podCleanupQueue workqueue.RateLimitingInterface // pods to be deleted or labelled depend on GC strategy - wfArchiveQueue workqueue.RateLimitingInterface + wfQueue workqueue.TypedRateLimitingInterface[string] + podCleanupQueue workqueue.TypedRateLimitingInterface[string] // pods to be deleted or labelled depend on GC strategy + wfArchiveQueue workqueue.TypedRateLimitingInterface[string] throttler sync.Throttler workflowKeyLock syncpkg.KeyLock // used to lock workflows for exclusive modification or access session db.Session @@ -225,19 +219,17 @@ func NewWorkflowController(ctx context.Context, restConfig *rest.Config, kubecli WorkflowCondition: wfc.getWorkflowConditionMetrics, IsLeader: wfc.IsLeader, }) - deprecation.Initialize(wfc.metrics.Metrics.DeprecatedFeature) - if err != nil { return nil, err } + + deprecation.Initialize(wfc.metrics.DeprecatedFeature) wfc.entrypoint = entrypoint.New(kubeclientset, wfc.Config.Images) workqueue.SetProvider(wfc.metrics) // must execute SetProvider before we create the queues wfc.wfQueue = wfc.metrics.RateLimiterWithBusyWorkers(ctx, &fixedItemIntervalRateLimiter{}, "workflow_queue") wfc.throttler = wfc.newThrottler() - wfc.podCleanupQueue = wfc.metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultControllerRateLimiter(), "pod_cleanup_queue") - wfc.wfArchiveQueue = wfc.metrics.RateLimiterWithBusyWorkers(ctx, 
workqueue.DefaultControllerRateLimiter(), "workflow_archive_queue") - + wfc.wfArchiveQueue = wfc.metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "workflow_archive_queue") return &wfc, nil } @@ -248,7 +240,7 @@ func (wfc *WorkflowController) newThrottler() sync.Throttler { // runGCcontroller runs the workflow garbage collector controller func (wfc *WorkflowController) runGCcontroller(ctx context.Context, workflowTTLWorkers int) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) gcCtrl := gccontroller.NewController(ctx, wfc.wfclientset, wfc.wfInformer, wfc.metrics, wfc.Config.RetentionPolicy) err := gcCtrl.Run(ctx.Done(), workflowTTLWorkers) @@ -257,8 +249,14 @@ func (wfc *WorkflowController) runGCcontroller(ctx context.Context, workflowTTLW } } +func (wfc *WorkflowController) runPodController(ctx context.Context, podGCWorkers int) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) + + wfc.PodController.Run(ctx, podGCWorkers) +} + func (wfc *WorkflowController) runCronController(ctx context.Context, cronWorkflowWorkers int) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) cronController := cron.NewCronController(ctx, wfc.wfclientset, wfc.dynamicInterface, wfc.namespace, wfc.GetManagedNamespace(), wfc.Config.InstanceID, wfc.metrics, wfc.eventRecorderManager, cronWorkflowWorkers, wfc.wftmplInformer, wfc.cwftmplInformer) cronController.Run(ctx) @@ -277,7 +275,7 @@ var indexers = cache.Indexers{ // Run starts a Workflow resource controller func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWorkers, podCleanupWorkers, cronWorkflowWorkers, wfArchiveWorkers int) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) 
+ defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) // init DB after leader election (if enabled) if err := wfc.initDB(); err != nil { @@ -288,7 +286,6 @@ func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWo defer cancel() defer wfc.wfQueue.ShutDown() - defer wfc.podCleanupQueue.ShutDown() log.WithField("version", argo.GetVersion().Version). WithField("defaultRequeueTime", GetRequeueTime()). @@ -310,7 +307,8 @@ func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWo if err != nil { log.Fatal(err) } - wfc.podInformer = wfc.newPodInformer(ctx) + wfc.PodController = pod.NewController(ctx, &wfc.Config, wfc.restConfig, wfc.GetManagedNamespace(), wfc.kubeclientset, wfc.wfInformer, wfc.metrics, wfc.enqueueWfFromPodLabel) + wfc.updateEstimatorFactory() wfc.configMapInformer = wfc.newConfigMapInformer() @@ -323,24 +321,24 @@ func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWo } if os.Getenv("WATCH_CONTROLLER_SEMAPHORE_CONFIGMAPS") != "false" { - go wfc.runConfigMapWatcher(ctx.Done()) + go wfc.runConfigMapWatcher(ctx) } go wfc.wfInformer.Run(ctx.Done()) go wfc.wftmplInformer.Informer().Run(ctx.Done()) - go wfc.podInformer.Run(ctx.Done()) go wfc.configMapInformer.Run(ctx.Done()) go wfc.wfTaskSetInformer.Informer().Run(ctx.Done()) go wfc.artGCTaskInformer.Informer().Run(ctx.Done()) go wfc.taskResultInformer.Run(ctx.Done()) wfc.createClusterWorkflowTemplateInformer(ctx) + go wfc.runPodController(ctx, podCleanupWorkers) // Wait for all involved caches to be synced, before processing items from the queue is started if !cache.WaitForCacheSync( ctx.Done(), wfc.wfInformer.HasSynced, wfc.wftmplInformer.Informer().HasSynced, - wfc.podInformer.HasSynced, + wfc.PodController.HasSynced(), wfc.configMapInformer.HasSynced, wfc.wfTaskSetInformer.Informer().HasSynced, wfc.artGCTaskInformer.Informer().HasSynced, @@ -349,22 +347,19 @@ func (wfc *WorkflowController) Run(ctx 
context.Context, wfWorkers, workflowTTLWo log.Fatal("Timed out waiting for caches to sync") } - for i := 0; i < podCleanupWorkers; i++ { - go wait.UntilWithContext(ctx, wfc.runPodCleanup, time.Second) - } - go wfc.workflowGarbageCollector(ctx.Done()) - go wfc.archivedWorkflowGarbageCollector(ctx.Done()) + go wfc.workflowGarbageCollector(ctx) + go wfc.archivedWorkflowGarbageCollector(ctx) go wfc.runGCcontroller(ctx, workflowTTLWorkers) go wfc.runCronController(ctx, cronWorkflowWorkers) - go wait.Until(wfc.syncManager.CheckWorkflowExistence, workflowExistenceCheckPeriod, ctx.Done()) + go wait.UntilWithContext(ctx, wfc.syncManager.CheckWorkflowExistence, workflowExistenceCheckPeriod) for i := 0; i < wfWorkers; i++ { - go wait.Until(wfc.runWorker, time.Second, ctx.Done()) + go wait.UntilWithContext(ctx, wfc.runWorker, time.Second) } for i := 0; i < wfArchiveWorkers; i++ { - go wait.Until(wfc.runArchiveWorker, time.Second, ctx.Done()) + go wait.UntilWithContext(ctx, wfc.runArchiveWorker, time.Second) } if cacheGCPeriod != 0 { go wait.JitterUntilWithContext(ctx, wfc.syncAllCacheForGC, cacheGCPeriod, 0.0, true) @@ -433,10 +428,9 @@ func (wfc *WorkflowController) initManagers(ctx context.Context) error { return nil } -func (wfc *WorkflowController) runConfigMapWatcher(stopCh <-chan struct{}) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (wfc *WorkflowController) runConfigMapWatcher(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) 
- ctx := context.Background() retryWatcher, err := apiwatch.NewRetryWatcher("1", &cache.ListWatch{ WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) { return wfc.kubeclientset.CoreV1().ConfigMaps(wfc.managedNamespace).Watch(ctx, metav1.ListOptions{}) @@ -461,7 +455,7 @@ func (wfc *WorkflowController) runConfigMapWatcher(stopCh <-chan struct{}) { wfc.UpdateConfig(ctx) } wfc.notifySemaphoreConfigUpdate(cm) - case <-stopCh: + case <-ctx.Done(): return } } @@ -522,179 +516,15 @@ func (wfc *WorkflowController) UpdateConfig(ctx context.Context) { } } -func (wfc *WorkflowController) queuePodForCleanup(namespace string, podName string, action podCleanupAction) { - wfc.podCleanupQueue.AddRateLimited(newPodCleanupKey(namespace, podName, action)) -} - -func (wfc *WorkflowController) queuePodForCleanupAfter(namespace string, podName string, action podCleanupAction, duration time.Duration) { - wfc.podCleanupQueue.AddAfter(newPodCleanupKey(namespace, podName, action), duration) -} - -func (wfc *WorkflowController) runPodCleanup(ctx context.Context) { - for wfc.processNextPodCleanupItem(ctx) { - } -} - -func (wfc *WorkflowController) getPodCleanupPatch(pod *apiv1.Pod, labelPodCompleted bool) ([]byte, error) { - un := unstructured.Unstructured{} - if labelPodCompleted { - un.SetLabels(map[string]string{common.LabelKeyCompleted: "true"}) - } - - finalizerEnabled := os.Getenv(common.EnvVarPodStatusCaptureFinalizer) == "true" - if finalizerEnabled && pod.Finalizers != nil { - finalizers := slices.Clone(pod.Finalizers) - finalizers = slices.DeleteFunc(finalizers, - func(s string) bool { return s == common.FinalizerPodStatus }) - if len(finalizers) != len(pod.Finalizers) { - un.SetFinalizers(finalizers) - un.SetResourceVersion(pod.ObjectMeta.ResourceVersion) - } - } - - // if there was nothing to patch (no-op) - if len(un.Object) == 0 { - return nil, nil - } - - return un.MarshalJSON() -} - -func (wfc *WorkflowController) patchPodForCleanup(ctx context.Context, pods 
typedv1.PodInterface, namespace, podName string, labelPodCompleted bool) error { - pod, err := wfc.getPod(namespace, podName) - // err is always nil in all kind of caches for now - if err != nil { - return err - } - // if pod is nil, it must have been deleted - if pod == nil { - return nil - } - - patch, err := wfc.getPodCleanupPatch(pod, labelPodCompleted) - if err != nil { - return err - } - if patch == nil { - return nil - } - - _, err = pods.Patch(ctx, podName, types.MergePatchType, patch, metav1.PatchOptions{}) - if err != nil && !apierr.IsNotFound(err) { - return err - } - - return nil -} - -// all pods will ultimately be cleaned up by either deleting them, or labelling them -func (wfc *WorkflowController) processNextPodCleanupItem(ctx context.Context) bool { - key, quit := wfc.podCleanupQueue.Get() - if quit { - return false - } - - defer func() { - wfc.podCleanupQueue.Forget(key) - wfc.podCleanupQueue.Done(key) - }() - - namespace, podName, action := parsePodCleanupKey(key.(podCleanupKey)) - logCtx := log.WithFields(log.Fields{"key": key, "action": action}) - logCtx.Info("cleaning up pod") - err := func() error { - switch action { - case terminateContainers: - pod, err := wfc.getPod(namespace, podName) - if err == nil && pod != nil && pod.Status.Phase == apiv1.PodPending { - wfc.queuePodForCleanup(namespace, podName, deletePod) - } else if terminationGracePeriod, err := wfc.signalContainers(ctx, namespace, podName, syscall.SIGTERM); err != nil { - return err - } else if terminationGracePeriod > 0 { - wfc.queuePodForCleanupAfter(namespace, podName, killContainers, terminationGracePeriod) - } - case killContainers: - if _, err := wfc.signalContainers(ctx, namespace, podName, syscall.SIGKILL); err != nil { - return err - } - case labelPodCompleted: - pods := wfc.kubeclientset.CoreV1().Pods(namespace) - if err := wfc.patchPodForCleanup(ctx, pods, namespace, podName, true); err != nil { - return err - } - case deletePod: - pods := 
wfc.kubeclientset.CoreV1().Pods(namespace) - if err := wfc.patchPodForCleanup(ctx, pods, namespace, podName, false); err != nil { - return err - } - propagation := metav1.DeletePropagationBackground - err := pods.Delete(ctx, podName, metav1.DeleteOptions{ - PropagationPolicy: &propagation, - GracePeriodSeconds: wfc.Config.PodGCGracePeriodSeconds, - }) - if err != nil && !apierr.IsNotFound(err) { - return err - } - case removeFinalizer: - pods := wfc.kubeclientset.CoreV1().Pods(namespace) - if err := wfc.patchPodForCleanup(ctx, pods, namespace, podName, false); err != nil { - return err - } - } - return nil - }() - if err != nil { - logCtx.WithError(err).Warn("failed to clean-up pod") - if errorsutil.IsTransientErr(err) || apierr.IsConflict(err) { - wfc.podCleanupQueue.AddRateLimited(key) - } - } - return true -} - -func (wfc *WorkflowController) getPod(namespace string, podName string) (*apiv1.Pod, error) { - obj, exists, err := wfc.podInformer.GetStore().GetByKey(namespace + "/" + podName) - if err != nil { - return nil, err - } - if !exists { - return nil, nil - } - pod, ok := obj.(*apiv1.Pod) - if !ok { - return nil, fmt.Errorf("object is not a pod") - } - return pod, nil -} - -func (wfc *WorkflowController) signalContainers(ctx context.Context, namespace string, podName string, sig syscall.Signal) (time.Duration, error) { - pod, err := wfc.getPod(namespace, podName) - if pod == nil || err != nil { - return 0, err - } - - for _, c := range pod.Status.ContainerStatuses { - if c.State.Running == nil { - continue - } - // problems are already logged at info level, so we just ignore errors here - _ = signal.SignalContainer(ctx, wfc.restConfig, pod, c.Name, sig) - } - if pod.Spec.TerminationGracePeriodSeconds == nil { - return 30 * time.Second, nil - } - return time.Duration(*pod.Spec.TerminationGracePeriodSeconds) * time.Second, nil -} - -func (wfc *WorkflowController) workflowGarbageCollector(stopCh <-chan struct{}) { - defer 
runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (wfc *WorkflowController) workflowGarbageCollector(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) periodicity := env.LookupEnvDurationOr("WORKFLOW_GC_PERIOD", 5*time.Minute) log.WithField("periodicity", periodicity).Info("Performing periodic GC") ticker := time.NewTicker(periodicity) for { select { - case <-stopCh: + case <-ctx.Done(): ticker.Stop() return case <-ticker.C: @@ -771,8 +601,8 @@ func (wfc *WorkflowController) deleteOffloadedNodesForWorkflow(uid string, versi return nil } -func (wfc *WorkflowController) archivedWorkflowGarbageCollector(stopCh <-chan struct{}) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (wfc *WorkflowController) archivedWorkflowGarbageCollector(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) periodicity := env.LookupEnvDurationOr("ARCHIVED_WORKFLOW_GC_PERIOD", 24*time.Hour) if wfc.Config.Persistence == nil { @@ -793,7 +623,7 @@ func (wfc *WorkflowController) archivedWorkflowGarbageCollector(stopCh <-chan st defer ticker.Stop() for { select { - case <-stopCh: + case <-ctx.Done(): return case <-ticker.C: log.Info("Performing archived workflow GC") @@ -805,18 +635,16 @@ func (wfc *WorkflowController) archivedWorkflowGarbageCollector(stopCh <-chan st } } -func (wfc *WorkflowController) runWorker() { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (wfc *WorkflowController) runWorker(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) - ctx := context.Background() for wfc.processNextItem(ctx) { } } -func (wfc *WorkflowController) runArchiveWorker() { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (wfc *WorkflowController) runArchiveWorker(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) 
- ctx := context.Background() for wfc.processNextArchiveItem(ctx) { } } @@ -829,10 +657,10 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { } defer wfc.wfQueue.Done(key) - wfc.workflowKeyLock.Lock(key.(string)) - defer wfc.workflowKeyLock.Unlock(key.(string)) + wfc.workflowKeyLock.Lock(key) + defer wfc.workflowKeyLock.Unlock(key) - obj, ok := wfc.getWorkflowByKey(key.(string)) + obj, ok := wfc.getWorkflowByKey(key) if !ok { return true } @@ -859,7 +687,7 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { return true } - if wf.Status.Phase != "" && wfc.checkRecentlyCompleted(wf.ObjectMeta.Name) { + if wf.Status.Phase != "" && wfc.checkRecentlyCompleted(wf.Name) { log.WithFields(log.Fields{"name": wf.ObjectMeta.Name}).Warn("Cache: Rejecting recently deleted") return true } @@ -869,7 +697,7 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { woc := newWorkflowOperationCtx(wf, wfc) - if !(woc.GetShutdownStrategy().Enabled() && woc.GetShutdownStrategy() == wfv1.ShutdownStrategyTerminate) && !wfc.throttler.Admit(key.(string)) { + if (!woc.GetShutdownStrategy().Enabled() || woc.GetShutdownStrategy() != wfv1.ShutdownStrategyTerminate) && !wfc.throttler.Admit(key) { log.WithField("key", key).Info("Workflow processing has been postponed due to max parallelism limit") if woc.wf.Status.Phase == wfv1.WorkflowUnknown { woc.markWorkflowPhase(ctx, wfv1.WorkflowPending, "Workflow processing has been postponed because too many workflows are already running") @@ -882,7 +710,7 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { defer func() { // must be done with woc if !reconciliationNeeded(woc.wf) { - wfc.throttler.Remove(key.(string)) + wfc.throttler.Remove(key) } }() @@ -912,7 +740,7 @@ func (wfc *WorkflowController) processNextArchiveItem(ctx context.Context) bool } defer wfc.wfArchiveQueue.Done(key) - obj, exists, err := wfc.wfInformer.GetIndexer().GetByKey(key.(string)) + 
obj, exists, err := wfc.wfInformer.GetIndexer().GetByKey(key) if err != nil { log.WithFields(log.Fields{"key": key, "error": err}).Error("Failed to get workflow from informer") return true @@ -945,11 +773,7 @@ func reconciliationNeeded(wf metav1.Object) bool { // enqueueWfFromPodLabel will extract the workflow name from pod label and // enqueue workflow for processing -func (wfc *WorkflowController) enqueueWfFromPodLabel(obj interface{}) error { - pod, ok := obj.(*apiv1.Pod) - if !ok { - return fmt.Errorf("Key in index is not a pod") - } +func (wfc *WorkflowController) enqueueWfFromPodLabel(pod *apiv1.Pod) error { if pod.Labels == nil { return fmt.Errorf("Pod did not have labels") } @@ -958,7 +782,7 @@ func (wfc *WorkflowController) enqueueWfFromPodLabel(obj interface{}) error { // Ignore pods unrelated to workflow (this shouldn't happen unless the watch is setup incorrectly) return fmt.Errorf("Watch returned pod unrelated to any workflow") } - wfc.wfQueue.AddRateLimited(pod.ObjectMeta.Namespace + "/" + workflowName) + wfc.wfQueue.AddRateLimited(pod.Namespace + "/" + workflowName) return nil } @@ -1108,7 +932,7 @@ func (wfc *WorkflowController) addWorkflowInformerHandlers(ctx context.Context) } for _, p := range podList.Items { if slices.Contains(p.Finalizers, common.FinalizerPodStatus) { - wfc.queuePodForCleanup(p.Namespace, p.Name, removeFinalizer) + wfc.PodController.RemoveFinalizer(p.Namespace, p.Name) } } @@ -1229,84 +1053,10 @@ func (wfc *WorkflowController) archiveWorkflowAux(ctx context.Context, obj inter return nil } -var ( - incompleteReq, _ = labels.NewRequirement(common.LabelKeyCompleted, selection.Equals, []string{"false"}) - workflowReq, _ = labels.NewRequirement(common.LabelKeyWorkflow, selection.Exists, nil) -) - func (wfc *WorkflowController) instanceIDReq() labels.Requirement { return util.InstanceIDRequirement(wfc.Config.InstanceID) } -func (wfc *WorkflowController) newWorkflowPodWatch(ctx context.Context) *cache.ListWatch { - c := 
wfc.kubeclientset.CoreV1().Pods(wfc.GetManagedNamespace()) - // completed=false - labelSelector := labels.NewSelector(). - Add(*workflowReq). - Add(*incompleteReq). - Add(wfc.instanceIDReq()) - - listFunc := func(options metav1.ListOptions) (runtime.Object, error) { - options.LabelSelector = labelSelector.String() - return c.List(ctx, options) - } - watchFunc := func(options metav1.ListOptions) (watch.Interface, error) { - options.Watch = true - options.LabelSelector = labelSelector.String() - return c.Watch(ctx, options) - } - return &cache.ListWatch{ListFunc: listFunc, WatchFunc: watchFunc} -} - -func (wfc *WorkflowController) newPodInformer(ctx context.Context) cache.SharedIndexInformer { - source := wfc.newWorkflowPodWatch(ctx) - informer := cache.NewSharedIndexInformer(source, &apiv1.Pod{}, podResyncPeriod, cache.Indexers{ - indexes.WorkflowIndex: indexes.MetaWorkflowIndexFunc, - indexes.NodeIDIndex: indexes.MetaNodeIDIndexFunc, - indexes.PodPhaseIndex: indexes.PodPhaseIndexFunc, - }) - //nolint:errcheck // the error only happens if the informer was stopped, and it hasn't even started (https://github.com/kubernetes/client-go/blob/46588f2726fa3e25b1704d6418190f424f95a990/tools/cache/shared_informer.go#L580) - informer.AddEventHandler( - cache.ResourceEventHandlerFuncs{ - AddFunc: func(obj interface{}) { - err := wfc.enqueueWfFromPodLabel(obj) - if err != nil { - log.WithError(err).Warn("could not enqueue workflow from pod label on add") - return - } - }, - UpdateFunc: func(old, newVal interface{}) { - key, err := cache.MetaNamespaceKeyFunc(newVal) - if err != nil { - return - } - oldPod, newPod := old.(*apiv1.Pod), newVal.(*apiv1.Pod) - if oldPod.ResourceVersion == newPod.ResourceVersion { - return - } - if !pod.SignificantPodChange(oldPod, newPod) { - log.WithField("key", key).Info("insignificant pod change") - diff.LogChanges(oldPod, newPod) - return - } - err = wfc.enqueueWfFromPodLabel(newVal) - if err != nil { - log.WithField("key", 
key).WithError(err).Warn("could not enqueue workflow from pod label on add") - return - } - }, - DeleteFunc: func(obj interface{}) { - // IndexerInformer uses a delta queue, therefore for deletes we have to use this - // key function. - - // Enqueue the workflow for deleted pod - _ = wfc.enqueueWfFromPodLabel(obj) - }, - }, - ) - return informer -} - func (wfc *WorkflowController) newConfigMapInformer() cache.SharedIndexInformer { indexInformer := v1.NewFilteredConfigMapInformer(wfc.kubeclientset, wfc.GetManagedNamespace(), 20*time.Minute, cache.Indexers{ indexes.ConfigMapLabelsIndex: indexes.ConfigMapIndexFunc, @@ -1472,18 +1222,11 @@ func (wfc *WorkflowController) getWorkflowConditionMetrics() map[wfv1.Condition] } func (wfc *WorkflowController) getPodPhaseMetrics() map[string]int64 { - result := make(map[string]int64, 0) - if wfc.podInformer != nil { - for _, phase := range []apiv1.PodPhase{apiv1.PodRunning, apiv1.PodPending} { - objs, err := wfc.podInformer.GetIndexer().IndexKeys(indexes.PodPhaseIndex, string(phase)) - if err != nil { - log.WithError(err).Errorf("failed to list pods in phase %s", phase) - } else { - result[string(phase)] = int64(len(objs)) - } - } + // During startup we need this callback to exist, but it won't function until the PodController is started + if wfc.PodController != nil { + return wfc.PodController.GetPodPhaseMetrics() } - return result + return make(map[string]int64) } func (wfc *WorkflowController) newWorkflowTaskSetInformer() wfextvv1alpha1.WorkflowTaskSetInformer { @@ -1532,5 +1275,5 @@ func (wfc *WorkflowController) newArtGCTaskInformer() wfextvv1alpha1.WorkflowArt func (wfc *WorkflowController) IsLeader() bool { // the wfc.wfInformer is nil if it is not the leader - return !(wfc.wfInformer == nil) + return wfc.wfInformer != nil } diff --git a/workflow/controller/controller_test.go b/workflow/controller/controller_test.go index aba088fce33a..86cbe1839750 100644 --- a/workflow/controller/controller_test.go +++ 
b/workflow/controller/controller_test.go @@ -38,6 +38,7 @@ import ( controllercache "github.com/argoproj/argo-workflows/v3/workflow/controller/cache" "github.com/argoproj/argo-workflows/v3/workflow/controller/entrypoint" "github.com/argoproj/argo-workflows/v3/workflow/controller/estimation" + "github.com/argoproj/argo-workflows/v3/workflow/controller/pod" "github.com/argoproj/argo-workflows/v3/workflow/events" hydratorfake "github.com/argoproj/argo-workflows/v3/workflow/hydrator/fake" "github.com/argoproj/argo-workflows/v3/workflow/metrics" @@ -312,9 +313,9 @@ func newController(options ...interface{}) (context.CancelFunc, *WorkflowControl { wfc.metrics, testExporter, _ = metrics.CreateDefaultTestMetrics() wfc.entrypoint = entrypoint.New(kube, wfc.Config.Images) - wfc.wfQueue = workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()) + wfc.wfQueue = workqueue.NewTypedRateLimitingQueue(workqueue.DefaultTypedControllerRateLimiter[string]()) wfc.throttler = wfc.newThrottler() - wfc.podCleanupQueue = workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()) + wfc.podCleanupQueue = workqueue.NewTypedRateLimitingQueue(workqueue.DefaultTypedControllerRateLimiter[string]()) wfc.rateLimiter = wfc.newRateLimiter() } @@ -326,14 +327,15 @@ func newController(options ...interface{}) (context.CancelFunc, *WorkflowControl wfc.taskResultInformer = wfc.newWorkflowTaskResultInformer() wfc.wftmplInformer = informerFactory.Argoproj().V1alpha1().WorkflowTemplates() _ = wfc.addWorkflowInformerHandlers(ctx) - wfc.podInformer = wfc.newPodInformer(ctx) + wfc.PodController = pod.NewController(ctx, &wfc.Config, wfc.restConfig, "", wfc.kubeclientset, wfc.wfInformer, wfc.metrics, wfc.enqueueWfFromPodLabel) + wfc.configMapInformer = wfc.newConfigMapInformer() wfc.createSynchronizationManager(ctx) _ = wfc.initManagers(ctx) go wfc.wfInformer.Run(ctx.Done()) go wfc.wftmplInformer.Informer().Run(ctx.Done()) - go wfc.podInformer.Run(ctx.Done()) + go 
wfc.PodController.Run(ctx, 0) // Zero workers so we can manually process next item go wfc.wfTaskSetInformer.Informer().Run(ctx.Done()) go wfc.artGCTaskInformer.Informer().Run(ctx.Done()) go wfc.taskResultInformer.Run(ctx.Done()) @@ -343,7 +345,7 @@ func newController(options ...interface{}) (context.CancelFunc, *WorkflowControl for _, c := range []cache.SharedIndexInformer{ wfc.wfInformer, wfc.wftmplInformer.Informer(), - wfc.podInformer, + wfc.PodController.TestingPodInformer(), wfc.cwftmplInformer.Informer(), wfc.wfTaskSetInformer.Informer(), wfc.artGCTaskInformer.Informer(), @@ -516,7 +518,7 @@ func createRunningPods(ctx context.Context, woc *wfOperationCtx) { Phase: apiv1.PodRunning, }, }, metav1.CreateOptions{}) - _ = woc.controller.podInformer.GetStore().Add(pod) + _ = woc.controller.PodController.TestingPodInformer().GetStore().Add(pod) } } } @@ -529,7 +531,7 @@ func syncPodsInformer(ctx context.Context, woc *wfOperationCtx, podObjs ...apiv1 } podObjs = append(podObjs, pods.Items...) 
for _, pod := range podObjs { - err = woc.controller.podInformer.GetIndexer().Add(&pod) + err = woc.controller.PodController.TestingPodInformer().GetIndexer().Add(&pod) if err != nil { panic(err) } @@ -556,7 +558,7 @@ func makePodsPhase(ctx context.Context, woc *wfOperationCtx, phase apiv1.PodPhas if err != nil { panic(err) } - err = woc.controller.podInformer.GetStore().Update(updatedPod) + err = woc.controller.PodController.TestingPodInformer().GetStore().Update(updatedPod) if err != nil { panic(err) } @@ -569,13 +571,13 @@ func makePodsPhase(ctx context.Context, woc *wfOperationCtx, phase apiv1.PodPhas } func deletePods(ctx context.Context, woc *wfOperationCtx) { - for _, obj := range woc.controller.podInformer.GetStore().List() { + for _, obj := range woc.controller.PodController.TestingPodInformer().GetStore().List() { pod := obj.(*apiv1.Pod) err := woc.controller.kubeclientset.CoreV1().Pods(pod.Namespace).Delete(ctx, pod.Name, metav1.DeleteOptions{}) if err != nil { panic(err) } - err = woc.controller.podInformer.GetStore().Delete(obj) + err = woc.controller.PodController.TestingPodInformer().GetStore().Delete(obj) if err != nil { panic(err) } @@ -755,7 +757,7 @@ func TestWorkflowController_archivedWorkflowGarbageCollector(t *testing.T) { cancel, controller := newController() defer cancel() - controller.archivedWorkflowGarbageCollector(make(chan struct{})) + controller.archivedWorkflowGarbageCollector(context.Background()) } const wfWithTmplRef = ` @@ -1153,10 +1155,10 @@ spec: makePodsPhase(ctx, woc, apiv1.PodSucceeded) woc.operate(ctx) - assert.True(t, controller.processNextPodCleanupItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) assert.Equal(t, wfv1.WorkflowSucceeded, woc.wf.Status.Phase) podCleanupKey := "test/my-wf/labelPodCompleted" - assert.Equal(t, 0, controller.podCleanupQueue.NumRequeues(podCleanupKey)) + assert.Equal(t, 0, controller.PodController.TestingQueueNumRequeues(podCleanupKey)) } func 
TestPodCleanupDeletePendingPodWhenTerminate(t *testing.T) { @@ -1183,60 +1185,15 @@ spec: makePodsPhase(ctx, woc, apiv1.PodPending) woc.execWf.Spec.Shutdown = wfv1.ShutdownStrategyTerminate woc.operate(ctx) - assert.True(t, controller.processNextPodCleanupItem(ctx)) - assert.True(t, controller.processNextPodCleanupItem(ctx)) - assert.True(t, controller.processNextPodCleanupItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) assert.Equal(t, wfv1.WorkflowFailed, woc.wf.Status.Phase) pods, err := listPods(woc) require.NoError(t, err) assert.Empty(t, pods.Items) } -func TestPodCleanupPatch(t *testing.T) { - wfc := &WorkflowController{} - - pod := &apiv1.Pod{ - ObjectMeta: metav1.ObjectMeta{ - Labels: map[string]string{common.LabelKeyCompleted: "false"}, - Finalizers: []string{common.FinalizerPodStatus}, - ResourceVersion: "123456", - }, - } - - t.Setenv(common.EnvVarPodStatusCaptureFinalizer, "true") - - // pod finalizer enabled, patch label - patch, err := wfc.getPodCleanupPatch(pod, true) - require.NoError(t, err) - expected := `{"metadata":{"resourceVersion":"123456","finalizers":[],"labels":{"workflows.argoproj.io/completed":"true"}}}` - assert.JSONEq(t, expected, string(patch)) - - // pod finalizer enabled, do not patch label - patch, err = wfc.getPodCleanupPatch(pod, false) - require.NoError(t, err) - expected = `{"metadata":{"resourceVersion":"123456","finalizers":[]}}` - assert.JSONEq(t, expected, string(patch)) - - // pod finalizer enabled, do not patch label, nil/empty finalizers - podWithNilFinalizers := &apiv1.Pod{} - patch, err = wfc.getPodCleanupPatch(podWithNilFinalizers, false) - require.NoError(t, err) - assert.Nil(t, patch) - - t.Setenv(common.EnvVarPodStatusCaptureFinalizer, "false") - - // pod finalizer disabled, patch both - patch, err = wfc.getPodCleanupPatch(pod, true) - 
require.NoError(t, err) - expected = `{"metadata":{"labels":{"workflows.argoproj.io/completed":"true"}}}` - assert.JSONEq(t, expected, string(patch)) - - // pod finalizer disabled, do not patch label - patch, err = wfc.getPodCleanupPatch(pod, false) - require.NoError(t, err) - assert.Nil(t, patch) -} - func TestPendingPodWhenTerminate(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(helloWorldWf) wf.Spec.Shutdown = wfv1.ShutdownStrategyTerminate @@ -1270,7 +1227,7 @@ func TestWorkflowReferItselfFromExpression(t *testing.T) { makePodsPhase(ctx, woc, apiv1.PodSucceeded) woc.operate(ctx) - assert.True(t, controller.processNextPodCleanupItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) assert.Equal(t, wfv1.WorkflowSucceeded, woc.wf.Status.Phase) } @@ -1286,7 +1243,7 @@ func TestWorkflowWithLongArguments(t *testing.T) { woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) - cms, err := controller.kubeclientset.CoreV1().ConfigMaps(woc.wf.ObjectMeta.Namespace).List(ctx, metav1.ListOptions{LabelSelector: common.LabelKeyWorkflow + "=" + woc.wf.ObjectMeta.Name}) + cms, err := controller.kubeclientset.CoreV1().ConfigMaps(woc.wf.ObjectMeta.Namespace).List(ctx, metav1.ListOptions{LabelSelector: common.LabelKeyWorkflow + "=" + woc.wf.Name}) require.NoError(t, err) assert.Len(t, cms.Items, 1) assert.Contains(t, cms.Items[0].Data, common.EnvVarTemplate) @@ -1307,6 +1264,6 @@ func TestWorkflowWithLongArguments(t *testing.T) { makePodsPhase(ctx, woc, apiv1.PodSucceeded) woc.operate(ctx) - assert.True(t, controller.processNextPodCleanupItem(ctx)) + assert.True(t, controller.PodController.TestingProcessNextItem(ctx)) assert.Equal(t, wfv1.WorkflowSucceeded, woc.wf.Status.Phase) } diff --git a/workflow/controller/dag.go b/workflow/controller/dag.go index 8f708eb02d05..e26e11015522 100644 --- a/workflow/controller/dag.go +++ b/workflow/controller/dag.go @@ -858,7 +858,7 @@ func (d *dagContext) evaluateDependsLogic(taskName string) 
(bool, bool, error) { return false, false, nil } - evalTaskName := strings.Replace(taskName, "-", "_", -1) + evalTaskName := strings.ReplaceAll(taskName, "-", "_") if _, ok := evalScope[evalTaskName]; ok { continue } @@ -894,7 +894,7 @@ func (d *dagContext) evaluateDependsLogic(taskName string) (bool, bool, error) { } } - evalLogic := strings.Replace(d.GetTaskDependsLogic(taskName), "-", "_", -1) + evalLogic := strings.ReplaceAll(d.GetTaskDependsLogic(taskName), "-", "_") execute, err := argoexpr.EvalBool(evalLogic, evalScope) if err != nil { return false, false, fmt.Errorf("unable to evaluate expression '%s': %s", evalLogic, err) diff --git a/workflow/controller/dag_test.go b/workflow/controller/dag_test.go index dee2ef413f64..1062238faf08 100644 --- a/workflow/controller/dag_test.go +++ b/workflow/controller/dag_test.go @@ -106,7 +106,7 @@ func TestSingleDependency(t *testing.T) { ctx := context.Background() wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) diff --git a/workflow/controller/exec_control.go b/workflow/controller/exec_control.go index 9866aedd6bbe..14a1542c1f9a 100644 --- a/workflow/controller/exec_control.go +++ b/workflow/controller/exec_control.go @@ -45,7 +45,7 @@ func (woc *wfOperationCtx) applyExecutionControl(pod *apiv1.Pod, wfNodesLock *sy woc.log.WithField("podName", pod.Name). WithField("shutdownStrategy", woc.GetShutdownStrategy()). 
Info("Terminating pod as part of workflow shutdown") - woc.controller.queuePodForCleanup(pod.Namespace, pod.Name, terminateContainers) + woc.controller.PodController.TerminateContainers(pod.Namespace, pod.Name) msg := fmt.Sprintf("workflow shutdown with strategy: %s", woc.GetShutdownStrategy()) woc.handleExecutionControlError(nodeID, wfNodesLock, msg) return @@ -60,7 +60,7 @@ func (woc *wfOperationCtx) applyExecutionControl(pod *apiv1.Pod, wfNodesLock *sy woc.log.WithField("podName", pod.Name). WithField(" workflowDeadline", woc.workflowDeadline). Info("Terminating pod which has exceeded workflow deadline") - woc.controller.queuePodForCleanup(pod.Namespace, pod.Name, terminateContainers) + woc.controller.PodController.TerminateContainers(pod.Namespace, pod.Name) woc.handleExecutionControlError(nodeID, wfNodesLock, "Step exceeded its deadline") return } @@ -70,7 +70,7 @@ func (woc *wfOperationCtx) applyExecutionControl(pod *apiv1.Pod, wfNodesLock *sy if _, onExitPod := pod.Labels[common.LabelKeyOnExit]; !woc.GetShutdownStrategy().ShouldExecute(onExitPod) { woc.log.WithField("podName", pod.Name). 
Info("Terminating on-exit pod") - woc.controller.queuePodForCleanup(woc.wf.Namespace, pod.Name, terminateContainers) + woc.controller.PodController.TerminateContainers(pod.Namespace, pod.Name) } } } @@ -115,7 +115,7 @@ func (woc *wfOperationCtx) killDaemonedChildren(nodeID string) { continue } podName := util.GeneratePodName(woc.wf.Name, childNode.Name, util.GetTemplateFromNode(childNode), childNode.ID, util.GetWorkflowPodNameVersion(woc.wf)) - woc.controller.queuePodForCleanup(woc.wf.Namespace, podName, terminateContainers) + woc.controller.PodController.TerminateContainers(woc.wf.Namespace, podName) childNode.Phase = wfv1.NodeSucceeded childNode.Daemoned = nil woc.wf.Status.Nodes.Set(childNode.ID, childNode) diff --git a/workflow/controller/hooks.go b/workflow/controller/hooks.go index 8e86d61c5393..3ca734a7b41b 100644 --- a/workflow/controller/hooks.go +++ b/workflow/controller/hooks.go @@ -19,7 +19,7 @@ func (woc *wfOperationCtx) executeWfLifeCycleHook(ctx context.Context, tmplCtx * if hookName == wfv1.ExitLifecycleEvent { continue } - hookNodeName := generateLifeHookNodeName(woc.wf.ObjectMeta.Name, string(hookName)) + hookNodeName := generateLifeHookNodeName(woc.wf.Name, string(hookName)) // To check a node was triggered. 
hookedNode, _ := woc.wf.GetNodeByName(hookNodeName) if hook.Expression == "" { diff --git a/workflow/controller/hooks_test.go b/workflow/controller/hooks_test.go index 40de205e9069..b2f4dcce0f77 100644 --- a/workflow/controller/hooks_test.go +++ b/workflow/controller/hooks_test.go @@ -1142,7 +1142,7 @@ spec: pod.Status.Phase = apiv1.PodSucceeded updatedPod, _ := podcs.Update(ctx, pod, metav1.UpdateOptions{}) woc.wf.Status.MarkTaskResultComplete(woc.nodeID(pod)) - _ = woc.controller.podInformer.GetStore().Update(updatedPod) + _ = woc.controller.PodController.TestingPodInformer().GetStore().Update(updatedPod) woc = newWorkflowOperationCtx(woc.wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.Progress("1/2"), woc.wf.Status.Progress) @@ -1232,7 +1232,7 @@ spec: pod := pods.Items[0] pod.Status.Phase = apiv1.PodSucceeded updatedPod, _ := podcs.Update(ctx, &pod, metav1.UpdateOptions{}) - _ = woc.controller.podInformer.GetStore().Update(updatedPod) + _ = woc.controller.PodController.TestingPodInformer().GetStore().Update(updatedPod) woc.wf.Status.MarkTaskResultComplete(woc.nodeID(&pod)) woc = newWorkflowOperationCtx(woc.wf, controller) woc.operate(ctx) diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index 8712ac984ea3..4c4ddfb89f68 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -156,8 +156,8 @@ func newWorkflowOperationCtx(wf *wfv1.Workflow, wfc *WorkflowController) *wfOper execWf: wfCopy, updated: false, log: log.WithFields(log.Fields{ - "workflow": wf.ObjectMeta.Name, - "namespace": wf.ObjectMeta.Namespace, + "workflow": wf.Name, + "namespace": wf.Namespace, }), controller: wfc, globalParams: make(map[string]string), @@ -365,7 +365,7 @@ func (woc *wfOperationCtx) operate(ctx context.Context) { woc.markWorkflowRunning(ctx) } - node, err := woc.executeTemplate(ctx, woc.wf.ObjectMeta.Name, &wfv1.WorkflowStep{Template: woc.execWf.Spec.Entrypoint}, tmplCtx, woc.execWf.Spec.Arguments, 
&executeTemplateOpts{}) + node, err := woc.executeTemplate(ctx, woc.wf.Name, &wfv1.WorkflowStep{Template: woc.execWf.Spec.Entrypoint}, tmplCtx, woc.execWf.Spec.Arguments, &executeTemplateOpts{}) if err != nil { woc.log.WithError(err).Error("error in entry template execution") // we wrap this error up to report a clear message @@ -443,7 +443,7 @@ func (woc *wfOperationCtx) operate(ctx context.Context) { var onExitNode *wfv1.NodeStatus if woc.execWf.Spec.HasExitHook() { woc.log.Infof("Running OnExit handler: %s", woc.execWf.Spec.OnExit) - onExitNodeName := common.GenerateOnExitNodeName(woc.wf.ObjectMeta.Name) + onExitNodeName := common.GenerateOnExitNodeName(woc.wf.Name) onExitNode, _ = woc.execWf.GetNodeByName(onExitNodeName) if onExitNode != nil || woc.GetShutdownStrategy().ShouldExecute(true) { exitHook := woc.execWf.Spec.GetExitHook(woc.execWf.Spec.Arguments) @@ -511,7 +511,7 @@ func (woc *wfOperationCtx) operate(ctx context.Context) { default: // NOTE: we should never make it here because if the node was 'Running' we should have // returned earlier. 
- err = errors.InternalErrorf("Unexpected node phase %s: %+v", woc.wf.ObjectMeta.Name, err) + err = errors.InternalErrorf("Unexpected node phase %s: %+v", woc.wf.Name, err) woc.markWorkflowError(ctx, err) } @@ -615,13 +615,13 @@ func (woc *wfOperationCtx) getWorkflowDeadline() *time.Time { // setGlobalParameters sets the globalParam map with global parameters func (woc *wfOperationCtx) setGlobalParameters(executionParameters wfv1.Arguments) error { - woc.globalParams[common.GlobalVarWorkflowName] = woc.wf.ObjectMeta.Name - woc.globalParams[common.GlobalVarWorkflowNamespace] = woc.wf.ObjectMeta.Namespace + woc.globalParams[common.GlobalVarWorkflowName] = woc.wf.Name + woc.globalParams[common.GlobalVarWorkflowNamespace] = woc.wf.Namespace woc.globalParams[common.GlobalVarWorkflowMainEntrypoint] = woc.execWf.Spec.Entrypoint woc.globalParams[common.GlobalVarWorkflowServiceAccountName] = woc.execWf.Spec.ServiceAccountName - woc.globalParams[common.GlobalVarWorkflowUID] = string(woc.wf.ObjectMeta.UID) - woc.globalParams[common.GlobalVarWorkflowCreationTimestamp] = woc.wf.ObjectMeta.CreationTimestamp.Format(time.RFC3339) - if annotation := woc.wf.ObjectMeta.GetAnnotations(); annotation != nil { + woc.globalParams[common.GlobalVarWorkflowUID] = string(woc.wf.UID) + woc.globalParams[common.GlobalVarWorkflowCreationTimestamp] = woc.wf.CreationTimestamp.Format(time.RFC3339) + if annotation := woc.wf.GetAnnotations(); annotation != nil { val, ok := annotation[common.AnnotationKeyCronWfScheduledTime] if ok { woc.globalParams[common.GlobalVarWorkflowCronScheduleTime] = val @@ -633,10 +633,10 @@ func (woc *wfOperationCtx) setGlobalParameters(executionParameters wfv1.Argument } for char := range strftime.FormatChars { cTimeVar := fmt.Sprintf("%s.%s", common.GlobalVarWorkflowCreationTimestamp, string(char)) - woc.globalParams[cTimeVar] = strftime.Format("%"+string(char), woc.wf.ObjectMeta.CreationTimestamp.Time) + woc.globalParams[cTimeVar] = strftime.Format("%"+string(char), 
woc.wf.CreationTimestamp.Time) } - woc.globalParams[common.GlobalVarWorkflowCreationTimestamp+".s"] = strconv.FormatInt(woc.wf.ObjectMeta.CreationTimestamp.Time.Unix(), 10) - woc.globalParams[common.GlobalVarWorkflowCreationTimestamp+".RFC3339"] = woc.wf.ObjectMeta.CreationTimestamp.Format(time.RFC3339) + woc.globalParams[common.GlobalVarWorkflowCreationTimestamp+".s"] = strconv.FormatInt(woc.wf.CreationTimestamp.Unix(), 10) + woc.globalParams[common.GlobalVarWorkflowCreationTimestamp+".RFC3339"] = woc.wf.CreationTimestamp.Format(time.RFC3339) if workflowParameters, err := json.Marshal(woc.execWf.Spec.Arguments.Parameters); err == nil { woc.globalParams[common.GlobalVarWorkflowParameters] = string(workflowParameters) @@ -644,7 +644,7 @@ func (woc *wfOperationCtx) setGlobalParameters(executionParameters wfv1.Argument } for _, param := range executionParameters.Parameters { if param.ValueFrom != nil && param.ValueFrom.ConfigMapKeyRef != nil { - cmValue, err := common.GetConfigMapValue(woc.controller.configMapInformer.GetIndexer(), woc.wf.ObjectMeta.Namespace, param.ValueFrom.ConfigMapKeyRef.Name, param.ValueFrom.ConfigMapKeyRef.Key) + cmValue, err := common.GetConfigMapValue(woc.controller.configMapInformer.GetIndexer(), woc.wf.Namespace, param.ValueFrom.ConfigMapKeyRef.Name, param.ValueFrom.ConfigMapKeyRef.Key) if err != nil { if param.ValueFrom.Default != nil { woc.globalParams["workflow.parameters."+param.Name] = param.ValueFrom.Default.String() @@ -678,18 +678,18 @@ func (woc *wfOperationCtx) setGlobalParameters(executionParameters wfv1.Argument md := woc.execWf.Spec.WorkflowMetadata - if workflowAnnotations, err := json.Marshal(woc.wf.ObjectMeta.Annotations); err == nil { + if workflowAnnotations, err := json.Marshal(woc.wf.Annotations); err == nil { woc.globalParams[common.GlobalVarWorkflowAnnotations] = string(workflowAnnotations) woc.globalParams[common.GlobalVarWorkflowAnnotationsJSON] = string(workflowAnnotations) } - for k, v := range 
woc.wf.ObjectMeta.Annotations { + for k, v := range woc.wf.Annotations { woc.globalParams["workflow.annotations."+k] = v } - if workflowLabels, err := json.Marshal(woc.wf.ObjectMeta.Labels); err == nil { + if workflowLabels, err := json.Marshal(woc.wf.Labels); err == nil { woc.globalParams[common.GlobalVarWorkflowLabels] = string(workflowLabels) woc.globalParams[common.GlobalVarWorkflowLabelsJSON] = string(workflowLabels) } - for k, v := range woc.wf.ObjectMeta.Labels { + for k, v := range woc.wf.Labels { // if the Label will get overridden by a LabelsFrom expression later, don't set it now if md != nil { _, existsLabelsFrom := md.LabelsFrom[k] @@ -736,7 +736,7 @@ func (woc *wfOperationCtx) persistUpdates(ctx context.Context) { if woc.orig.ResourceVersion != woc.wf.ResourceVersion { woc.log.Panic("cannot persist updates with mismatched resource versions") } - wfClient := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows(woc.wf.ObjectMeta.Namespace) + wfClient := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows(woc.wf.Namespace) // try and compress nodes if needed nodes := woc.wf.Status.Nodes err := woc.controller.hydrator.Dehydrate(woc.wf) @@ -890,7 +890,7 @@ func (woc *wfOperationCtx) reapplyUpdate(ctx context.Context, wfClient v1alpha1. 
// Next get latest version of the workflow, apply the patch and retry the update attempt := 1 for { - currWf, err := wfClient.Get(ctx, woc.wf.ObjectMeta.Name, metav1.GetOptions{}) + currWf, err := wfClient.Get(ctx, woc.wf.Name, metav1.GetOptions{}) if err != nil { return nil, err } @@ -1115,6 +1115,7 @@ func (woc *wfOperationCtx) processNodeRetries(node *wfv1.NodeStatus, retryStrate func (woc *wfOperationCtx) podReconciliation(ctx context.Context) (error, bool) { podList, err := woc.getAllWorkflowPods() if err != nil { + woc.log.Error("was unable to retrieve workflow pods") return err, false } seenPods := make(map[string]*apiv1.Pod) @@ -1140,6 +1141,11 @@ func (woc *wfOperationCtx) podReconciliation(ctx context.Context) (error, bool) node, err := woc.wf.Status.Nodes.Get(nodeID) if err == nil { if newState := woc.assessNodeStatus(ctx, pod, node); newState != nil { + // update if a pod deletion timestamp exists on a completed workflow, ensures this pod is always looked at + // in the pod cleanup process + if pod.DeletionTimestamp != nil && newState.Fulfilled() { + woc.updated = true + } // Check whether its taskresult is in an incompleted state. 
if newState.Succeeded() && woc.wf.Status.IsTaskResultIncomplete(node.ID) { woc.log.WithFields(log.Fields{"nodeID": newState.ID}).Debug("Taskresult of the node not yet completed") @@ -1310,7 +1316,7 @@ func (woc *wfOperationCtx) failNodesWithoutCreatedPodsAfterDeadlineOrShutdown() // getAllWorkflowPods returns all pods related to the current workflow func (woc *wfOperationCtx) getAllWorkflowPods() ([]*apiv1.Pod, error) { - objs, err := woc.controller.podInformer.GetIndexer().ByIndex(indexes.WorkflowIndex, indexes.WorkflowIndexValue(woc.wf.Namespace, woc.wf.Name)) + objs, err := woc.controller.PodController.GetPodsByIndex(indexes.WorkflowIndex, indexes.WorkflowIndexValue(woc.wf.Namespace, woc.wf.Name)) if err != nil { return nil, err } @@ -1357,7 +1363,7 @@ func (woc *wfOperationCtx) assessNodeStatus(ctx context.Context, pod *apiv1.Pod, new.Message = getPendingReason(pod) new.Daemoned = nil if old.Phase != new.Phase || old.Message != new.Message { - woc.controller.metrics.ChangePodPending(ctx, new.Message, pod.ObjectMeta.Namespace) + woc.controller.metrics.ChangePodPending(ctx, new.Message, pod.Namespace) } case apiv1.PodSucceeded: new.Phase = wfv1.NodeSucceeded @@ -1399,10 +1405,10 @@ func (woc *wfOperationCtx) assessNodeStatus(ctx context.Context, pod *apiv1.Pod, } default: new.Phase = wfv1.NodeError - new.Message = fmt.Sprintf("Unexpected pod phase for %s: %s", pod.ObjectMeta.Name, pod.Status.Phase) + new.Message = fmt.Sprintf("Unexpected pod phase for %s: %s", pod.Name, pod.Status.Phase) } if old.Phase != new.Phase { - woc.controller.metrics.ChangePodPhase(ctx, string(new.Phase), pod.ObjectMeta.Namespace) + woc.controller.metrics.ChangePodPhase(ctx, string(new.Phase), pod.Namespace) } // if it's ContainerSetTemplate pod then the inner container names should match to some node names, @@ -1413,10 +1419,6 @@ func (woc *wfOperationCtx) assessNodeStatus(ctx context.Context, pod *apiv1.Pod, continue } switch { - case c.State.Waiting != nil: - 
woc.markNodePhase(ctrNodeName, wfv1.NodePending) - case c.State.Running != nil: - woc.markNodePhase(ctrNodeName, wfv1.NodeRunning) case c.State.Terminated != nil: exitCode := int(c.State.Terminated.ExitCode) message := fmt.Sprintf("%s (exit code %d): %s", c.State.Terminated.Reason, exitCode, c.State.Terminated.Message) @@ -1430,6 +1432,12 @@ func (woc *wfOperationCtx) assessNodeStatus(ctx context.Context, pod *apiv1.Pod, default: woc.markNodePhase(ctrNodeName, wfv1.NodeFailed, message) } + case pod.Status.Phase == apiv1.PodFailed: + woc.markNodePhase(ctrNodeName, wfv1.NodeFailed, `Pod Failed whilst container running`) + case c.State.Waiting != nil: + woc.markNodePhase(ctrNodeName, wfv1.NodePending) + case c.State.Running != nil: + woc.markNodePhase(ctrNodeName, wfv1.NodeRunning) } } @@ -1469,7 +1477,7 @@ func (woc *wfOperationCtx) assessNodeStatus(ctx context.Context, pod *apiv1.Pod, if c.Name == common.WaitContainerName { waitContainerCleanedUp = false switch { - case c.State.Running != nil && new.Phase.Completed(): + case c.State.Running != nil && new.Phase.Completed() && pod.Status.Phase != apiv1.PodFailed: woc.log.WithField("new.phase", new.Phase).Info("leaving phase un-changed: wait container is not yet terminated ") new.Phase = old.Phase case c.State.Terminated != nil && c.State.Terminated.ExitCode != 0: @@ -1546,7 +1554,7 @@ func podHasContainerNeedingTermination(pod *apiv1.Pod, tmpl wfv1.Template) bool func (woc *wfOperationCtx) cleanUpPod(pod *apiv1.Pod, tmpl wfv1.Template) { if podHasContainerNeedingTermination(pod, tmpl) { - woc.controller.queuePodForCleanup(woc.wf.Namespace, pod.Name, terminateContainers) + woc.controller.PodController.TerminateContainers(woc.wf.Namespace, pod.Name) } } @@ -1670,7 +1678,7 @@ func (woc *wfOperationCtx) inferFailedReason(pod *apiv1.Pod, tmpl *wfv1.Template } func (woc *wfOperationCtx) createPVCs(ctx context.Context) error { - if !(woc.wf.Status.Phase == wfv1.WorkflowPending || woc.wf.Status.Phase == wfv1.WorkflowRunning) 
{ + if woc.wf.Status.Phase != wfv1.WorkflowPending && woc.wf.Status.Phase != wfv1.WorkflowRunning { // Only attempt to create PVCs if workflow is in Pending or Running state // (e.g. passed validation, or didn't already complete) return nil @@ -1680,21 +1688,21 @@ func (woc *wfOperationCtx) createPVCs(ctx context.Context) error { // This will also handle the case where workflow has no volumeClaimTemplates. return nil } - pvcClient := woc.controller.kubeclientset.CoreV1().PersistentVolumeClaims(woc.wf.ObjectMeta.Namespace) + pvcClient := woc.controller.kubeclientset.CoreV1().PersistentVolumeClaims(woc.wf.Namespace) for i, pvcTmpl := range woc.execWf.Spec.VolumeClaimTemplates { - if pvcTmpl.ObjectMeta.Name == "" { + if pvcTmpl.Name == "" { return errors.Errorf(errors.CodeBadRequest, "volumeClaimTemplates[%d].metadata.name is required", i) } pvcTmpl = *pvcTmpl.DeepCopy() // PVC name will be - - refName := pvcTmpl.ObjectMeta.Name - pvcName := fmt.Sprintf("%s-%s", woc.wf.ObjectMeta.Name, pvcTmpl.ObjectMeta.Name) + refName := pvcTmpl.Name + pvcName := fmt.Sprintf("%s-%s", woc.wf.Name, pvcTmpl.Name) woc.log.Infof("Creating pvc %s", pvcName) - pvcTmpl.ObjectMeta.Name = pvcName - if pvcTmpl.ObjectMeta.Labels == nil { - pvcTmpl.ObjectMeta.Labels = make(map[string]string) + pvcTmpl.Name = pvcName + if pvcTmpl.Labels == nil { + pvcTmpl.Labels = make(map[string]string) } - pvcTmpl.ObjectMeta.Labels[common.LabelKeyWorkflow] = woc.wf.ObjectMeta.Name + pvcTmpl.Labels[common.LabelKeyWorkflow] = woc.wf.Name pvcTmpl.OwnerReferences = []metav1.OwnerReference{ *metav1.NewControllerRef(woc.wf, wfv1.SchemeGroupVersion.WithKind(workflow.WorkflowKind)), } @@ -1727,7 +1735,7 @@ func (woc *wfOperationCtx) createPVCs(ctx context.Context) error { Name: refName, VolumeSource: apiv1.VolumeSource{ PersistentVolumeClaim: &apiv1.PersistentVolumeClaimVolumeSource{ - ClaimName: pvc.ObjectMeta.Name, + ClaimName: pvc.Name, }, }, } @@ -1757,7 +1765,7 @@ func (woc *wfOperationCtx) deletePVCs(ctx 
context.Context) error { // PVC list already empty. nothing to do return nil } - pvcClient := woc.controller.kubeclientset.CoreV1().PersistentVolumeClaims(woc.wf.ObjectMeta.Namespace) + pvcClient := woc.controller.kubeclientset.CoreV1().PersistentVolumeClaims(woc.wf.Namespace) newPVClist := make([]apiv1.Volume, 0) // Attempt to delete all PVCs. Record first error encountered var firstErr error @@ -1801,7 +1809,7 @@ func (woc *wfOperationCtx) deletePVCs(ctx context.Context) error { // Check if we have a retry node which wasn't memoized and return that if we do func (woc *wfOperationCtx) possiblyGetRetryChildNode(node *wfv1.NodeStatus) *wfv1.NodeStatus { - if node.Type == wfv1.NodeTypeRetry && !(node.MemoizationStatus != nil && node.MemoizationStatus.Hit) { + if node.Type == wfv1.NodeTypeRetry && (node.MemoizationStatus == nil || !node.MemoizationStatus.Hit) { // If a retry node has hooks, the hook nodes will also become its children, // so we need to filter out the hook nodes when finding the last child node of the retry node. for i := len(node.Children) - 1; i >= 0; i-- { @@ -2330,13 +2338,13 @@ func (woc *wfOperationCtx) checkTemplateTimeout(tmpl *wfv1.Template, node *wfv1. 
// recordWorkflowPhaseChange stores the metrics associated with the workflow phase changing func (woc *wfOperationCtx) recordWorkflowPhaseChange(ctx context.Context) { phase := metrics.ConvertWorkflowPhase(woc.wf.Status.Phase) - woc.controller.metrics.ChangeWorkflowPhase(ctx, phase, woc.wf.ObjectMeta.Namespace) + woc.controller.metrics.ChangeWorkflowPhase(ctx, phase, woc.wf.Namespace) if woc.wf.Spec.WorkflowTemplateRef != nil { // not-woc-misuse - woc.controller.metrics.CountWorkflowTemplate(ctx, phase, woc.wf.Spec.WorkflowTemplateRef.Name, woc.wf.ObjectMeta.Namespace, woc.wf.Spec.WorkflowTemplateRef.ClusterScope) // not-woc-misuse + woc.controller.metrics.CountWorkflowTemplate(ctx, phase, woc.wf.Spec.WorkflowTemplateRef.Name, woc.wf.Namespace, woc.wf.Spec.WorkflowTemplateRef.ClusterScope) // not-woc-misuse switch woc.wf.Status.Phase { case wfv1.WorkflowSucceeded, wfv1.WorkflowFailed, wfv1.WorkflowError: duration := time.Since(woc.wf.Status.StartedAt.Time) - woc.controller.metrics.RecordWorkflowTemplateTime(ctx, duration, woc.wf.Spec.WorkflowTemplateRef.Name, woc.wf.ObjectMeta.Namespace, woc.wf.Spec.WorkflowTemplateRef.ClusterScope) // not-woc-misuse + woc.controller.metrics.RecordWorkflowTemplateTime(ctx, duration, woc.wf.Spec.WorkflowTemplateRef.Name, woc.wf.Namespace, woc.wf.Spec.WorkflowTemplateRef.ClusterScope) // not-woc-misuse log.Warnf("Recording template time") } } @@ -2362,12 +2370,12 @@ func (woc *wfOperationCtx) markWorkflowPhase(ctx context.Context, phase wfv1.Wor woc.updated = true woc.wf.Status.Phase = phase woc.recordWorkflowPhaseChange(ctx) - if woc.wf.ObjectMeta.Labels == nil { - woc.wf.ObjectMeta.Labels = make(map[string]string) + if woc.wf.Labels == nil { + woc.wf.Labels = make(map[string]string) } - woc.wf.ObjectMeta.Labels[common.LabelKeyPhase] = string(phase) - if _, ok := woc.wf.ObjectMeta.Labels[common.LabelKeyCompleted]; !ok { - woc.wf.ObjectMeta.Labels[common.LabelKeyCompleted] = "false" + woc.wf.Labels[common.LabelKeyPhase] = 
string(phase) + if _, ok := woc.wf.Labels[common.LabelKeyCompleted]; !ok { + woc.wf.Labels[common.LabelKeyCompleted] = "false" } if woc.controller.Config.WorkflowEvents.IsEnabled() { switch phase { @@ -2392,14 +2400,14 @@ func (woc *wfOperationCtx) markWorkflowPhase(ctx context.Context, phase wfv1.Wor } if phase == wfv1.WorkflowError { - entryNode, err := woc.wf.Status.Nodes.Get(woc.wf.ObjectMeta.Name) + entryNode, err := woc.wf.Status.Nodes.Get(woc.wf.Name) if err != nil { - woc.log.Errorf("was unable to obtain node for %s", woc.wf.ObjectMeta.Name) + woc.log.Errorf("was unable to obtain node for %s", woc.wf.Name) } if (err == nil) && entryNode.Phase == wfv1.NodeRunning { entryNode.Phase = wfv1.NodeError entryNode.Message = "Workflow operation error" - woc.wf.Status.Nodes.Set(woc.wf.ObjectMeta.Name, *entryNode) + woc.wf.Status.Nodes.Set(woc.wf.Name, *entryNode) woc.updated = true } } @@ -2409,15 +2417,15 @@ func (woc *wfOperationCtx) markWorkflowPhase(ctx context.Context, phase wfv1.Wor woc.log.Info("Marking workflow completed") woc.wf.Status.FinishedAt = metav1.Time{Time: time.Now().UTC()} woc.globalParams[common.GlobalVarWorkflowDuration] = fmt.Sprintf("%f", woc.wf.Status.FinishedAt.Sub(woc.wf.Status.StartedAt.Time).Seconds()) - if woc.wf.ObjectMeta.Labels == nil { - woc.wf.ObjectMeta.Labels = make(map[string]string) + if woc.wf.Labels == nil { + woc.wf.Labels = make(map[string]string) } - woc.wf.ObjectMeta.Labels[common.LabelKeyCompleted] = "true" + woc.wf.Labels[common.LabelKeyCompleted] = "true" woc.wf.Status.Conditions.UpsertCondition(wfv1.Condition{Status: metav1.ConditionTrue, Type: wfv1.ConditionTypeCompleted}) err := woc.deletePDBResource(ctx) if err != nil { woc.wf.Status.Phase = wfv1.WorkflowError - woc.wf.ObjectMeta.Labels[common.LabelKeyPhase] = string(wfv1.NodeError) + woc.wf.Labels[common.LabelKeyPhase] = string(wfv1.NodeError) woc.updated = true woc.wf.Status.Message = err.Error() } @@ -2431,7 +2439,7 @@ func (woc *wfOperationCtx) 
markWorkflowPhase(ctx context.Context, phase wfv1.Wor } woc.updated = true if woc.hasTaskSetNodes() { - woc.controller.queuePodForCleanup(woc.wf.Namespace, woc.getAgentPodName(), deletePod) + woc.controller.PodController.DeletePod(woc.wf.Namespace, woc.getAgentPodName()) } } } @@ -2667,7 +2675,7 @@ func (woc *wfOperationCtx) getPodByNode(node *wfv1.NodeStatus) (*apiv1.Pod, erro } podName := woc.getPodName(node.Name, wfutil.GetTemplateFromNode(*node)) - return woc.controller.getPod(woc.wf.GetNamespace(), podName) + return woc.controller.PodController.GetPod(woc.wf.GetNamespace(), podName) } func (woc *wfOperationCtx) recordNodePhaseEvent(node *wfv1.NodeStatus) { @@ -3143,12 +3151,12 @@ func (woc *wfOperationCtx) buildLocalScope(scope *wfScope, prefix string, node * if !node.StartedAt.Time.IsZero() { key := fmt.Sprintf("%s.startedAt", prefix) - scope.addParamToScope(key, node.StartedAt.Time.Format(time.RFC3339)) + scope.addParamToScope(key, node.StartedAt.Format(time.RFC3339)) } if !node.FinishedAt.Time.IsZero() { key := fmt.Sprintf("%s.finishedAt", prefix) - scope.addParamToScope(key, node.FinishedAt.Time.Format(time.RFC3339)) + scope.addParamToScope(key, node.FinishedAt.Format(time.RFC3339)) } if node.PodIP != "" { @@ -3776,7 +3784,7 @@ func (woc *wfOperationCtx) computeMetrics(ctx context.Context, metricList []*wfv proceed, err := shouldExecute(metricTmpl.When) if err != nil { - woc.reportMetricEmissionError(fmt.Sprintf("unable to compute 'when' clause for metric '%s': %s", woc.wf.ObjectMeta.Name, err)) + woc.reportMetricEmissionError(fmt.Sprintf("unable to compute 'when' clause for metric '%s': %s", woc.wf.Name, err)) continue } if !proceed { @@ -3786,7 +3794,7 @@ func (woc *wfOperationCtx) computeMetrics(ctx context.Context, metricList []*wfv if metricTmpl.IsRealtime() { // Finally substitute value parameters value := metricTmpl.Gauge.Value - if !(strings.HasPrefix(value, "{{") && strings.HasSuffix(value, "}}")) { + if !strings.HasPrefix(value, "{{") || 
!strings.HasSuffix(value, "}}") { woc.reportMetricEmissionError("real time metrics can only be used with metric variables") continue } diff --git a/workflow/controller/operator_concurrency_test.go b/workflow/controller/operator_concurrency_test.go index d73461dc77fa..4dc011254498 100644 --- a/workflow/controller/operator_concurrency_test.go +++ b/workflow/controller/operator_concurrency_test.go @@ -1119,7 +1119,7 @@ spec: // Make job-1's pod succeed makePodsPhase(ctx, woc, apiv1.PodSucceeded, func(pod *apiv1.Pod, _ *wfOperationCtx) { - if pod.ObjectMeta.Name == "job-1" { + if pod.Name == "job-1" { pod.Status.Phase = apiv1.PodSucceeded } }) diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 2e368dbbf767..535917f4db22 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -508,7 +508,7 @@ func TestVolumeGCStrategy(t *testing.T) { wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err := wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err := wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Len(t, wf.Status.PersistentVolumeClaims, tt.expectedVolumesRemaining) }) @@ -932,7 +932,7 @@ func TestProcessNodeRetriesWithExpression(t *testing.T) { require.NoError(t, err) // The parent node also gets marked as Succeeded. assert.Equal(t, wfv1.NodeSucceeded, n.Phase) - assert.Equal(t, "", n.Message) + assert.Empty(t, n.Message) // Mark the parent node as running again and the lastChild as errored. 
n = woc.markNodePhase(n.Name, wfv1.NodeRunning) @@ -1006,14 +1006,14 @@ func TestProcessNodeRetriesMessageOrder(t *testing.T) { n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) require.NoError(t, err) assert.Equal(t, wfv1.NodeRunning, n.Phase) - assert.Equal(t, "", n.Message) + assert.Empty(t, n.Message) // No retry related message for succeeded node woc.markNodePhase(lastChild.Name, wfv1.NodeSucceeded) n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) require.NoError(t, err) assert.Equal(t, wfv1.NodeSucceeded, n.Phase) - assert.Equal(t, "", n.Message) + assert.Empty(t, n.Message) // workflow mark shutdown, no retry is evaluated woc.wf.Spec.Shutdown = wfv1.ShutdownStrategyStop @@ -1043,7 +1043,7 @@ func TestProcessNodeRetriesMessageOrder(t *testing.T) { n, err = woc.wf.GetNodeByName(nodeName) require.NoError(t, err) assert.Equal(t, wfv1.NodeError, n.Phase) - assert.Equal(t, "", n.Message) + assert.Empty(t, n.Message) // Node status aligns with retrypolicy, should evaluate expression retries.RetryPolicy = wfv1.RetryPolicyOnFailure @@ -1331,7 +1331,7 @@ func TestBackoffMessage(t *testing.T) { require.NoError(t, err) assert.True(t, proceed) // New node is started, message should be clear - assert.Equal(t, "", newRetryNode.Message) + assert.Empty(t, newRetryNode.Message) } var retriesVariableTemplate = ` @@ -1764,6 +1764,33 @@ func TestAssessNodeStatus(t *testing.T) { node: &wfv1.NodeStatus{TemplateName: templateName}, wantPhase: wfv1.NodeError, wantMessage: "Unexpected pod phase for : ", + }, { + name: "pod failed - main container still running, init container finished", + pod: &apiv1.Pod{ + Status: apiv1.PodStatus{ + InitContainerStatuses: []apiv1.ContainerStatus{ + { + Name: common.InitContainerName, + State: apiv1.ContainerState{Terminated: nil}, + }, + }, + ContainerStatuses: []apiv1.ContainerStatus{ + { + Name: common.WaitContainerName, + State: apiv1.ContainerState{Running: &apiv1.ContainerStateRunning{StartedAt: 
metav1.Time{Time: time.Now()}}}, + }, + { + Name: common.MainContainerName, + State: apiv1.ContainerState{Running: &apiv1.ContainerStateRunning{StartedAt: metav1.Time{Time: time.Now()}}}, + }, + }, + Message: "Pod Failed", + Phase: apiv1.PodFailed, + }, + }, + node: &wfv1.NodeStatus{TemplateName: templateName}, + wantPhase: wfv1.NodeFailed, + wantMessage: "Pod Failed", }} nonDaemonWf := wfv1.MustUnmarshalWorkflow(helloWorldWf) @@ -1977,7 +2004,7 @@ func TestWorkflowStepRetry(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(workflowStepRetry) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -1987,7 +2014,7 @@ func TestWorkflowStepRetry(t *testing.T) { // complete the first pod makePodsPhase(ctx, woc, apiv1.PodSucceeded) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc = newWorkflowOperationCtx(wf, controller) nodeID := woc.nodeID(&pods.Items[0]) @@ -1996,7 +2023,7 @@ func TestWorkflowStepRetry(t *testing.T) { // fail the second pod makePodsPhase(ctx, woc, apiv1.PodFailed) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc = newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -2105,7 +2132,7 @@ func TestStepsTemplateParallelismLimit(t *testing.T) { wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) @@ -2116,7 +2143,7 @@ func TestStepsTemplateParallelismLimit(t *testing.T) { // 
operate again and make sure we don't schedule any more pods makePodsPhase(ctx, woc, apiv1.PodRunning) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) // wfBytes, _ := json.MarshalIndent(wf, "", " ") // log.Printf("%s", wfBytes) @@ -2250,7 +2277,7 @@ func TestNestedTemplateParallelismLimit(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(nestedParallelism) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -2305,9 +2332,9 @@ func TestSuspendResume(t *testing.T) { // suspend the workflow ctx := context.Background() - err := util.SuspendWorkflow(ctx, wfcset, wf.ObjectMeta.Name) + err := util.SuspendWorkflow(ctx, wfcset, wf.Name) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, *wf.Spec.Suspend) @@ -2319,9 +2346,9 @@ func TestSuspendResume(t *testing.T) { assert.Empty(t, pods.Items) // resume the workflow and operate again. 
two pods should be able to be scheduled - err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.ObjectMeta.Name, "") + err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.Name, "") require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Nil(t, wf.Spec.Suspend) woc = newWorkflowOperationCtx(wf, controller) @@ -2356,7 +2383,7 @@ func TestSuspendWithDeadline(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -2410,7 +2437,7 @@ func TestSuspendInputsResolution(t *testing.T) { assert.Equal(t, wfv1.NodeRunning, node.Phase) assert.Equal(t, "param1", node.Inputs.Parameters[0].Name) - assert.Equal(t, "{\"enum\": [\"one\", \"two\", \"three\"]}", node.Inputs.Parameters[0].Value.String()) + assert.JSONEq(t, "{\"enum\": [\"one\", \"two\", \"three\"]}", node.Inputs.Parameters[0].Value.String()) assert.Len(t, node.Inputs.Parameters[0].Enum, 3) assert.Equal(t, "one", node.Inputs.Parameters[0].Enum[0].String()) assert.Equal(t, "two", node.Inputs.Parameters[0].Enum[1].String()) @@ -2465,10 +2492,11 @@ func TestSequence(t *testing.T) { found100 := false found101 := false for _, node := range updatedWf.Status.Nodes { - if node.DisplayName == "step1(0:100)" { + switch node.DisplayName { + case "step1(0:100)": assert.Equal(t, "100", node.Inputs.Parameters[0].Value.String()) found100 = true - } else if node.DisplayName == "step1(1:101)" { + case "step1(1:101)": assert.Equal(t, "101", node.Inputs.Parameters[0].Value.String()) found101 = true } @@ -2528,8 +2556,8 @@ func TestInputParametersAsJson(t *testing.T) { found := false for _, node := range updatedWf.Status.Nodes { if node.Type == 
wfv1.NodeTypePod { - expectedJson := `Workflow: [{"name":"parameter1","value":"value1"}]. Template: [{"name":"parameter1","value":"value1"},{"name":"parameter2","value":"template2"}]` - assert.Equal(t, expectedJson, node.Inputs.Parameters[0].Value.String()) + expected := `Workflow: [{"name":"parameter1","value":"value1"}]. Template: [{"name":"parameter1","value":"value1"},{"name":"parameter2","value":"template2"}]` + assert.Equal(t, expected, node.Inputs.Parameters[0].Value.String()) found = true } } @@ -2680,7 +2708,7 @@ func TestSuspendTemplate(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -2692,9 +2720,9 @@ func TestSuspendTemplate(t *testing.T) { assert.Empty(t, pods.Items) // resume the workflow. verify resume workflow edits nodestatus correctly - err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.ObjectMeta.Name, "") + err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.Name, "") require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.False(t, util.IsWorkflowSuspended(wf)) @@ -2718,7 +2746,7 @@ func TestSuspendTemplateWithFailedResume(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -2730,9 +2758,9 @@ func TestSuspendTemplateWithFailedResume(t *testing.T) { assert.Empty(t, pods.Items) // resume the workflow. 
verify resume workflow edits nodestatus correctly - err = util.StopWorkflow(ctx, wfcset, controller.hydrator, wf.ObjectMeta.Name, "inputs.parameters.param1.value=value1", "Step failed!") + err = util.StopWorkflow(ctx, wfcset, controller.hydrator, wf.Name, "inputs.parameters.param1.value=value1", "Step failed!") require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.False(t, util.IsWorkflowSuspended(wf)) @@ -2757,7 +2785,7 @@ func TestSuspendTemplateWithFilteredResume(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -2769,7 +2797,7 @@ func TestSuspendTemplateWithFilteredResume(t *testing.T) { assert.Empty(t, pods.Items) // resume the workflow, but with non-matching selector - err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.ObjectMeta.Name, "inputs.paramaters.param1.value=value2") + err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.Name, "inputs.paramaters.param1.value=value2") require.Error(t, err) // operate the workflow. 
nothing should have happened @@ -2781,9 +2809,9 @@ func TestSuspendTemplateWithFilteredResume(t *testing.T) { assert.True(t, util.IsWorkflowSuspended(wf)) // resume the workflow, but with matching selector - err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.ObjectMeta.Name, "inputs.parameters.param1.value=value1") + err = util.ResumeWorkflow(ctx, wfcset, controller.hydrator, wf.Name, "inputs.parameters.param1.value=value1") require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.False(t, util.IsWorkflowSuspended(wf)) @@ -2833,7 +2861,7 @@ func TestSuspendResumeAfterTemplate(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -2867,7 +2895,7 @@ func TestSuspendResumeAfterTemplateNoWait(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) @@ -3226,7 +3254,7 @@ func TestParamSubstitutionWithArtifact(t *testing.T) { woc := newWoc(*wf) ctx := context.Background() woc.operate(ctx) - wf, err := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows("").Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows("").Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) pods, err := listPods(woc) @@ -3239,7 +3267,7 @@ func TestGlobalParamSubstitutionWithArtifact(t *testing.T) { woc := newWoc(*wf) ctx := context.Background() 
woc.operate(ctx) - wf, err := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows("").Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows("").Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) pods, err := listPods(woc) @@ -3352,7 +3380,7 @@ func TestMetadataPassing(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(metadataTemplate) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -3603,7 +3631,7 @@ func TestResourceTemplate(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) @@ -3615,7 +3643,7 @@ func TestResourceTemplate(t *testing.T) { err = yaml.Unmarshal([]byte(tmpl.Resource.Manifest), &cm) require.NoError(t, err) assert.Equal(t, "resource-cm", cm.Name) - assert.Empty(t, cm.ObjectMeta.OwnerReferences) + assert.Empty(t, cm.OwnerReferences) } var resourceWithOwnerReferenceTemplate = ` @@ -3686,7 +3714,7 @@ func TestResourceWithOwnerReferenceTemplate(t *testing.T) { require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) @@ -3798,7 +3826,7 @@ func TestStepWFGetNodeName(t *testing.T) { assert.False(t, hasOutputResultRef("print-message", &wf.Spec.Templates[0])) woc := newWorkflowOperationCtx(wf, 
controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) for _, node := range wf.Status.Nodes { if strings.Contains(node.Name, "generate") { @@ -3823,7 +3851,7 @@ func TestDAGWFGetNodeName(t *testing.T) { assert.False(t, hasOutputResultRef("B", &wf.Spec.Templates[0])) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) for _, node := range wf.Status.Nodes { if strings.Contains(node.Name, ".A") { @@ -3845,7 +3873,7 @@ spec: arguments: parameters: - name: input - value: '[[1,2],[3,4],[4,5],[6,7]]' + value: '[[1,2],[3,4],[4,5],[106,7]]' templates: - name: expand-with-items steps: @@ -4554,7 +4582,7 @@ func TestRetryNodeOutputs(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(retryNodeOutputs) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) @@ -6515,6 +6543,7 @@ status: phase: Failed nodes: my-wf: + id: my-wf name: my-wf phase: Failed `) @@ -7002,7 +7031,7 @@ func TestStorageQuota(t *testing.T) { cancel, controller := newController(wf) defer cancel() - controller.kubeclientset.(*fake.Clientset).BatchV1().(*batchfake.FakeBatchV1).Fake.PrependReactor("create", "persistentvolumeclaims", func(action k8stesting.Action) (bool, runtime.Object, error) { + controller.kubeclientset.(*fake.Clientset).BatchV1().(*batchfake.FakeBatchV1).PrependReactor("create", "persistentvolumeclaims", func(action k8stesting.Action) (bool, runtime.Object, error) { return true, nil, apierr.NewForbidden(schema.GroupResource{Group: "test", Resource: "test1"}, "test", errors.New("exceeded 
quota")) }) @@ -7012,7 +7041,7 @@ func TestStorageQuota(t *testing.T) { assert.Equal(t, wfv1.WorkflowPending, woc.wf.Status.Phase) assert.Contains(t, woc.wf.Status.Message, "Waiting for a PVC to be created.") - controller.kubeclientset.(*fake.Clientset).BatchV1().(*batchfake.FakeBatchV1).Fake.PrependReactor("create", "persistentvolumeclaims", func(action k8stesting.Action) (bool, runtime.Object, error) { + controller.kubeclientset.(*fake.Clientset).BatchV1().(*batchfake.FakeBatchV1).PrependReactor("create", "persistentvolumeclaims", func(action k8stesting.Action) (bool, runtime.Object, error) { return true, nil, apierr.NewBadRequest("BadRequest") }) @@ -7635,7 +7664,7 @@ func TestRetryOnDiffHost(t *testing.T) { assert.Equal(t, wfv1.NodeRunning, n.Phase) // Ensure related fields are not set - assert.Equal(t, "", lastChild.HostNodeName) + assert.Empty(t, lastChild.HostNodeName) // Set host name n, err = woc.wf.GetNodeByName(nodeName) @@ -8552,7 +8581,7 @@ func TestMutexWfPendingWithNoPod(t *testing.T) { woc.controller.syncManager.Release(ctx, wf, "test", &wfv1.Synchronization{Mutex: &wfv1.Mutex{Name: "welcome"}}) woc.operate(ctx) - assert.Equal(t, "", woc.wf.Status.Nodes.FindByDisplayName("hello-world-mpdht").Message) + assert.Empty(t, woc.wf.Status.Nodes.FindByDisplayName("hello-world-mpdht").Message) } var wfGlobalArtifactNil = `apiVersion: argoproj.io/v1alpha1 @@ -9586,7 +9615,7 @@ func TestSetWFPodNamesAnnotation(t *testing.T) { woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) - annotations := woc.wf.ObjectMeta.GetAnnotations() + annotations := woc.wf.GetAnnotations() assert.Equal(t, annotations[common.AnnotationKeyPodNameVersion], tt.podNameVersion) } } @@ -10291,6 +10320,10 @@ spec: path: /tmp/hello_world.txt ` +func nodeWithTemplateName(name string) func(n wfv1.NodeStatus) bool { + return func(n wfv1.NodeStatus) bool { return n.TemplateName == name } +} + func TestMemoizationTemplateLevelCacheWithStepWithoutCache(t *testing.T) { wf := 
wfv1.MustUnmarshalWorkflow(workflowWithTemplateLevelMemoizationAndChildStep) @@ -10307,16 +10340,15 @@ func TestMemoizationTemplateLevelCacheWithStepWithoutCache(t *testing.T) { // Expect both workflowTemplate and the step to be executed for _, node := range woc.wf.Status.Nodes { - if node.TemplateName == "entrypoint" { - assert.True(t, true, "Entrypoint node does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.False(t, node.MemoizationStatus.Hit) - } - if node.Name == "whalesay" { - assert.True(t, true, "Whalesay step does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - } + t.Logf("%+v", node) } + node := woc.wf.Status.Nodes.Find(nodeWithTemplateName("entrypoint")) + require.NotNil(t, node, "Entrypoint should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) + assert.False(t, node.MemoizationStatus.Hit) + node = woc.wf.Status.Nodes.Find(nodeWithTemplateName("whalesay")) + require.NotNil(t, node, "Whalesay step should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) } func TestMemoizationTemplateLevelCacheWithStepWithCache(t *testing.T) { @@ -10355,17 +10387,12 @@ func TestMemoizationTemplateLevelCacheWithStepWithCache(t *testing.T) { woc.operate(ctx) // Only parent node should exist and it should be a memoization cache hit - for _, node := range woc.wf.Status.Nodes { - t.Log(node) - if node.TemplateName == "entrypoint" { - assert.True(t, true, "Entrypoint node does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.True(t, node.MemoizationStatus.Hit) - } - if node.Name == "whalesay" { - assert.False(t, true, "Whalesay step should not have been executed") - } - } + node := woc.wf.Status.Nodes.Find(nodeWithTemplateName("entrypoint")) + require.NotNil(t, node, "Entrypoint should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) + assert.True(t, node.MemoizationStatus.Hit) + node = woc.wf.Status.Nodes.Find(nodeWithTemplateName("whalesay")) + require.Nil(t, node, "Whalesay step should 
not have been executed") } var workflowWithTemplateLevelMemoizationAndChildDag = ` @@ -10421,17 +10448,13 @@ func TestMemoizationTemplateLevelCacheWithDagWithoutCache(t *testing.T) { woc.operate(ctx) // Expect both workflowTemplate and the dag to be executed - for _, node := range woc.wf.Status.Nodes { - if node.TemplateName == "entrypoint" { - assert.True(t, true, "Entrypoint node does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.False(t, node.MemoizationStatus.Hit) - } - if node.Name == "whalesay" { - assert.True(t, true, "Whalesay dag does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - } - } + node := woc.wf.Status.Nodes.Find(nodeWithTemplateName("entrypoint")) + require.NotNil(t, node, "Entrypoint should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) + assert.False(t, node.MemoizationStatus.Hit) + node = woc.wf.Status.Nodes.Find(nodeWithTemplateName("whalesay")) + require.NotNil(t, node, "Whalesay dag should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) } func TestMemoizationTemplateLevelCacheWithDagWithCache(t *testing.T) { @@ -10470,17 +10493,12 @@ func TestMemoizationTemplateLevelCacheWithDagWithCache(t *testing.T) { woc.operate(ctx) // Only parent node should exist and it should be a memoization cache hit - for _, node := range woc.wf.Status.Nodes { - t.Log(node) - if node.TemplateName == "entrypoint" { - assert.True(t, true, "Entrypoint node does not exist") - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.True(t, node.MemoizationStatus.Hit) - } - if node.Name == "whalesay" { - assert.False(t, true, "Whalesay dag should not have been executed") - } - } + node := woc.wf.Status.Nodes.Find(nodeWithTemplateName("entrypoint")) + require.NotNil(t, node, "Entrypoint should exist") + assert.Equal(t, wfv1.NodeSucceeded, node.Phase) + assert.True(t, node.MemoizationStatus.Hit) + node = woc.wf.Status.Nodes.Find(nodeWithTemplateName("whalesay")) + require.Nil(t, node, "Whalesay dag should not 
have been executed") } var maxDepth = ` @@ -10777,7 +10795,7 @@ status: ctx := context.Background() - controller.kubeclientset.(*fake.Clientset).CoreV1().(*corefake.FakeCoreV1).Fake.PrependReactor("create", "pods", func(action k8stesting.Action) (bool, runtime.Object, error) { + controller.kubeclientset.(*fake.Clientset).CoreV1().(*corefake.FakeCoreV1).PrependReactor("create", "pods", func(action k8stesting.Action) (bool, runtime.Object, error) { createAction, ok := action.(k8stesting.CreateAction) assert.True(t, ok) @@ -10863,7 +10881,7 @@ func TestWorkflowNeedReconcile(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(needReconcileWorklfow) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -10873,7 +10891,7 @@ func TestWorkflowNeedReconcile(t *testing.T) { // complete the first pod makePodsPhase(ctx, woc, apiv1.PodSucceeded) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc = newWorkflowOperationCtx(wf, controller) for _, node := range woc.wf.Status.Nodes { @@ -10904,7 +10922,7 @@ func TestWorkflowNeedReconcile(t *testing.T) { // complete the second pod makePodsPhase(ctx, woc, apiv1.PodSucceeded) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc = newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -11205,7 +11223,7 @@ func TestContainerSetWhenPodDeleted(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(wfHasContainerSet) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, 
metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) @@ -11289,7 +11307,7 @@ func TestContainerSetWithDependenciesWhenPodDeleted(t *testing.T) { wf := wfv1.MustUnmarshalWorkflow(wfHasContainerSetWithDependencies) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) - wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + wf, err = wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) require.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) diff --git a/workflow/controller/pod/accessors.go b/workflow/controller/pod/accessors.go new file mode 100644 index 000000000000..26e97b80b5e2 --- /dev/null +++ b/workflow/controller/pod/accessors.go @@ -0,0 +1,40 @@ +// Package pod implements pod life cycle management +package pod + +import ( + "fmt" + + apiv1 "k8s.io/api/core/v1" +) + +func (c *Controller) GetPod(namespace string, podName string) (*apiv1.Pod, error) { + obj, exists, err := c.podInformer.GetStore().GetByKey(namespace + "/" + podName) + if err != nil { + return nil, err + } + if !exists { + return nil, nil + } + pod, ok := obj.(*apiv1.Pod) + if !ok { + return nil, fmt.Errorf("object is not a pod") + } + return pod, nil +} + +// TODO - return []*apiv1.Pod instead, save on duplicating this +func (c *Controller) GetPodsByIndex(index, key string) ([]interface{}, error) { + return c.podInformer.GetIndexer().ByIndex(index, key) +} + +func (c *Controller) TerminateContainers(namespace, name string) { + c.queuePodForCleanup(namespace, name, terminateContainers) +} + +func (c *Controller) DeletePod(namespace, name string) { + c.queuePodForCleanup(namespace, name, deletePod) +} + +func (c *Controller) RemoveFinalizer(namespace, name string) { + c.queuePodForCleanup(namespace, name, removeFinalizer) +} diff --git a/workflow/controller/pod/cleanup.go b/workflow/controller/pod/cleanup.go new file mode 100644 index 000000000000..01abddebf9e3 --- /dev/null 
+++ b/workflow/controller/pod/cleanup.go @@ -0,0 +1,70 @@ +package pod + +import ( + "slices" + "time" + + apiv1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/labels" + + "github.com/argoproj/argo-workflows/v3/workflow/common" + + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" +) + +func (c *Controller) EnactAnyPodCleanup( + selector labels.Selector, + pod *apiv1.Pod, + strategy wfv1.PodGCStrategy, + workflowPhase wfv1.WorkflowPhase, + delay time.Duration, +) { + action := determinePodCleanupAction(selector, pod.Labels, strategy, workflowPhase, pod.Status.Phase, pod.Finalizers) + switch action { + case noAction: // ignore + break + case deletePod: + c.queuePodForCleanupAfter(pod.Namespace, pod.Name, action, delay) + default: + c.queuePodForCleanup(pod.Namespace, pod.Name, action) + } + +} + +func determinePodCleanupAction( + selector labels.Selector, + podLabels map[string]string, + strategy wfv1.PodGCStrategy, + workflowPhase wfv1.WorkflowPhase, + podPhase apiv1.PodPhase, + finalizers []string, +) podCleanupAction { + switch { + case !selector.Matches(labels.Set(podLabels)): // if the pod will never be deleted, label it now + return labelPodCompleted + case strategy == wfv1.PodGCOnPodNone: + return labelPodCompleted + case strategy == wfv1.PodGCOnWorkflowCompletion && workflowPhase.Completed(): + return deletePod + case strategy == wfv1.PodGCOnWorkflowSuccess && workflowPhase == wfv1.WorkflowSucceeded: + return deletePod + case strategy == wfv1.PodGCOnPodCompletion: + return deletePod + case strategy == wfv1.PodGCOnPodSuccess && podPhase == apiv1.PodSucceeded: + return deletePod + case strategy == wfv1.PodGCOnPodSuccess && podPhase == apiv1.PodFailed: + return labelPodCompleted + case workflowPhase.Completed(): + return labelPodCompleted + case hasOurFinalizer(finalizers): + return removeFinalizer + } + return noAction +} + +func hasOurFinalizer(finalizers []string) bool { + if finalizers != nil { + return slices.Contains(finalizers, 
common.FinalizerPodStatus) + } + return false +} diff --git a/workflow/controller/pod_cleanup_key.go b/workflow/controller/pod/cleanup_key.go similarity index 94% rename from workflow/controller/pod_cleanup_key.go rename to workflow/controller/pod/cleanup_key.go index 086944d5e833..1f85300dab18 100644 --- a/workflow/controller/pod_cleanup_key.go +++ b/workflow/controller/pod/cleanup_key.go @@ -1,4 +1,4 @@ -package controller +package pod import ( "fmt" @@ -15,6 +15,7 @@ type ( ) const ( + noAction podCleanupAction = "" deletePod podCleanupAction = "deletePod" labelPodCompleted podCleanupAction = "labelPodCompleted" terminateContainers podCleanupAction = "terminateContainers" diff --git a/workflow/controller/pod_cleanup_test.go b/workflow/controller/pod/cleanup_test.go similarity index 52% rename from workflow/controller/pod_cleanup_test.go rename to workflow/controller/pod/cleanup_test.go index 80c69d02d467..20f86655bd2d 100644 --- a/workflow/controller/pod_cleanup_test.go +++ b/workflow/controller/pod/cleanup_test.go @@ -1,4 +1,4 @@ -package controller +package pod import ( "testing" @@ -7,57 +7,63 @@ import ( apiv1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/labels" + "github.com/argoproj/argo-workflows/v3/workflow/common" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) -func Test_determinePodCleanupAction(t *testing.T) { - - assert.Equal(t, labelPodCompleted, determinePodCleanupAction(labels.Nothing(), nil, wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, apiv1.PodSucceeded)) - assert.Equal(t, labelPodCompleted, determinePodCleanupAction(labels.Everything(), nil, wfv1.PodGCOnPodNone, wfv1.WorkflowSucceeded, apiv1.PodSucceeded)) +func TestDeterminePodCleanupAction(t *testing.T) { + finalizersNotOurs := []string{} + finalizersOurs := append(finalizersNotOurs, common.FinalizerPodStatus) + assert.Equal(t, labelPodCompleted, determinePodCleanupAction(labels.Nothing(), nil, wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, 
apiv1.PodSucceeded, finalizersOurs)) + assert.Equal(t, labelPodCompleted, determinePodCleanupAction(labels.Everything(), nil, wfv1.PodGCOnPodNone, wfv1.WorkflowSucceeded, apiv1.PodSucceeded, finalizersOurs)) type fields = struct { Strategy wfv1.PodGCStrategy `json:"strategy,omitempty"` WorkflowPhase wfv1.WorkflowPhase `json:"workflowPhase,omitempty"` PodPhase apiv1.PodPhase `json:"podPhase,omitempty"` + Finalizers []string } for _, tt := range []struct { Fields fields `json:"fields"` Want podCleanupAction `json:"want,omitempty"` }{ + {fields{wfv1.PodGCOnPodNone, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersNotOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodNone, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersNotOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodNone, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodNone, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersOurs}, labelPodCompleted}, - // strategy = 4 options - // workflow phase = 3 options - // pod phase = 2 options - - // 4 * 3 * 2 = 24 options - - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodSucceeded}, ""}, - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodFailed}, ""}, - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowSucceeded, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowSucceeded, apiv1.PodFailed}, deletePod}, - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowFailed, apiv1.PodSucceeded}, labelPodCompleted}, - {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowFailed, apiv1.PodFailed}, labelPodCompleted}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersNotOurs}, ""}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersNotOurs}, ""}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersOurs}, removeFinalizer}, + 
{fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersOurs}, removeFinalizer}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowSucceeded, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowSucceeded, apiv1.PodFailed, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowFailed, apiv1.PodSucceeded, finalizersOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnWorkflowSuccess, wfv1.WorkflowFailed, apiv1.PodFailed, finalizersOurs}, labelPodCompleted}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodSucceeded}, ""}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodFailed}, ""}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowSucceeded, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowSucceeded, apiv1.PodFailed}, deletePod}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowFailed, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowFailed, apiv1.PodFailed}, deletePod}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersNotOurs}, ""}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersNotOurs}, ""}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersOurs}, removeFinalizer}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersOurs}, removeFinalizer}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowSucceeded, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowSucceeded, apiv1.PodFailed, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowFailed, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnWorkflowCompletion, wfv1.WorkflowFailed, apiv1.PodFailed, 
finalizersOurs}, deletePod}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowRunning, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowRunning, apiv1.PodFailed}, labelPodCompleted}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowSucceeded, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowSucceeded, apiv1.PodFailed}, labelPodCompleted}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowFailed, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowFailed, apiv1.PodFailed}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowSucceeded, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowSucceeded, apiv1.PodFailed, finalizersOurs}, labelPodCompleted}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowFailed, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodSuccess, wfv1.WorkflowFailed, apiv1.PodFailed, finalizersOurs}, labelPodCompleted}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowRunning, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowRunning, apiv1.PodFailed}, deletePod}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, apiv1.PodFailed}, deletePod}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowFailed, apiv1.PodSucceeded}, deletePod}, - {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowFailed, apiv1.PodFailed}, deletePod}, + {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowRunning, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowRunning, apiv1.PodFailed, finalizersOurs}, deletePod}, + 
{fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowSucceeded, apiv1.PodFailed, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowFailed, apiv1.PodSucceeded, finalizersOurs}, deletePod}, + {fields{wfv1.PodGCOnPodCompletion, wfv1.WorkflowFailed, apiv1.PodFailed, finalizersOurs}, deletePod}, } { t.Run(wfv1.MustMarshallJSON(tt), func(t *testing.T) { action := determinePodCleanupAction( @@ -65,7 +71,9 @@ func Test_determinePodCleanupAction(t *testing.T) { nil, tt.Fields.Strategy, tt.Fields.WorkflowPhase, - tt.Fields.PodPhase) + tt.Fields.PodPhase, + tt.Fields.Finalizers, + ) assert.Equal(t, tt.Want, action) }) } diff --git a/workflow/controller/pod/controller.go b/workflow/controller/pod/controller.go new file mode 100644 index 000000000000..1a400adb131c --- /dev/null +++ b/workflow/controller/pod/controller.go @@ -0,0 +1,318 @@ +// Package pod reconciles pods and takes care of gc events +package pod + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/sirupsen/logrus" + apiv1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/selection" + "k8s.io/apimachinery/pkg/util/wait" + "k8s.io/apimachinery/pkg/watch" + "k8s.io/client-go/kubernetes" + "k8s.io/client-go/rest" + "k8s.io/client-go/tools/cache" + "k8s.io/client-go/util/workqueue" + + argoConfig "github.com/argoproj/argo-workflows/v3/config" + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/util/diff" + "github.com/argoproj/argo-workflows/v3/workflow/common" + "github.com/argoproj/argo-workflows/v3/workflow/controller/indexes" + 
"github.com/argoproj/argo-workflows/v3/workflow/metrics" + "github.com/argoproj/argo-workflows/v3/workflow/util" +) + +const ( + podResyncPeriod = 30 * time.Minute +) + +var ( + incompleteReq, _ = labels.NewRequirement(common.LabelKeyCompleted, selection.Equals, []string{"false"}) + workflowReq, _ = labels.NewRequirement(common.LabelKeyWorkflow, selection.Exists, nil) + keyFunc = cache.DeletionHandlingMetaNamespaceKeyFunc +) + +type podEventCallback func(pod *apiv1.Pod) error + +// Controller is a controller for pods +type Controller struct { + config *argoConfig.Config + kubeclientset kubernetes.Interface + wfInformer cache.SharedIndexInformer + workqueue workqueue.TypedRateLimitingInterface[string] + podInformer cache.SharedIndexInformer + callBack podEventCallback + log *logrus.Logger + restConfig *rest.Config +} + +// NewController creates a pod controller +func NewController(ctx context.Context, config *argoConfig.Config, restConfig *rest.Config, namespace string, clientSet kubernetes.Interface, wfInformer cache.SharedIndexInformer, metrics *metrics.Metrics, callback podEventCallback) *Controller { + log := logrus.New() + podController := &Controller{ + config: config, + kubeclientset: clientSet, + wfInformer: wfInformer, + workqueue: metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "pod_cleanup_queue"), + podInformer: newInformer(ctx, clientSet, &config.InstanceID, &namespace), + log: log, + callBack: callback, + restConfig: restConfig, + } + //nolint:errcheck // the error only happens if the informer was stopped, and it hasn't even started (https://github.com/kubernetes/client-go/blob/46588f2726fa3e25b1704d6418190f424f95a990/tools/cache/shared_informer.go#L580) + podController.podInformer.AddEventHandler( + cache.ResourceEventHandlerFuncs{ + AddFunc: func(obj interface{}) { + pod, err := podFromObj(obj) + if err != nil { + log.WithError(err).Error("object from informer wasn't a pod") + return + } + 
podController.addPodEvent(pod) + }, + UpdateFunc: func(old, newVal interface{}) { + key, err := keyFunc(newVal) + if err != nil { + return + } + oldPod, newPod := old.(*apiv1.Pod), newVal.(*apiv1.Pod) + if oldPod.ResourceVersion == newPod.ResourceVersion { + return + } + if !significantPodChange(oldPod, newPod) { + log.WithField("key", key).Info("insignificant pod change") + diff.LogChanges(oldPod, newPod) + return + } + podController.updatePodEvent(oldPod, newPod) + }, + DeleteFunc: func(obj interface{}) { + podController.deletePodEvent(obj) + }, + }, + ) + return podController +} + +func (c *Controller) HasSynced() func() bool { + return c.podInformer.HasSynced +} + +// Run runs the pod controller +func (c *Controller) Run(ctx context.Context, workers int) { + defer c.workqueue.ShutDown() + if !cache.WaitForCacheSync(ctx.Done(), c.wfInformer.HasSynced) { + return + } + go c.podInformer.Run(ctx.Done()) + if !cache.WaitForCacheSync(ctx.Done(), c.HasSynced()) { + return + } + for range workers { + go wait.UntilWithContext(ctx, c.runPodCleanup, time.Second) + } + <-ctx.Done() +} + +// GetPodPhaseMetrics obtains pod metrics +func (c *Controller) GetPodPhaseMetrics() map[string]int64 { + result := make(map[string]int64, 0) + if c.podInformer != nil { + for _, phase := range []apiv1.PodPhase{apiv1.PodRunning, apiv1.PodPending} { + objs, err := c.podInformer.GetIndexer().IndexKeys(indexes.PodPhaseIndex, string(phase)) + if err != nil { + c.log.WithError(err).Errorf("failed to list pods in phase %s", phase) + } else { + result[string(phase)] = int64(len(objs)) + } + } + } + return result +} + +// Check if owned pod's workflow no longer exists or workflow is in deletion +func (c *Controller) podOrphaned(pod *apiv1.Pod) bool { + controllerRef := metav1.GetControllerOf(pod) + // Pod had no owner + if controllerRef == nil || + controllerRef.Kind != workflow.WorkflowKind { + return false + } + wfOwnerKey := fmt.Sprintf("%s/%s", pod.Namespace, controllerRef.Name) + logCtx := 
c.log.WithFields(logrus.Fields{"wfOwnerKey": wfOwnerKey, "namespace": pod.Namespace, "podName": pod.Name}) + obj, wfExists, err := c.wfInformer.GetIndexer().GetByKey(wfOwnerKey) + if err != nil { + logCtx.Warn("failed to get workflow from informer") + } + if !wfExists { + return true + } + un, ok := obj.(*unstructured.Unstructured) + if !ok { + c.log.WithField("pod", pod.Name).Warn("workflow is not an unstructured") + return true + } + wf, err := util.FromUnstructured(un) + if err != nil { + c.log.WithField("pod", pod.Name).Warn("workflow unstructured can't be converted to a workflow") + return true + } + return wf.DeletionTimestamp != nil +} + +func podGCFromPod(pod *apiv1.Pod) wfv1.PodGC { + if val, ok := pod.Annotations[common.AnnotationKeyPodGCStrategy]; ok { + parts := strings.Split(val, "/") + return wfv1.PodGC{Strategy: wfv1.PodGCStrategy(parts[0]), DeleteDelayDuration: parts[1]} + } + return wfv1.PodGC{Strategy: wfv1.PodGCOnPodNone} +} + +// Returns time.IsZero if no last transition +func podLastTransition(pod *apiv1.Pod) time.Time { + lastTransition := time.Time{} + for _, condition := range pod.Status.Conditions { + if condition.LastTransitionTime.After(lastTransition) { + lastTransition = condition.LastTransitionTime.Time + } + } + return lastTransition +} + +// A common handler for +func (c *Controller) commonPodEvent(pod *apiv1.Pod, deleting bool) { + // All pods here are not marked completed + action := noAction + minimumDelay := time.Duration(0) + podGC := podGCFromPod(pod) + switch { + case deleting: + if hasOurFinalizer(pod.Finalizers) { + c.log.WithFields(logrus.Fields{"pod.Finalizers": pod.Finalizers}).Info("Removing finalizers during a delete") + action = removeFinalizer + minimumDelay = time.Duration(2 * time.Minute) + } + case c.podOrphaned(pod): + if hasOurFinalizer(pod.Finalizers) { + action = removeFinalizer + } + switch { + case podGC.Strategy == wfv1.PodGCOnWorkflowCompletion: + case podGC.Strategy == wfv1.PodGCOnPodCompletion: + case 
podGC.Strategy == wfv1.PodGCOnPodSuccess && pod.Status.Phase == apiv1.PodSucceeded: + action = deletePod + } + } + if action != noAction { + // The workflow is gone, we have no idea when that happened, so let's base the delay around the last pod transition + lastTransition := podLastTransition(pod) + // GetDeleteDelayDuration returns -1 if no duration, we don't care about failure to parse otherwise here + delay := time.Duration(0) + delayDuration, _ := podGC.GetDeleteDelayDuration() + // In the case of a raw delete make sure we've had some time to process it if there was a finalizer + if delayDuration < minimumDelay { + delayDuration = minimumDelay + } + if !lastTransition.IsZero() && delayDuration > 0 { + delay = time.Until(lastTransition.Add(delayDuration)) + } + c.log.WithFields(logrus.Fields{"action": action, "namespace": pod.Namespace, "podName": pod.Name, "podGC": podGC}).Info("queuing pod", "delay", delay) + switch { + case delay > 0: + c.queuePodForCleanupAfter(pod.Namespace, pod.Name, action, delay) + default: + c.queuePodForCleanup(pod.Namespace, pod.Name, action) + } + } +} + +func (c *Controller) addPodEvent(pod *apiv1.Pod) { + c.log.WithField("pod", pod.Name).Info("add pod event") + err := c.callBack(pod) + if err != nil { + c.log.WithField("pod", pod.Name).Warn("callback for pod add failed") + } + c.commonPodEvent(pod, false) +} + +func (c *Controller) updatePodEvent(old *apiv1.Pod, new *apiv1.Pod) { + // This is only called for actual updates, where there are "significant changes" + c.log.WithField("pod", old.Name).Info("update pod event") + err := c.callBack(new) + if err != nil { + c.log.WithField("pod", new.Name).Warn("callback for pod update failed") + } + c.commonPodEvent(new, false) +} + +func (c *Controller) deletePodEvent(obj interface{}) { + pod, err := podFromObj(obj) + if err != nil { + tombstone, ok := obj.(cache.DeletedFinalStateUnknown) + if !ok { + c.log.Info("error obtaining pod object from tombstone") + return + } + pod, ok = tombstone.Obj.(*apiv1.Pod) 
+ if !ok { + c.log.Warn("deleted pod last known state not a pod") + return + } + } + c.log.WithField("pod", pod.Name).Info("delete pod event") + // enqueue the workflow for the deleted pod + err = c.callBack(pod) + if err != nil { + c.log.WithField("pod", pod.Name).Warn("callback for pod delete failed") + } + // Backstop to remove finalizer if it hasn't already happened, our last chance + c.commonPodEvent(pod, true) +} + +func newWorkflowPodWatch(ctx context.Context, clientSet kubernetes.Interface, instanceID, namespace *string) *cache.ListWatch { + c := clientSet.CoreV1().Pods(*namespace) + // completed=false + labelSelector := labels.NewSelector(). + Add(*workflowReq). + // not sure if we should do this + Add(*incompleteReq). + Add(util.InstanceIDRequirement(*instanceID)) + + listFunc := func(options metav1.ListOptions) (runtime.Object, error) { + options.LabelSelector = labelSelector.String() + return c.List(ctx, options) + } + watchFunc := func(options metav1.ListOptions) (watch.Interface, error) { + options.Watch = true + options.LabelSelector = labelSelector.String() + return c.Watch(ctx, options) + } + return &cache.ListWatch{ListFunc: listFunc, WatchFunc: watchFunc} +} + +func newInformer(ctx context.Context, clientSet kubernetes.Interface, instanceID, namespace *string) cache.SharedIndexInformer { + source := newWorkflowPodWatch(ctx, clientSet, instanceID, namespace) + informer := cache.NewSharedIndexInformer(source, &apiv1.Pod{}, podResyncPeriod, cache.Indexers{ + indexes.WorkflowIndex: indexes.MetaWorkflowIndexFunc, + indexes.NodeIDIndex: indexes.MetaNodeIDIndexFunc, + indexes.PodPhaseIndex: indexes.PodPhaseIndexFunc, + }) + return informer +} + +func podFromObj(obj interface{}) (*apiv1.Pod, error) { + pod, ok := obj.(*apiv1.Pod) + if !ok { + return nil, fmt.Errorf("Object is not a pod") + } + return pod, nil +} diff --git a/workflow/controller/pod/queue.go b/workflow/controller/pod/queue.go new file mode 100644 index 000000000000..7c33f08395ba --- 
/dev/null +++ b/workflow/controller/pod/queue.go @@ -0,0 +1,180 @@ +// Package pod reconciles pods and takes care of gc events +package pod + +import ( + "context" + "os" + "slices" + "syscall" + "time" + + "github.com/sirupsen/logrus" + apiv1 "k8s.io/api/core/v1" + apierr "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/types" + typedv1 "k8s.io/client-go/kubernetes/typed/core/v1" + + errorsutil "github.com/argoproj/argo-workflows/v3/util/errors" + "github.com/argoproj/argo-workflows/v3/workflow/common" + "github.com/argoproj/argo-workflows/v3/workflow/signal" +) + +func (c *Controller) runPodCleanup(ctx context.Context) { + for c.processNextPodCleanupItem(ctx) { + } +} + +func (c *Controller) getPodCleanupPatch(pod *apiv1.Pod, labelPodCompleted bool) ([]byte, error) { + un := unstructured.Unstructured{} + if labelPodCompleted { + un.SetLabels(map[string]string{common.LabelKeyCompleted: "true"}) + } + + finalizerEnabled := os.Getenv(common.EnvVarPodStatusCaptureFinalizer) == "true" + if finalizerEnabled && pod.Finalizers != nil { + finalizers := slices.Clone(pod.Finalizers) + finalizers = slices.DeleteFunc(finalizers, + func(s string) bool { return s == common.FinalizerPodStatus }) + if len(finalizers) != len(pod.Finalizers) { + un.SetFinalizers(finalizers) + un.SetResourceVersion(pod.ResourceVersion) + } + } + + // if there was nothing to patch (no-op) + if len(un.Object) == 0 { + return nil, nil + } + + return un.MarshalJSON() +} + +// signalContainers signals all containers of a pod +func (c *Controller) signalContainers(ctx context.Context, namespace string, podName string, sig syscall.Signal) (time.Duration, error) { + pod, err := c.GetPod(namespace, podName) + if pod == nil || err != nil { + return 0, err + } + + for _, container := range pod.Status.ContainerStatuses { + if container.State.Running == nil { + continue + } + // problems are 
already logged at info level, so we just ignore errors here + _ = signal.SignalContainer(ctx, c.restConfig, pod, container.Name, sig) + } + if pod.Spec.TerminationGracePeriodSeconds == nil { + return 30 * time.Second, nil + } + return time.Duration(*pod.Spec.TerminationGracePeriodSeconds) * time.Second, nil +} + +func (c *Controller) patchPodForCleanup(ctx context.Context, pods typedv1.PodInterface, namespace, podName string, labelPodCompleted bool) error { + pod, err := c.GetPod(namespace, podName) + // err is always nil in all kind of caches for now + if err != nil { + return err + } + // if pod is nil, it must have been deleted + if pod == nil { + return nil + } + + patch, err := c.getPodCleanupPatch(pod, labelPodCompleted) + if err != nil { + return err + } + if patch == nil { + return nil + } + + _, err = pods.Patch(ctx, podName, types.MergePatchType, patch, metav1.PatchOptions{}) + if err != nil && !apierr.IsNotFound(err) { + return err + } + + return nil +} + +// all pods will ultimately be cleaned up by either deleting them, or labelling them +func (c *Controller) processNextPodCleanupItem(ctx context.Context) bool { + key, quit := c.workqueue.Get() + if quit { + return false + } + + defer func() { + c.workqueue.Forget(key) + c.workqueue.Done(key) + }() + + namespace, podName, action := parsePodCleanupKey(key) + logCtx := c.log.WithFields(logrus.Fields{"key": key, "action": action, "namespace": namespace, "podName": podName}) + logCtx.Info("cleaning up pod") + err := func() error { + switch action { + case terminateContainers: + pod, err := c.GetPod(namespace, podName) + if err == nil && pod != nil && pod.Status.Phase == apiv1.PodPending { + c.queuePodForCleanup(namespace, podName, deletePod) + } else if terminationGracePeriod, err := c.signalContainers(ctx, namespace, podName, syscall.SIGTERM); err != nil { + return err + } else if terminationGracePeriod > 0 { + c.queuePodForCleanupAfter(namespace, podName, killContainers, terminationGracePeriod) + } + 
case killContainers: + if _, err := c.signalContainers(ctx, namespace, podName, syscall.SIGKILL); err != nil { + return err + } + case labelPodCompleted: + pods := c.kubeclientset.CoreV1().Pods(namespace) + if err := c.patchPodForCleanup(ctx, pods, namespace, podName, true); err != nil { + return err + } + case deletePod: + pods := c.kubeclientset.CoreV1().Pods(namespace) + if err := c.patchPodForCleanup(ctx, pods, namespace, podName, false); err != nil { + return err + } + propagation := metav1.DeletePropagationBackground + err := pods.Delete(ctx, podName, metav1.DeleteOptions{ + PropagationPolicy: &propagation, + GracePeriodSeconds: c.config.PodGCGracePeriodSeconds, + }) + if err != nil && !apierr.IsNotFound(err) { + return err + } + case removeFinalizer: + pods := c.kubeclientset.CoreV1().Pods(namespace) + if err := c.patchPodForCleanup(ctx, pods, namespace, podName, false); err != nil { + return err + } + } + return nil + }() + if err != nil { + logCtx.WithError(err).Warn("failed to clean-up pod") + if errorsutil.IsTransientErr(err) || apierr.IsConflict(err) { + c.workqueue.AddRateLimited(key) + } + } + return true +} + +func (c *Controller) queuePodForCleanup(namespace string, podName string, action podCleanupAction) { + c.log.WithFields(logrus.Fields{"namespace": namespace, "podName": podName, "action": action}).Info("queueing pod for cleanup") + c.workqueue.AddRateLimited(newPodCleanupKey(namespace, podName, action)) +} + +func (c *Controller) queuePodForCleanupAfter(namespace string, podName string, action podCleanupAction, duration time.Duration) { + logCtx := c.log.WithFields(logrus.Fields{"namespace": namespace, "podName": podName, "action": action, "after": duration}) + if duration > 0 { + logCtx.Info("queueing pod for cleanup after") + c.workqueue.AddAfter(newPodCleanupKey(namespace, podName, action), duration) + } else { + logCtx.Warn("queueing pod for cleanup now, rather than delayed") + c.workqueue.AddRateLimited(newPodCleanupKey(namespace, podName, 
action)) + } +} diff --git a/workflow/controller/pod/queue_test.go b/workflow/controller/pod/queue_test.go new file mode 100644 index 000000000000..09d9ec010b36 --- /dev/null +++ b/workflow/controller/pod/queue_test.go @@ -0,0 +1,57 @@ +package pod + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + apiv1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + "github.com/argoproj/argo-workflows/v3/workflow/common" +) + +func TestPodCleanupPatch(t *testing.T) { + c := &Controller{} + + pod := &apiv1.Pod{ + ObjectMeta: metav1.ObjectMeta{ + Labels: map[string]string{common.LabelKeyCompleted: "false"}, + Finalizers: []string{common.FinalizerPodStatus}, + ResourceVersion: "123456", + }, + } + + t.Setenv(common.EnvVarPodStatusCaptureFinalizer, "true") + + // pod finalizer enabled, patch label + patch, err := c.getPodCleanupPatch(pod, true) + require.NoError(t, err) + expected := `{"metadata":{"resourceVersion":"123456","finalizers":[],"labels":{"workflows.argoproj.io/completed":"true"}}}` + assert.JSONEq(t, expected, string(patch)) + + // pod finalizer enabled, do not patch label + patch, err = c.getPodCleanupPatch(pod, false) + require.NoError(t, err) + expected = `{"metadata":{"resourceVersion":"123456","finalizers":[]}}` + assert.JSONEq(t, expected, string(patch)) + + // pod finalizer enabled, do not patch label, nil/empty finalizers + podWithNilFinalizers := &apiv1.Pod{} + patch, err = c.getPodCleanupPatch(podWithNilFinalizers, false) + require.NoError(t, err) + assert.Nil(t, patch) + + t.Setenv(common.EnvVarPodStatusCaptureFinalizer, "false") + + // pod finalizer disabled, patch both + patch, err = c.getPodCleanupPatch(pod, true) + require.NoError(t, err) + expected = `{"metadata":{"labels":{"workflows.argoproj.io/completed":"true"}}}` + assert.JSONEq(t, expected, string(patch)) + + // pod finalizer disabled, do not patch label + patch, err = c.getPodCleanupPatch(pod, false) + 
require.NoError(t, err) + assert.Nil(t, patch) +} diff --git a/workflow/controller/pod/significant.go b/workflow/controller/pod/significant.go index cf9e5a9ad537..9a7c9319ac72 100644 --- a/workflow/controller/pod/significant.go +++ b/workflow/controller/pod/significant.go @@ -6,7 +6,7 @@ import ( apiv1 "k8s.io/api/core/v1" ) -func SignificantPodChange(from *apiv1.Pod, to *apiv1.Pod) bool { +func significantPodChange(from *apiv1.Pod, to *apiv1.Pod) bool { return os.Getenv("ALL_POD_CHANGES_SIGNIFICANT") == "true" || from.Spec.NodeName != to.Spec.NodeName || from.Status.Phase != to.Status.Phase || diff --git a/workflow/controller/pod/significant_test.go b/workflow/controller/pod/significant_test.go index bc2d94c73718..dcc34144f775 100644 --- a/workflow/controller/pod/significant_test.go +++ b/workflow/controller/pod/significant_test.go @@ -10,77 +10,77 @@ import ( func Test_SgnificantPodChange(t *testing.T) { t.Run("NoChange", func(t *testing.T) { - assert.False(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{})) + assert.False(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{})) }) t.Run("ALL_POD_CHANGES_SIGNIFICANT", func(t *testing.T) { t.Setenv("ALL_POD_CHANGES_SIGNIFICANT", "true") - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{})) + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{})) }) t.Run("DeletionTimestamp", func(t *testing.T) { now := metav1.Now() - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{DeletionTimestamp: &now}}), "deletion timestamp change") + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{DeletionTimestamp: &now}}), "deletion timestamp change") }) t.Run("Annotations", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}), "new annotation") - 
assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "baz"}}}), "changed annotation") - assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{}}}), "deleted annotation") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}), "new annotation") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "baz"}}}), "changed annotation") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Annotations: map[string]string{}}}), "deleted annotation") }) t.Run("Labels", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}), "new label") - assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "baz"}}}), "changed label") - assert.True(t, SignificantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{}}}), "deleted label") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{}}}, &corev1.Pod{ObjectMeta: 
metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}), "new label") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "baz"}}}), "changed label") + assert.True(t, significantPodChange(&corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{"foo": "bar"}}}, &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{}}}), "deleted label") }) t.Run("Spec", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{Spec: corev1.PodSpec{NodeName: "from"}}), "Node name change") + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{Spec: corev1.PodSpec{NodeName: "from"}}), "Node name change") }) t.Run("Status", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{Phase: corev1.PodRunning}}), "Phase change") - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{PodIP: "my-ip"}}), "Pod IP change") + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{Phase: corev1.PodRunning}}), "Phase change") + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{PodIP: "my-ip"}}), "Pod IP change") }) t.Run("ContainerStatuses", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}), "Number of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}), "Number of container status changes") + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}, &corev1.Pod{Status: 
corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{Ready: true}}}}, ), "Ready of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{}}}}}}, ), "Waiting of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{}}}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{Reason: "my-reason"}}}}}}, ), "Waiting reason of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{}}}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{Message: "my-message"}}}}}}, ), "Waiting message of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{}}}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Waiting: &corev1.ContainerStateWaiting{Message: "my-message"}}}}}}, ), "Waiting message of container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: 
corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Running: &corev1.ContainerStateRunning{}}}}}}, ), "Running container status changes") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{}}}}, &corev1.Pod{Status: corev1.PodStatus{ContainerStatuses: []corev1.ContainerStatus{{State: corev1.ContainerState{Terminated: &corev1.ContainerStateTerminated{}}}}}}, ), "Terminate container status changes") }) t.Run("InitContainerStatuses", func(t *testing.T) { - assert.True(t, SignificantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{InitContainerStatuses: []corev1.ContainerStatus{{}}}}), "Number of container status changes") + assert.True(t, significantPodChange(&corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{InitContainerStatuses: []corev1.ContainerStatus{{}}}}), "Number of container status changes") }) t.Run("Conditions", func(t *testing.T) { - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{}, &corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{{}}}}), "condition added") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{{}}}}, &corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{{Reason: "es"}}}}, ), "condition changed") - assert.True(t, SignificantPodChange( + assert.True(t, significantPodChange( &corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{{}}}}, &corev1.Pod{}, ), "condition removed") diff --git a/workflow/controller/pod/testing.go b/workflow/controller/pod/testing.go new file mode 100644 index 000000000000..be338a09e2e4 --- /dev/null +++ b/workflow/controller/pod/testing.go @@ -0,0 +1,21 @@ +package pod + +import ( + 
"context" + + "k8s.io/client-go/tools/cache" +) + +// Accessors for the unit tests in /workflow/controller + +func (c *Controller) TestingPodInformer() cache.SharedIndexInformer { + return c.podInformer +} + +func (c *Controller) TestingProcessNextItem(ctx context.Context) bool { + return c.processNextPodCleanupItem(ctx) +} + +func (c *Controller) TestingQueueNumRequeues(key string) int { + return c.workqueue.NumRequeues(key) +} diff --git a/workflow/controller/pod_cleanup.go b/workflow/controller/pod_cleanup.go index 12787ae77d22..04e68e7edcba 100644 --- a/workflow/controller/pod_cleanup.go +++ b/workflow/controller/pod_cleanup.go @@ -1,28 +1,33 @@ package controller import ( - apiv1 "k8s.io/api/core/v1" - "k8s.io/apimachinery/pkg/labels" + "time" - "github.com/argoproj/argo-workflows/v3/workflow/common" + apiv1 "k8s.io/api/core/v1" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/workflow/common" "github.com/argoproj/argo-workflows/v3/workflow/controller/indexes" ) -func (woc *wfOperationCtx) queuePodsForCleanup() { +func (woc *wfOperationCtx) getPodGCDelay(podGC *wfv1.PodGC) time.Duration { delay := woc.controller.Config.GetPodGCDeleteDelayDuration() - podGC := woc.execWf.Spec.PodGC podGCDelay, err := podGC.GetDeleteDelayDuration() if err != nil { woc.log.WithError(err).Warn("failed to parse podGC.deleteDelayDuration") } else if podGCDelay >= 0 { delay = podGCDelay } + return delay +} + +func (woc *wfOperationCtx) queuePodsForCleanup() { + podGC := woc.execWf.Spec.PodGC + delay := woc.getPodGCDelay(podGC) strategy := podGC.GetStrategy() selector, _ := podGC.GetLabelSelector() workflowPhase := woc.wf.Status.Phase - objs, _ := woc.controller.podInformer.GetIndexer().ByIndex(indexes.WorkflowIndex, woc.wf.Namespace+"/"+woc.wf.Name) + objs, _ := woc.controller.PodController.GetPodsByIndex(indexes.WorkflowIndex, woc.wf.Namespace+"/"+woc.wf.Name) for _, obj := range objs { pod := obj.(*apiv1.Pod) if _, ok 
:= pod.Labels[common.LabelKeyComponent]; ok { // for these types we don't want to do PodGC @@ -37,39 +42,6 @@ func (woc *wfOperationCtx) queuePodsForCleanup() { if !nodePhase.Fulfilled() { continue } - switch determinePodCleanupAction(selector, pod.Labels, strategy, workflowPhase, pod.Status.Phase) { - case deletePod: - woc.controller.queuePodForCleanupAfter(pod.Namespace, pod.Name, deletePod, delay) - case labelPodCompleted: - woc.controller.queuePodForCleanup(pod.Namespace, pod.Name, labelPodCompleted) - } - } -} - -func determinePodCleanupAction( - selector labels.Selector, - podLabels map[string]string, - strategy wfv1.PodGCStrategy, - workflowPhase wfv1.WorkflowPhase, - podPhase apiv1.PodPhase, -) podCleanupAction { - switch { - case !selector.Matches(labels.Set(podLabels)): // if the pod will never be deleted, label it now - return labelPodCompleted - case strategy == wfv1.PodGCOnPodNone: - return labelPodCompleted - case strategy == wfv1.PodGCOnWorkflowCompletion && workflowPhase.Completed(): - return deletePod - case strategy == wfv1.PodGCOnWorkflowSuccess && workflowPhase == wfv1.WorkflowSucceeded: - return deletePod - case strategy == wfv1.PodGCOnPodCompletion: - return deletePod - case strategy == wfv1.PodGCOnPodSuccess && podPhase == apiv1.PodSucceeded: - return deletePod - case strategy == wfv1.PodGCOnPodSuccess && podPhase == apiv1.PodFailed: - return labelPodCompleted - case workflowPhase.Completed(): - return labelPodCompleted + woc.controller.PodController.EnactAnyPodCleanup(selector, pod, strategy, workflowPhase, delay) } - return "" } diff --git a/workflow/controller/rate_limiters.go b/workflow/controller/rate_limiters.go index 504bfb5e6117..eb7ad3f65c0b 100644 --- a/workflow/controller/rate_limiters.go +++ b/workflow/controller/rate_limiters.go @@ -22,14 +22,14 @@ func GetRequeueTime() time.Duration { type fixedItemIntervalRateLimiter struct{} -func (r *fixedItemIntervalRateLimiter) When(interface{}) time.Duration { +func (r 
*fixedItemIntervalRateLimiter) When(string) time.Duration { return GetRequeueTime() } -func (r *fixedItemIntervalRateLimiter) Forget(interface{}) {} +func (r *fixedItemIntervalRateLimiter) Forget(string) {} -func (r *fixedItemIntervalRateLimiter) NumRequeues(interface{}) int { +func (r *fixedItemIntervalRateLimiter) NumRequeues(string) int { return 1 } -var _ workqueue.RateLimiter = &fixedItemIntervalRateLimiter{} +var _ workqueue.TypedRateLimiter[string] = &fixedItemIntervalRateLimiter{} diff --git a/workflow/controller/taskresult.go b/workflow/controller/taskresult.go index d6c003d0b51a..40db86cdccb5 100644 --- a/workflow/controller/taskresult.go +++ b/workflow/controller/taskresult.go @@ -9,6 +9,8 @@ import ( "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" + "k8s.io/apimachinery/pkg/selection" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" wfextvv1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/client/informers/externalversions/workflow/v1alpha1" envutil "github.com/argoproj/argo-workflows/v3/util/env" @@ -16,6 +18,10 @@ import ( "github.com/argoproj/argo-workflows/v3/workflow/controller/indexes" ) +var ( + workflowReq, _ = labels.NewRequirement(common.LabelKeyWorkflow, selection.Exists, nil) +) + func (wfc *WorkflowController) newWorkflowTaskResultInformer() cache.SharedIndexInformer { labelSelector := labels.NewSelector(). Add(*workflowReq). @@ -71,10 +77,11 @@ func (woc *wfOperationCtx) taskResultReconciliation() { label := result.Labels[common.LabelKeyReportOutputsCompleted] // If the task result is completed, set the state to true. 
- if label == "true" { + switch label { + case "true": woc.log.Debugf("Marking task result complete %s", resultName) woc.wf.Status.MarkTaskResultComplete(resultName) - } else if label == "false" { + case "false": woc.log.Debugf("Marking task result incomplete %s", resultName) woc.wf.Status.MarkTaskResultIncomplete(resultName) } diff --git a/workflow/controller/taskset_test.go b/workflow/controller/taskset_test.go index dd54ced0a8de..7b36f48b3ba7 100644 --- a/workflow/controller/taskset_test.go +++ b/workflow/controller/taskset_test.go @@ -110,7 +110,7 @@ spec: for _, pod := range pods.Items { assert.NotNil(t, pod) assert.True(t, strings.HasSuffix(pod.Name, "-agent")) - assert.Equal(t, "testID", pod.ObjectMeta.Labels[common.LabelKeyControllerInstanceID]) + assert.Equal(t, "testID", pod.Labels[common.LabelKeyControllerInstanceID]) } }) } diff --git a/workflow/controller/workflowpod.go b/workflow/controller/workflowpod.go index 735b6ac04e07..b3170388d152 100644 --- a/workflow/controller/workflowpod.go +++ b/workflow/controller/workflowpod.go @@ -165,10 +165,10 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin pod := &apiv1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: util.GeneratePodName(woc.wf.Name, nodeName, tmpl.Name, nodeID, util.GetWorkflowPodNameVersion(woc.wf)), - Namespace: woc.wf.ObjectMeta.Namespace, + Namespace: woc.wf.Namespace, Labels: map[string]string{ - common.LabelKeyWorkflow: woc.wf.ObjectMeta.Name, // Allows filtering by pods related to specific workflow - common.LabelKeyCompleted: "false", // Allows filtering by incomplete workflow pods + common.LabelKeyWorkflow: woc.wf.Name, // Allows filtering by pods related to specific workflow + common.LabelKeyCompleted: "false", // Allows filtering by incomplete workflow pods }, Annotations: map[string]string{ common.AnnotationKeyNodeName: nodeName, @@ -187,12 +187,12 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin } if 
os.Getenv(common.EnvVarPodStatusCaptureFinalizer) == "true" { - pod.ObjectMeta.Finalizers = append(pod.ObjectMeta.Finalizers, common.FinalizerPodStatus) + pod.Finalizers = append(pod.Finalizers, common.FinalizerPodStatus) } if opts.onExitPod { // This pod is part of an onExit handler, label it so - pod.ObjectMeta.Labels[common.LabelKeyOnExit] = "true" + pod.Labels[common.LabelKeyOnExit] = "true" } if woc.execWf.Spec.HostNetwork != nil { @@ -202,7 +202,7 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin woc.addDNSConfig(pod) if woc.controller.Config.InstanceID != "" { - pod.ObjectMeta.Labels[common.LabelKeyControllerInstanceID] = woc.controller.Config.InstanceID + pod.Labels[common.LabelKeyControllerInstanceID] = woc.controller.Config.InstanceID } woc.addArchiveLocation(tmpl) @@ -234,7 +234,11 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin break } } - pod.ObjectMeta.Annotations[common.AnnotationKeyDefaultContainer] = defaultContainer + pod.Annotations[common.AnnotationKeyDefaultContainer] = defaultContainer + + if podGC := woc.execWf.Spec.PodGC; podGC != nil { + pod.Annotations[common.AnnotationKeyPodGCStrategy] = fmt.Sprintf("%s/%s", podGC.GetStrategy(), woc.getPodGCDelay(podGC)) + } // Add init container only if it needs input artifacts. This is also true for // script templates (which needs to populate the script) @@ -245,7 +249,7 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin woc.addMetadata(pod, tmpl) // Set initial progress from pod metadata if exists. 
- if x, ok := pod.ObjectMeta.Annotations[common.AnnotationKeyProgress]; ok { + if x, ok := pod.Annotations[common.AnnotationKeyProgress]; ok { if p, ok := wfv1.ParseProgress(x); ok { node, err := woc.wf.Status.Nodes.Get(nodeID) if err != nil { @@ -432,9 +436,9 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin cm := &apiv1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: cmName, - Namespace: woc.wf.ObjectMeta.Namespace, + Namespace: woc.wf.Namespace, Labels: map[string]string{ - common.LabelKeyWorkflow: woc.wf.ObjectMeta.Name, + common.LabelKeyWorkflow: woc.wf.Name, }, Annotations: map[string]string{ common.AnnotationKeyNodeName: nodeName, @@ -538,7 +542,7 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin } func (woc *wfOperationCtx) podExists(nodeID string) (existing *apiv1.Pod, exists bool, err error) { - objs, err := woc.controller.podInformer.GetIndexer().ByIndex(indexes.NodeIDIndex, woc.wf.Namespace+"/"+nodeID) + objs, err := woc.controller.PodController.GetPodsByIndex(indexes.NodeIDIndex, woc.wf.Namespace+"/"+nodeID) if err != nil { return nil, false, fmt.Errorf("failed to get pod from informer store: %w", err) } @@ -732,18 +736,18 @@ func (woc *wfOperationCtx) addMetadata(pod *apiv1.Pod, tmpl *wfv1.Template) { if woc.execWf.Spec.PodMetadata != nil { // add workflow-level pod annotations and labels for k, v := range woc.execWf.Spec.PodMetadata.Annotations { - pod.ObjectMeta.Annotations[k] = v + pod.Annotations[k] = v } for k, v := range woc.execWf.Spec.PodMetadata.Labels { - pod.ObjectMeta.Labels[k] = v + pod.Labels[k] = v } } for k, v := range tmpl.Metadata.Annotations { - pod.ObjectMeta.Annotations[k] = v + pod.Annotations[k] = v } for k, v := range tmpl.Metadata.Labels { - pod.ObjectMeta.Labels[k] = v + pod.Labels[k] = v } } diff --git a/workflow/controller/workflowpod_test.go b/workflow/controller/workflowpod_test.go index 42b5b359a54a..f719fb502af7 100644 --- 
a/workflow/controller/workflowpod_test.go +++ b/workflow/controller/workflowpod_test.go @@ -450,13 +450,13 @@ func TestMetadata(t *testing.T) { assert.Len(t, pods.Items, 1) pod := pods.Items[0] assert.NotNil(t, pod.ObjectMeta) - assert.NotNil(t, pod.ObjectMeta.Annotations) - assert.NotNil(t, pod.ObjectMeta.Labels) + assert.NotNil(t, pod.Annotations) + assert.NotNil(t, pod.Labels) for k, v := range woc.execWf.Spec.Templates[0].Metadata.Annotations { - assert.Equal(t, pod.ObjectMeta.Annotations[k], v) + assert.Equal(t, pod.Annotations[k], v) } for k, v := range woc.execWf.Spec.Templates[0].Metadata.Labels { - assert.Equal(t, pod.ObjectMeta.Labels[k], v) + assert.Equal(t, pod.Labels[k], v) } } @@ -830,7 +830,7 @@ func TestOutOfCluster(t *testing.T) { assert.Len(t, pods.Items, 1) pod := pods.Items[0] assert.Equal(t, "kubeconfig", pod.Spec.Volumes[0].Name) - assert.Equal(t, "foo", pod.Spec.Volumes[0].VolumeSource.Secret.SecretName) + assert.Equal(t, "foo", pod.Spec.Volumes[0].Secret.SecretName) waitCtr := pod.Spec.Containers[0] verifyKubeConfigVolume(waitCtr, "kubeconfig", "/kube/config") @@ -856,7 +856,7 @@ func TestOutOfCluster(t *testing.T) { assert.Len(t, pods.Items, 1) pod := pods.Items[0] assert.Equal(t, "kube-config-secret", pod.Spec.Volumes[0].Name) - assert.Equal(t, "foo", pod.Spec.Volumes[0].VolumeSource.Secret.SecretName) + assert.Equal(t, "foo", pod.Spec.Volumes[0].Secret.SecretName) // kubeconfig volume is the last one waitCtr := pod.Spec.Containers[0] @@ -1763,17 +1763,17 @@ func TestPodMetadata(t *testing.T) { woc := newWoc(*wf) mainCtr := woc.execWf.Spec.Templates[0].Container pod, _ := woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{}) - assert.Equal(t, "foo", pod.ObjectMeta.Annotations["workflow-level-pod-annotation"]) - assert.Equal(t, "bar", pod.ObjectMeta.Labels["workflow-level-pod-label"]) + assert.Equal(t, "foo", pod.Annotations["workflow-level-pod-annotation"]) + assert.Equal(t, 
"bar", pod.Labels["workflow-level-pod-label"]) wf = wfv1.MustUnmarshalWorkflow(wfWithPodMetadataAndTemplateMetadata) woc = newWoc(*wf) mainCtr = woc.execWf.Spec.Templates[0].Container pod, _ = woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{}) - assert.Equal(t, "fizz", pod.ObjectMeta.Annotations["workflow-level-pod-annotation"]) - assert.Equal(t, "buzz", pod.ObjectMeta.Labels["workflow-level-pod-label"]) - assert.Equal(t, "hello", pod.ObjectMeta.Annotations["template-level-pod-annotation"]) - assert.Equal(t, "world", pod.ObjectMeta.Labels["template-level-pod-label"]) + assert.Equal(t, "fizz", pod.Annotations["workflow-level-pod-annotation"]) + assert.Equal(t, "buzz", pod.Labels["workflow-level-pod-label"]) + assert.Equal(t, "hello", pod.Annotations["template-level-pod-annotation"]) + assert.Equal(t, "world", pod.Labels["template-level-pod-label"]) } var wfWithContainerSet = ` @@ -1805,13 +1805,13 @@ func TestPodDefaultContainer(t *testing.T) { woc := newWoc(*wf) template := woc.execWf.Spec.Templates[0] pod, _ := woc.createWorkflowPod(ctx, wf.Name, template.ContainerSet.GetContainers(), &wf.Spec.Templates[0], &createWorkflowPodOpts{}) - assert.Equal(t, common.MainContainerName, pod.ObjectMeta.Annotations[common.AnnotationKeyDefaultContainer]) + assert.Equal(t, common.MainContainerName, pod.Annotations[common.AnnotationKeyDefaultContainer]) wf = wfv1.MustUnmarshalWorkflow(wfWithContainerSet) woc = newWoc(*wf) template = woc.execWf.Spec.Templates[0] pod, _ = woc.createWorkflowPod(ctx, wf.Name, template.ContainerSet.GetContainers(), &template, &createWorkflowPodOpts{}) - assert.Equal(t, "b", pod.ObjectMeta.Annotations[common.AnnotationKeyDefaultContainer]) + assert.Equal(t, "b", pod.Annotations[common.AnnotationKeyDefaultContainer]) } func TestGetDeadline(t *testing.T) { @@ -1859,10 +1859,10 @@ func TestPodMetadataWithWorkflowDefaults(t *testing.T) { require.NoError(t, err) mainCtr := 
woc.execWf.Spec.Templates[0].Container pod, _ := woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{}) - assert.Equal(t, "annotation-value", pod.ObjectMeta.Annotations["controller-level-pod-annotation"]) - assert.Equal(t, "set-by-controller", pod.ObjectMeta.Annotations["workflow-level-pod-annotation"]) - assert.Equal(t, "label-value", pod.ObjectMeta.Labels["controller-level-pod-label"]) - assert.Equal(t, "set-by-controller", pod.ObjectMeta.Labels["workflow-level-pod-label"]) + assert.Equal(t, "annotation-value", pod.Annotations["controller-level-pod-annotation"]) + assert.Equal(t, "set-by-controller", pod.Annotations["workflow-level-pod-annotation"]) + assert.Equal(t, "label-value", pod.Labels["controller-level-pod-label"]) + assert.Equal(t, "set-by-controller", pod.Labels["workflow-level-pod-label"]) cancel() // need to cancel to spin up pods with the same name cancel, controller = newController() @@ -1882,10 +1882,10 @@ func TestPodMetadataWithWorkflowDefaults(t *testing.T) { require.NoError(t, err) mainCtr = woc.execWf.Spec.Templates[0].Container pod, _ = woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{}) - assert.Equal(t, "foo", pod.ObjectMeta.Annotations["workflow-level-pod-annotation"]) - assert.Equal(t, "bar", pod.ObjectMeta.Labels["workflow-level-pod-label"]) - assert.Equal(t, "annotation-value", pod.ObjectMeta.Annotations["controller-level-pod-annotation"]) - assert.Equal(t, "label-value", pod.ObjectMeta.Labels["controller-level-pod-label"]) + assert.Equal(t, "foo", pod.Annotations["workflow-level-pod-annotation"]) + assert.Equal(t, "bar", pod.Labels["workflow-level-pod-label"]) + assert.Equal(t, "annotation-value", pod.Annotations["controller-level-pod-annotation"]) + assert.Equal(t, "label-value", pod.Labels["controller-level-pod-label"]) cancel() } @@ -1909,11 +1909,11 @@ func TestPodExists(t *testing.T) { // Sleep 1 second to wait 
for informer getting pod info time.Sleep(time.Second) - existingPod, doesExist, err := woc.podExists(pod.ObjectMeta.Name) + existingPod, doesExist, err := woc.podExists(pod.Name) require.NoError(t, err) assert.NotNil(t, existingPod) assert.True(t, doesExist) - assert.EqualValues(t, pod, existingPod) + assert.Equal(t, pod, existingPod) } func TestPodFinalizerExits(t *testing.T) { diff --git a/workflow/cron/controller.go b/workflow/cron/controller.go index 106bc33e588d..fdc454bb640e 100644 --- a/workflow/cron/controller.go +++ b/workflow/cron/controller.go @@ -48,7 +48,7 @@ type Controller struct { cronWfInformer informers.GenericInformer wftmplInformer wfextvv1alpha1.WorkflowTemplateInformer cwftmplInformer wfextvv1alpha1.ClusterWorkflowTemplateInformer - cronWfQueue workqueue.RateLimitingInterface + cronWfQueue workqueue.TypedRateLimitingInterface[string] dynamicInterface dynamic.Interface metrics *metrics.Metrics eventRecorderManager events.EventRecorderManager @@ -82,7 +82,7 @@ func NewCronController(ctx context.Context, wfclientset versioned.Interface, dyn cron: newCronFacade(), keyLock: sync.NewKeyLock(), dynamicInterface: dynamicInterface, - cronWfQueue: metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultControllerRateLimiter(), "cron_wf_queue"), + cronWfQueue: metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "cron_wf_queue"), metrics: metrics, eventRecorderManager: eventRecorderManager, wftmplInformer: wftmplInformer, @@ -92,7 +92,7 @@ func NewCronController(ctx context.Context, wfclientset versioned.Interface, dyn } func (cc *Controller) Run(ctx context.Context) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) 
defer cc.cronWfQueue.ShutDown() log.Infof("Starting CronWorkflow controller") if cc.instanceId != "" { @@ -136,7 +136,7 @@ func (cc *Controller) runCronWorker() { } func (cc *Controller) processNextCronItem(ctx context.Context) bool { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) key, quit := cc.cronWfQueue.Get() if quit { @@ -144,20 +144,20 @@ func (cc *Controller) processNextCronItem(ctx context.Context) bool { } defer cc.cronWfQueue.Done(key) - cc.keyLock.Lock(key.(string)) - defer cc.keyLock.Unlock(key.(string)) + cc.keyLock.Lock(key) + defer cc.keyLock.Unlock(key) logCtx := log.WithField("cronWorkflow", key) logCtx.Infof("Processing %s", key) - obj, exists, err := cc.cronWfInformer.Informer().GetIndexer().GetByKey(key.(string)) + obj, exists, err := cc.cronWfInformer.Informer().GetIndexer().GetByKey(key) if err != nil { logCtx.WithError(err).Error(fmt.Sprintf("Failed to get CronWorkflow '%s' from informer index", key)) return true } if !exists { logCtx.Infof("Deleting '%s'", key) - cc.cron.Delete(key.(string)) + cc.cron.Delete(key) return true } @@ -193,10 +193,10 @@ func (cc *Controller) processNextCronItem(ctx context.Context) bool { } // The job is currently scheduled, remove it and re add it. 
- cc.cron.Delete(key.(string)) + cc.cron.Delete(key) for _, schedule := range cronWf.Spec.GetSchedulesWithTimezone(ctx) { - lastScheduledTimeFunc, err := cc.cron.AddJob(key.(string), schedule, cronWorkflowOperationCtx) + lastScheduledTimeFunc, err := cc.cron.AddJob(key, schedule, cronWorkflowOperationCtx) if err != nil { logCtx.WithError(err).Error("could not schedule CronWorkflow") return true @@ -204,7 +204,7 @@ func (cc *Controller) processNextCronItem(ctx context.Context) bool { cronWorkflowOperationCtx.scheduledTimeFunc = lastScheduledTimeFunc } - logCtx.Infof("CronWorkflow %s added", key.(string)) + logCtx.Infof("CronWorkflow %s added", key) return true } @@ -256,7 +256,7 @@ func isCompleted(wf v1.Object) bool { } func (cc *Controller) syncAll(ctx context.Context) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) log.Debug("Syncing all CronWorkflows") diff --git a/workflow/cron/operator.go b/workflow/cron/operator.go index ea400aaab017..1b9fd00a3201 100644 --- a/workflow/cron/operator.go +++ b/workflow/cron/operator.go @@ -58,7 +58,7 @@ func newCronWfOperationCtx(cronWorkflow *v1alpha1.CronWorkflow, wfClientset vers wftmplInformer wfextvv1alpha1.WorkflowTemplateInformer, cwftmplInformer wfextvv1alpha1.ClusterWorkflowTemplateInformer, ) *cronWfOperationCtx { return &cronWfOperationCtx{ - name: cronWorkflow.ObjectMeta.Name, + name: cronWorkflow.Name, cronWf: cronWorkflow, wfClientset: wfClientset, wfClient: wfClientset.ArgoprojV1alpha1().Workflows(cronWorkflow.Namespace), @@ -66,8 +66,8 @@ func newCronWfOperationCtx(cronWorkflow *v1alpha1.CronWorkflow, wfClientset vers wftmplInformer: wftmplInformer, cwftmplInformer: cwftmplInformer, log: log.WithFields(log.Fields{ - "workflow": cronWorkflow.ObjectMeta.Name, - "namespace": cronWorkflow.ObjectMeta.Namespace, + "workflow": cronWorkflow.Name, + "namespace": cronWorkflow.Namespace, }), metrics: metrics, // inferScheduledTime 
returns an inferred scheduled time based on the current time and only works if it is called @@ -117,7 +117,7 @@ func (woc *cronWfOperationCtx) run(ctx context.Context, scheduledRuntime time.Ti return } - woc.metrics.CronWfTrigger(ctx, woc.name, woc.cronWf.ObjectMeta.Namespace) + woc.metrics.CronWfTrigger(ctx, woc.name, woc.cronWf.Namespace) wf := common.ConvertCronWorkflowToWorkflowWithProperties(woc.cronWf, getChildWorkflowName(woc.cronWf.Name, scheduledRuntime), scheduledRuntime) @@ -138,7 +138,7 @@ func (woc *cronWfOperationCtx) run(ctx context.Context, scheduledRuntime time.Ti } func (woc *cronWfOperationCtx) validateCronWorkflow(ctx context.Context) error { - wftmplGetter := informer.NewWorkflowTemplateFromInformerGetter(woc.wftmplInformer, woc.cronWf.ObjectMeta.Namespace) + wftmplGetter := informer.NewWorkflowTemplateFromInformerGetter(woc.wftmplInformer, woc.cronWf.Namespace) cwftmplGetter := informer.NewClusterWorkflowTemplateFromInformerGetter(woc.cwftmplInformer) err := validate.ValidateCronWorkflow(ctx, wftmplGetter, cwftmplGetter, woc.cronWf) if err != nil { @@ -261,13 +261,13 @@ func (woc *cronWfOperationCtx) enforceRuntimePolicy(ctx context.Context) (bool, // Do nothing case v1alpha1.ForbidConcurrent: if len(woc.cronWf.Status.Active) > 0 { - woc.metrics.CronWfPolicy(ctx, woc.name, woc.cronWf.ObjectMeta.Namespace, v1alpha1.ForbidConcurrent) + woc.metrics.CronWfPolicy(ctx, woc.name, woc.cronWf.Namespace, v1alpha1.ForbidConcurrent) woc.log.Infof("%s has 'ConcurrencyPolicy: Forbid' and has an active Workflow so it was not run", woc.name) return false, nil } case v1alpha1.ReplaceConcurrent: if len(woc.cronWf.Status.Active) > 0 { - woc.metrics.CronWfPolicy(ctx, woc.name, woc.cronWf.ObjectMeta.Namespace, v1alpha1.ReplaceConcurrent) + woc.metrics.CronWfPolicy(ctx, woc.name, woc.cronWf.Namespace, v1alpha1.ReplaceConcurrent) woc.log.Infof("%s has 'ConcurrencyPolicy: Replace' and has active Workflows", woc.name) err := woc.terminateOutstandingWorkflows(ctx) if 
err != nil { @@ -446,7 +446,7 @@ func (woc *cronWfOperationCtx) deleteOldestWorkflows(ctx context.Context, jobLis } sort.SliceStable(jobList, func(i, j int) bool { - return jobList[i].Status.FinishedAt.Time.After(jobList[j].Status.FinishedAt.Time) + return jobList[i].Status.FinishedAt.After(jobList[j].Status.FinishedAt.Time) }) for _, wf := range jobList[workflowsToKeep:] { diff --git a/workflow/events/event_recorder_manager_test.go b/workflow/events/event_recorder_manager_test.go index 6cd68c86edc1..62b7028db45e 100644 --- a/workflow/events/event_recorder_manager_test.go +++ b/workflow/events/event_recorder_manager_test.go @@ -13,8 +13,8 @@ const aggregationWithAnnotationsEnvKey = "EVENT_AGGREGATION_WITH_ANNOTATIONS" func TestCustomEventAggregatorFuncWithAnnotations(t *testing.T) { event := apiv1.Event{} key, msg := customEventAggregatorFuncWithAnnotations(&event) - assert.Equal(t, "", key) - assert.Equal(t, "", msg) + assert.Empty(t, key) + assert.Empty(t, msg) event.Source = apiv1.EventSource{Component: "component1", Host: "host1"} event.InvolvedObject.Name = "name1" @@ -25,7 +25,7 @@ func TestCustomEventAggregatorFuncWithAnnotations(t *testing.T) { assert.Equal(t, "message1", msg) // Test default behavior where annotations are not used for aggregation - event.ObjectMeta.Annotations = map[string]string{"key1": "val1", "key2": "val2"} + event.Annotations = map[string]string{"key1": "val1", "key2": "val2"} key, msg = customEventAggregatorFuncWithAnnotations(&event) assert.Equal(t, "component1host1name1", key) assert.Equal(t, "message1", msg) @@ -37,7 +37,7 @@ func TestCustomEventAggregatorFuncWithAnnotations(t *testing.T) { // Test annotations with values in different order t.Setenv(aggregationWithAnnotationsEnvKey, "true") - event.ObjectMeta.Annotations = map[string]string{"key2": "val2", "key1": "val1"} + event.Annotations = map[string]string{"key2": "val2", "key1": "val1"} key, msg = customEventAggregatorFuncWithAnnotations(&event) assert.Equal(t, 
"component1host1name1val1val2", key) assert.Equal(t, "message1", msg) diff --git a/workflow/executor/agent.go b/workflow/executor/agent.go index d016069fd5a1..18ca1b7a2641 100644 --- a/workflow/executor/agent.go +++ b/workflow/executor/agent.go @@ -74,7 +74,7 @@ type response struct { } func (ae *AgentExecutor) Agent(ctx context.Context) error { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) taskWorkers := env.LookupEnvIntOr(common.EnvAgentTaskWorkers, 16) requeueTime := env.LookupEnvDurationOr(common.EnvAgentPatchRate, 10*time.Second) diff --git a/workflow/executor/executor_test.go b/workflow/executor/executor_test.go index d2a7f09dbfe4..7d474c740215 100644 --- a/workflow/executor/executor_test.go +++ b/workflow/executor/executor_test.go @@ -230,7 +230,7 @@ func TestDefaultParametersEmptyString(t *testing.T) { ctx := context.Background() err := we.SaveParameters(ctx) require.NoError(t, err) - assert.Equal(t, "", we.Template.Outputs.Parameters[0].Value.String()) + assert.Empty(t, we.Template.Outputs.Parameters[0].Value.String()) } func TestIsTarball(t *testing.T) { diff --git a/workflow/executor/mocks/ContainerRuntimeExecutor.go b/workflow/executor/mocks/ContainerRuntimeExecutor.go index dab32e5f00a4..559184bd31a5 100644 --- a/workflow/executor/mocks/ContainerRuntimeExecutor.go +++ b/workflow/executor/mocks/ContainerRuntimeExecutor.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks diff --git a/workflow/gccontroller/gc_controller.go b/workflow/gccontroller/gc_controller.go index 6e56ad279f09..9372a73d2aaa 100644 --- a/workflow/gccontroller/gc_controller.go +++ b/workflow/gccontroller/gc_controller.go @@ -31,7 +31,7 @@ var ticker *time.Ticker = time.NewTicker(50 * time.Millisecond) type Controller struct { wfclientset wfclientset.Interface wfInformer cache.SharedIndexInformer - workqueue workqueue.DelayingInterface + workqueue workqueue.TypedDelayingInterface[string] clock clock.WithTickerAndDelayedExecution metrics *metrics.Metrics orderedQueueLock sync.Mutex @@ -49,7 +49,7 @@ func NewController(ctx context.Context, wfClientset wfclientset.Interface, wfInf controller := &Controller{ wfclientset: wfClientset, wfInformer: wfInformer, - workqueue: metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultControllerRateLimiter(), "workflow_ttl_queue"), + workqueue: metrics.RateLimiterWithBusyWorkers(ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "workflow_ttl_queue"), clock: clock.RealClock{}, metrics: metrics, orderedQueue: orderedQueue, @@ -79,10 +79,10 @@ func NewController(ctx context.Context, wfClientset wfclientset.Interface, wfInf }, Handler: cache.ResourceEventHandlerFuncs{ UpdateFunc: func(old, new interface{}) { - controller.retentionEnqueue(new) + controller.retentionEnqueue(ctx, new) }, AddFunc: func(obj interface{}) { - controller.retentionEnqueue(obj) + controller.retentionEnqueue(ctx, obj) }, }, }) @@ -92,7 +92,7 @@ func NewController(ctx context.Context, wfClientset wfclientset.Interface, wfInf return controller } -func (c *Controller) retentionEnqueue(obj interface{}) { +func (c *Controller) retentionEnqueue(ctx context.Context, obj interface{}) { // No need to queue the workflow if the retention policy is not set if c.retentionPolicy == nil { return @@ -108,7 +108,7 @@ func (c *Controller) retentionEnqueue(obj interface{}) { case wfv1.WorkflowSucceeded, wfv1.WorkflowFailed, wfv1.WorkflowError: 
c.orderedQueueLock.Lock() heap.Push(c.orderedQueue[phase], un) - c.runGC(phase) + c.runGC(ctx, phase) c.orderedQueueLock.Unlock() } } @@ -142,8 +142,8 @@ func (c *Controller) runWorker() { } // retentionGC queues workflows for deletion based upon the retention policy. -func (c *Controller) runGC(phase wfv1.WorkflowPhase) { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (c *Controller) runGC(ctx context.Context, phase wfv1.WorkflowPhase) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) var maxWorkflows int switch phase { case wfv1.WorkflowSucceeded: @@ -172,7 +172,7 @@ func (c *Controller) processNextWorkItem(ctx context.Context) bool { return false } defer c.workqueue.Done(key) - runtimeutil.HandleError(c.deleteWorkflow(ctx, key.(string))) + runtimeutil.HandleError(c.deleteWorkflow(ctx, key)) return true } diff --git a/workflow/gccontroller/gc_controller_test.go b/workflow/gccontroller/gc_controller_test.go index e1e77cdd4f09..17c3770c161c 100644 --- a/workflow/gccontroller/gc_controller_test.go +++ b/workflow/gccontroller/gc_controller_test.go @@ -352,7 +352,7 @@ func newTTLController(t *testing.T) *Controller { wfclientset: wfclientset, wfInformer: wfInformer, clock: clock, - workqueue: workqueue.NewDelayingQueue(), + workqueue: workqueue.NewTypedDelayingQueueWithConfig[string](workqueue.TypedDelayingQueueConfig[string]{}), metrics: gcMetrics, } } diff --git a/workflow/gccontroller/heap_test.go b/workflow/gccontroller/heap_test.go index 1d030f2db059..83d5ab15b81a 100644 --- a/workflow/gccontroller/heap_test.go +++ b/workflow/gccontroller/heap_test.go @@ -51,10 +51,10 @@ metadata: heap.Push(queue, wf) assert.Equal(t, 3, queue.Len()) first := heap.Pop(queue).(*unstructured.Unstructured) - assert.Equal(t, now.Add(-time.Second).Unix(), first.GetCreationTimestamp().Time.Unix()) + assert.Equal(t, now.Add(-time.Second).Unix(), first.GetCreationTimestamp().Unix()) assert.Equal(t, "bad-baseline-oldest", first.GetName()) 
- assert.Equal(t, now.Unix(), heap.Pop(queue).(*unstructured.Unstructured).GetCreationTimestamp().Time.Unix()) - assert.Equal(t, now.Add(time.Second).Unix(), heap.Pop(queue).(*unstructured.Unstructured).GetCreationTimestamp().Time.Unix()) + assert.Equal(t, now.Unix(), heap.Pop(queue).(*unstructured.Unstructured).GetCreationTimestamp().Unix()) + assert.Equal(t, now.Add(time.Second).Unix(), heap.Pop(queue).(*unstructured.Unstructured).GetCreationTimestamp().Unix()) assert.Equal(t, 0, queue.Len()) } diff --git a/workflow/hydrator/hydrator_test.go b/workflow/hydrator/hydrator_test.go index c75222021537..96a5c7f582a8 100644 --- a/workflow/hydrator/hydrator_test.go +++ b/workflow/hydrator/hydrator_test.go @@ -68,7 +68,7 @@ func TestHydrator(t *testing.T) { }) t.Run("WorkflowTooLargeButOffloadNotSupported", func(t *testing.T) { offloadNodeStatusRepo := &sqldbmocks.OffloadNodeStatusRepo{} - offloadNodeStatusRepo.On("Save", "my-uid", "my-ns", mock.Anything).Return("my-offload-version", sqldb.OffloadNotSupportedError) + offloadNodeStatusRepo.On("Save", "my-uid", "my-ns", mock.Anything).Return("my-offload-version", sqldb.ErrOffloadNotSupported) hydrator := New(offloadNodeStatusRepo) wf := &wfv1.Workflow{ ObjectMeta: metav1.ObjectMeta{UID: "my-uid", Namespace: "my-ns"}, @@ -96,7 +96,7 @@ func TestHydrator(t *testing.T) { }) t.Run("OffloadingDisabled", func(t *testing.T) { offloadNodeStatusRepo := &sqldbmocks.OffloadNodeStatusRepo{} - offloadNodeStatusRepo.On("Get", "my-uid", "my-offload-version").Return(nil, sqldb.OffloadNotSupportedError) + offloadNodeStatusRepo.On("Get", "my-uid", "my-offload-version").Return(nil, sqldb.ErrOffloadNotSupported) hydrator := New(offloadNodeStatusRepo) wf := &wfv1.Workflow{ ObjectMeta: metav1.ObjectMeta{UID: "my-uid"}, diff --git a/workflow/metrics/metrics_test.go b/workflow/metrics/metrics_test.go index e24ed2c99206..47d2e2fa4199 100644 --- a/workflow/metrics/metrics_test.go +++ b/workflow/metrics/metrics_test.go @@ -148,7 +148,7 @@ func 
TestWorkflowQueueMetrics(t *testing.T) { m, te, err := getSharedMetrics() require.NoError(t, err) attribs := attribute.NewSet(attribute.String(telemetry.AttribQueueName, "workflow_queue")) - wfQueue := m.RateLimiterWithBusyWorkers(m.Ctx, workqueue.DefaultControllerRateLimiter(), "workflow_queue") + wfQueue := m.RateLimiterWithBusyWorkers(m.Ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "workflow_queue") defer wfQueue.ShutDown() assert.NotNil(t, m.GetInstrument(nameWorkersQueueDepth)) diff --git a/workflow/metrics/util.go b/workflow/metrics/util.go index 6a1071018ba7..3f857ff1d9e1 100644 --- a/workflow/metrics/util.go +++ b/workflow/metrics/util.go @@ -17,7 +17,7 @@ var ( func IsValidMetricName(name string) bool { // Use promtheus's metric name checker, despite perhaps not using prometheus - return model.IsValidMetricName(model.LabelValue(name)) && !strings.Contains(name, `:`) + return model.IsValidLegacyMetricName(string(model.LabelValue(name))) && !strings.Contains(name, `:`) } func ValidateMetricValues(metric *wfv1.Prometheus) error { diff --git a/workflow/metrics/work_queue.go b/workflow/metrics/work_queue.go index 46679d77a3a0..40e7a6d04a86 100644 --- a/workflow/metrics/work_queue.go +++ b/workflow/metrics/work_queue.go @@ -26,7 +26,7 @@ const ( var _ workqueue.MetricsProvider = &Metrics{} type workersBusyRateLimiterWorkQueue struct { - workqueue.RateLimitingInterface + workqueue.TypedRateLimitingInterface[string] workerType string busyGauge *telemetry.Instrument // Evil storage of context for compatibility with legacy interface to workqueue @@ -128,12 +128,12 @@ func addWorkQueueMetrics(_ context.Context, m *Metrics) error { return nil } -func (m *Metrics) RateLimiterWithBusyWorkers(ctx context.Context, workQueue workqueue.RateLimiter, queueName string) workqueue.RateLimitingInterface { +func (m *Metrics) RateLimiterWithBusyWorkers(ctx context.Context, workQueue workqueue.TypedRateLimiter[string], queueName string) 
workqueue.TypedRateLimitingInterface[string] { queue := workersBusyRateLimiterWorkQueue{ - RateLimitingInterface: workqueue.NewNamedRateLimitingQueue(workQueue, queueName), - workerType: queueName, - busyGauge: m.GetInstrument(nameWorkersBusy), - ctx: ctx, + TypedRateLimitingInterface: workqueue.NewTypedRateLimitingQueueWithConfig(workQueue, workqueue.TypedRateLimitingQueueConfig[string]{Name: queueName}), + workerType: queueName, + busyGauge: m.GetInstrument(nameWorkersBusy), + ctx: ctx, } queue.newWorker(ctx) return queue @@ -155,14 +155,14 @@ func (w *workersBusyRateLimiterWorkQueue) workerFree(ctx context.Context) { w.busyGauge.AddInt(ctx, -1, w.attributes()) } -func (w workersBusyRateLimiterWorkQueue) Get() (interface{}, bool) { - item, shutdown := w.RateLimitingInterface.Get() +func (w workersBusyRateLimiterWorkQueue) Get() (string, bool) { + item, shutdown := w.TypedRateLimitingInterface.Get() w.workerBusy(w.ctx) return item, shutdown } -func (w workersBusyRateLimiterWorkQueue) Done(item interface{}) { - w.RateLimitingInterface.Done(item) +func (w workersBusyRateLimiterWorkQueue) Done(item string) { + w.TypedRateLimitingInterface.Done(item) w.workerFree(w.ctx) } diff --git a/workflow/metrics/work_queue_test.go b/workflow/metrics/work_queue_test.go index 9c7f9766936b..0222a292e23e 100644 --- a/workflow/metrics/work_queue_test.go +++ b/workflow/metrics/work_queue_test.go @@ -17,7 +17,7 @@ func TestMetricsWorkQueue(t *testing.T) { attribsWT := attribute.NewSet(attribute.String(telemetry.AttribWorkerType, "test")) - queue := m.RateLimiterWithBusyWorkers(m.Ctx, workqueue.DefaultControllerRateLimiter(), "test") + queue := m.RateLimiterWithBusyWorkers(m.Ctx, workqueue.DefaultTypedControllerRateLimiter[string](), "test") defer queue.ShutDown() val, err := te.GetInt64CounterValue(nameWorkersBusy, &attribsWT) require.NoError(t, err) diff --git a/workflow/sync/lock_name.go b/workflow/sync/lock_name.go index fcc71cee6937..ee7d9a4c626d 100644 --- 
a/workflow/sync/lock_name.go +++ b/workflow/sync/lock_name.go @@ -135,12 +135,12 @@ func ParseSelectors(selectors string) []v1alpha1.SyncSelector { func (ln *lockName) encodeName() string { encodingBuilder := &strings.Builder{} - encodingBuilder.WriteString(fmt.Sprintf("%s/%s/%s", ln.Namespace, ln.Kind, ln.ResourceName)) + fmt.Fprintf(encodingBuilder, "%s/%s/%s", ln.Namespace, ln.Kind, ln.ResourceName) if ln.Kind == lockKindConfigMap { - encodingBuilder.WriteString(fmt.Sprintf("/%s", ln.Key)) + fmt.Fprintf(encodingBuilder, "/%s", ln.Key) } if selectors := StringifySelectors(ln.Selectors); len(selectors) > 0 { - encodingBuilder.WriteString(fmt.Sprintf("?%s", selectors)) + fmt.Fprintf(encodingBuilder, "?%s", selectors) } return ln.validateEncoding(encodingBuilder.String()) } diff --git a/workflow/sync/mocks/Throttler.go b/workflow/sync/mocks/Throttler.go index d4273b265322..ed70bdffca3f 100644 --- a/workflow/sync/mocks/Throttler.go +++ b/workflow/sync/mocks/Throttler.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.2. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks diff --git a/workflow/sync/multi_throttler_test.go b/workflow/sync/multi_throttler_test.go index d321c31c0766..c0da19efaf8b 100644 --- a/workflow/sync/multi_throttler_test.go +++ b/workflow/sync/multi_throttler_test.go @@ -129,7 +129,7 @@ status: assert.False(t, throttler.Admit("default/d")) throttler.Remove("default/a") - assert.Equal(t, "", queuedKey) + assert.Empty(t, queuedKey) assert.False(t, throttler.Admit("default/c")) assert.False(t, throttler.Admit("default/d")) diff --git a/workflow/sync/sync_manager.go b/workflow/sync/sync_manager.go index fb0feda826d5..c6179a7c3719 100644 --- a/workflow/sync/sync_manager.go +++ b/workflow/sync/sync_manager.go @@ -20,7 +20,7 @@ type ( type Manager struct { syncLockMap map[string]semaphore - lock *sync.Mutex + lock *sync.RWMutex nextWorkflow NextWorkflow getSyncLimit GetSyncLimit isWFDeleted IsWorkflowDeleted @@ -29,7 +29,7 @@ type Manager struct { func NewLockManager(getSyncLimit GetSyncLimit, nextWorkflow NextWorkflow, isWFDeleted IsWorkflowDeleted) *Manager { return &Manager{ syncLockMap: make(map[string]semaphore), - lock: &sync.Mutex{}, + lock: &sync.RWMutex{}, nextWorkflow: nextWorkflow, getSyncLimit: getSyncLimit, isWFDeleted: isWFDeleted, @@ -47,8 +47,11 @@ func (sm *Manager) getWorkflowKey(key string) (string, error) { return fmt.Sprintf("%s/%s", items[0], items[1]), nil } -func (sm *Manager) CheckWorkflowExistence() { - defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) +func (sm *Manager) CheckWorkflowExistence(ctx context.Context) { + defer runtimeutil.HandleCrashWithContext(ctx, runtimeutil.PanicHandlers...) 
+ + sm.lock.RLock() + defer sm.lock.RUnlock() log.Debug("Check the workflow existence") for _, lock := range sm.syncLockMap { @@ -311,8 +314,8 @@ func (sm *Manager) Release(ctx context.Context, wf *wfv1.Workflow, nodeName stri return } - sm.lock.Lock() - defer sm.lock.Unlock() + sm.lock.RLock() + defer sm.lock.RUnlock() holderKey := getHolderKey(wf, nodeName) // Ignoring error here is as good as it's going to be, we shouldn't get here as we should @@ -336,8 +339,8 @@ func (sm *Manager) Release(ctx context.Context, wf *wfv1.Workflow, nodeName stri } func (sm *Manager) ReleaseAll(wf *wfv1.Workflow) bool { - sm.lock.Lock() - defer sm.lock.Unlock() + sm.lock.RLock() + defer sm.lock.RUnlock() if wf.Status.Synchronization == nil { return true diff --git a/workflow/sync/sync_manager_test.go b/workflow/sync/sync_manager_test.go index b70fd89de56e..1e77151bdb50 100644 --- a/workflow/sync/sync_manager_test.go +++ b/workflow/sync/sync_manager_test.go @@ -721,7 +721,7 @@ func TestCheckWorkflowExistence(t *testing.T) { assert.Len(mutex.getCurrentPending(), 1) assert.Len(semaphore.getCurrentHolders(), 1) assert.Len(semaphore.getCurrentPending(), 1) - syncManager.CheckWorkflowExistence() + syncManager.CheckWorkflowExistence(ctx) assert.Empty(mutex.getCurrentHolders()) assert.Len(mutex.getCurrentPending(), 1) assert.Empty(semaphore.getCurrentHolders()) diff --git a/workflow/sync/syncitems_test.go b/workflow/sync/syncitems_test.go index f7fe5974d195..0177a0ede9f9 100644 --- a/workflow/sync/syncitems_test.go +++ b/workflow/sync/syncitems_test.go @@ -19,7 +19,7 @@ func TestNoDuplicates(t *testing.T) { { name: "single", items: []*syncItem{ - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, @@ -29,12 +29,12 @@ func TestNoDuplicates(t *testing.T) { { name: "two", items: []*syncItem{ - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, }, - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "beta", }, @@ -44,13 +44,13 @@ func TestNoDuplicates(t *testing.T) { { name: "different 
namespace mutex", items: []*syncItem{ - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", Namespace: "foo", }, }, - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", Namespace: "bar", @@ -75,12 +75,12 @@ func TestExpectDuplicates(t *testing.T) { { name: "simple duplicate mutex", items: []*syncItem{ - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, }, - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, @@ -90,7 +90,7 @@ func TestExpectDuplicates(t *testing.T) { { name: "simple duplicate semaphore", items: []*syncItem{ - &syncItem{ + { semaphore: &v1alpha1.SemaphoreRef{ ConfigMapKeyRef: &apiv1.ConfigMapKeySelector{ LocalObjectReference: apiv1.LocalObjectReference{ @@ -100,7 +100,7 @@ func TestExpectDuplicates(t *testing.T) { }, }, }, - &syncItem{ + { semaphore: &v1alpha1.SemaphoreRef{ ConfigMapKeyRef: &apiv1.ConfigMapKeySelector{ LocalObjectReference: apiv1.LocalObjectReference{ @@ -115,17 +115,17 @@ func TestExpectDuplicates(t *testing.T) { { name: "another duplicate mutex", items: []*syncItem{ - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, }, - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "beta", }, }, - &syncItem{ + { mutex: &v1alpha1.Mutex{ Name: "alpha", }, diff --git a/workflow/util/util.go b/workflow/util/util.go index f70b72d283a8..e12d6a8dd456 100644 --- a/workflow/util/util.go +++ b/workflow/util/util.go @@ -2,6 +2,7 @@ package util import ( "bufio" + "container/list" "context" "encoding/json" "fmt" @@ -12,6 +13,7 @@ import ( "path/filepath" "regexp" nruntime "runtime" + "slices" "strconv" "strings" "time" @@ -165,8 +167,8 @@ func ToUnstructured(wf *wfv1.Workflow) (*unstructured.Unstructured, error) { // IsWorkflowCompleted returns whether or not a workflow is considered completed func IsWorkflowCompleted(wf *wfv1.Workflow) bool { - if wf.ObjectMeta.Labels != nil { - return wf.ObjectMeta.Labels[common.LabelKeyCompleted] == "true" + if wf.Labels != nil { + return wf.Labels[common.LabelKeyCompleted] == "true" } return false 
} @@ -288,10 +290,10 @@ func ApplySubmitOpts(wf *wfv1.Workflow, opts *wfv1.SubmitOpts) error { return err } if opts.GenerateName != "" { - wf.ObjectMeta.GenerateName = opts.GenerateName + wf.GenerateName = opts.GenerateName } if opts.Name != "" { - wf.ObjectMeta.Name = opts.Name + wf.Name = opts.Name } if opts.OwnerReference != nil { wf.SetOwnerReferences(append(wf.GetOwnerReferences(), *opts.OwnerReference)) @@ -636,10 +638,10 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized newWF.TypeMeta = wf.TypeMeta // Resubmitted workflow will use generated names - if wf.ObjectMeta.GenerateName != "" { - newWF.ObjectMeta.GenerateName = wf.ObjectMeta.GenerateName + if wf.GenerateName != "" { + newWF.GenerateName = wf.GenerateName } else { - newWF.ObjectMeta.GenerateName = wf.ObjectMeta.Name + "-" + newWF.GenerateName = wf.Name + "-" } // When resubmitting workflow with memoized nodes, we need to use a predetermined workflow name // in order to formulate the node statuses. Which means we cannot reuse metadata.generateName @@ -650,7 +652,7 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized default: return nil, errors.Errorf(errors.CodeBadRequest, "workflow must be Failed/Error to resubmit in memoized mode") } - newWF.ObjectMeta.Name = newWF.ObjectMeta.GenerateName + RandSuffix() + newWF.Name = newWF.GenerateName + RandSuffix() } // carry over the unmodified spec @@ -664,16 +666,16 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized newWF.Spec.Shutdown = "" // carry over user labels and annotations from previous workflow. 
- if newWF.ObjectMeta.Labels == nil { - newWF.ObjectMeta.Labels = make(map[string]string) + if newWF.Labels == nil { + newWF.Labels = make(map[string]string) } - for key, val := range wf.ObjectMeta.Labels { + for key, val := range wf.Labels { switch key { case common.LabelKeyCreator, common.LabelKeyCreatorEmail, common.LabelKeyCreatorPreferredUsername, common.LabelKeyPhase, common.LabelKeyCompleted, common.LabelKeyWorkflowArchivingStatus: // ignore default: - newWF.ObjectMeta.Labels[key] = val + newWF.Labels[key] = val } } // Apply creator labels based on the authentication information of the current request, @@ -681,12 +683,12 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized creator.Label(ctx, &newWF) // Append an additional label so it's easy for user to see the // name of the original workflow that has been resubmitted. - newWF.ObjectMeta.Labels[common.LabelKeyPreviousWorkflowName] = wf.ObjectMeta.Name - if newWF.ObjectMeta.Annotations == nil { - newWF.ObjectMeta.Annotations = make(map[string]string) + newWF.Labels[common.LabelKeyPreviousWorkflowName] = wf.Name + if newWF.Annotations == nil { + newWF.Annotations = make(map[string]string) } - for key, val := range wf.ObjectMeta.Annotations { - newWF.ObjectMeta.Annotations[key] = val + for key, val := range wf.Annotations { + newWF.Annotations[key] = val } // Setting OwnerReference from original Workflow @@ -694,7 +696,7 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized // Override parameters if parameters != nil { - if _, ok := wf.ObjectMeta.Labels[common.LabelKeyPreviousWorkflowName]; ok || memoized { + if _, ok := wf.Labels[common.LabelKeyPreviousWorkflowName]; ok || memoized { log.Warnln("Overriding parameters on memoized or resubmitted workflows may have unexpected results") } err := overrideParameters(&newWF, parameters) @@ -708,9 +710,9 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized } // Iterate the 
previous nodes. - replaceRegexp := regexp.MustCompile("^" + wf.ObjectMeta.Name) + replaceRegexp := regexp.MustCompile("^" + wf.Name) newWF.Status.Nodes = make(map[string]wfv1.NodeStatus) - onExitNodeName := wf.ObjectMeta.Name + ".onExit" + onExitNodeName := wf.Name + ".onExit" err := packer.DecompressWorkflow(wf) if err != nil { log.Panic(err) @@ -721,7 +723,7 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized continue } originalID := node.ID - newNode.Name = replaceRegexp.ReplaceAllString(node.Name, newWF.ObjectMeta.Name) + newNode.Name = replaceRegexp.ReplaceAllString(node.Name, newWF.Name) newNode.ID = newWF.NodeID(newNode.Name) if node.BoundaryID != "" { newNode.BoundaryID = convertNodeID(&newWF, replaceRegexp, node.BoundaryID, wf.Status.Nodes) @@ -771,23 +773,10 @@ func FormulateResubmitWorkflow(ctx context.Context, wf *wfv1.Workflow, memoized // convertNodeID converts an old nodeID to a new nodeID func convertNodeID(newWf *wfv1.Workflow, regex *regexp.Regexp, oldNodeID string, oldNodes map[string]wfv1.NodeStatus) string { node := oldNodes[oldNodeID] - newNodeName := regex.ReplaceAllString(node.Name, newWf.ObjectMeta.Name) + newNodeName := regex.ReplaceAllString(node.Name, newWf.Name) return newWf.NodeID(newNodeName) } -func getDescendantNodeIDs(wf *wfv1.Workflow, node wfv1.NodeStatus) []string { - var descendantNodeIDs []string - descendantNodeIDs = append(descendantNodeIDs, node.Children...) - for _, child := range node.Children { - childStatus, err := wf.Status.Nodes.Get(child) - if err != nil { - log.Panicf("Coudn't obtain child for %s, panicking", child) - } - descendantNodeIDs = append(descendantNodeIDs, getDescendantNodeIDs(wf, *childStatus)...) 
- } - return descendantNodeIDs -} - func isDescendantNodeSucceeded(wf *wfv1.Workflow, node wfv1.NodeStatus, nodeIDsToReset map[string]bool) bool { for _, child := range node.Children { childStatus, err := wf.Status.Nodes.Get(child) @@ -813,232 +802,505 @@ func deletePodNodeDuringRetryWorkflow(wf *wfv1.Workflow, node wfv1.NodeStatus, d return deletedPods, podsToDelete } -func containsNode(nodes []string, node string) bool { - for _, e := range nodes { - if e == node { - return true +func createNewRetryWorkflow(wf *wfv1.Workflow, parameters []string) (*wfv1.Workflow, error) { + newWF := wf.DeepCopy() + + // Delete/reset fields which indicate workflow completed + delete(newWF.Labels, common.LabelKeyCompleted) + delete(newWF.Labels, common.LabelKeyWorkflowArchivingStatus) + newWF.Status.Conditions.UpsertCondition(wfv1.Condition{Status: metav1.ConditionFalse, Type: wfv1.ConditionTypeCompleted}) + newWF.Labels[common.LabelKeyPhase] = string(wfv1.NodeRunning) + newWF.Status.Phase = wfv1.WorkflowRunning + newWF.Status.Nodes = make(wfv1.Nodes) + newWF.Status.Message = "" + newWF.Status.StartedAt = metav1.Time{Time: time.Now().UTC()} + newWF.Status.FinishedAt = metav1.Time{} + if newWF.Status.StoredWorkflowSpec != nil { + newWF.Status.StoredWorkflowSpec.Shutdown = "" + } + newWF.Spec.Shutdown = "" + newWF.Status.PersistentVolumeClaims = []apiv1.Volume{} + if newWF.Spec.ActiveDeadlineSeconds != nil && *newWF.Spec.ActiveDeadlineSeconds == 0 { + // if it was terminated, unset the deadline + newWF.Spec.ActiveDeadlineSeconds = nil + } + // Override parameters + if parameters != nil { + if _, ok := wf.Labels[common.LabelKeyPreviousWorkflowName]; ok { + log.Warnln("Overriding parameters on resubmitted workflows may have unexpected results") + } + err := overrideParameters(newWF, parameters) + if err != nil { + return nil, err } } - return false + return newWF, nil +} + +type dagNode struct { + n *wfv1.NodeStatus + parent *dagNode + children []*dagNode +} + +func newWorkflowsDag(wf 
*wfv1.Workflow) ([]*dagNode, error) { + nodes := make(map[string]*dagNode) + parentsMap := make(map[string]*wfv1.NodeStatus) + + // create mapping from node to parent + // as well as creating temp mappings from nodeID to node + for _, wfNode := range wf.Status.Nodes { + n := dagNode{} + n.n = &wfNode + nodes[wfNode.ID] = &n + for _, child := range wfNode.Children { + parentsMap[child] = &wfNode + } + } + + for _, wfNode := range wf.Status.Nodes { + parentWfNode, ok := parentsMap[wfNode.ID] + if !ok && wfNode.Name != wf.Name && !strings.HasPrefix(wfNode.Name, wf.Name+".onExit") { + return nil, fmt.Errorf("couldn't find parent node for %s", wfNode.ID) + } + + var parentNode *dagNode + if parentWfNode != nil { + parentNode = nodes[parentWfNode.ID] + } + + children := make([]*dagNode, 0) + + for _, childID := range wfNode.Children { + childNode, ok := nodes[childID] + if !ok { + return nil, fmt.Errorf("coudln't obtain child %s", childID) + } + children = append(children, childNode) + } + nodes[wfNode.ID].parent = parentNode + nodes[wfNode.ID].children = children + } + + values := make([]*dagNode, 0) + for _, v := range nodes { + values = append(values, v) + } + return values, nil +} + +func singularPath(nodes []*dagNode, toNode string) ([]*dagNode, error) { + if len(nodes) <= 0 { + return nil, fmt.Errorf("expected at least 1 node") + } + var root *dagNode + var leaf *dagNode + for i := range nodes { + if nodes[i].n.ID == toNode { + leaf = nodes[i] + } + if nodes[i].parent == nil { + root = nodes[i] + } + } + + if root == nil { + return nil, fmt.Errorf("was unable to find root") + } + + if leaf == nil { + return nil, fmt.Errorf("was unable to find %s", toNode) + } + + curr := leaf + + reverseNodes := make([]*dagNode, 0) + for { + reverseNodes = append(reverseNodes, curr) + if curr.n.ID == root.n.ID { + break + } + if curr.parent == nil { + return nil, fmt.Errorf("parent was nil but curr is not the root node") + } + curr = curr.parent + } + + slices.Reverse(reverseNodes) 
+ return reverseNodes, nil } -func isGroupNode(node wfv1.NodeStatus) bool { - return node.Type == wfv1.NodeTypeDAG || node.Type == wfv1.NodeTypeTaskGroup || node.Type == wfv1.NodeTypeStepGroup || node.Type == wfv1.NodeTypeSteps +func getChildren(n *dagNode) map[string]bool { + children := make(map[string]bool) + queue := list.New() + queue.PushBack(n) + for { + currNode := queue.Front() + if currNode == nil { + break + } + + curr := currNode.Value.(*dagNode) + for i := range curr.children { + children[curr.children[i].n.ID] = true + queue.PushBack(curr.children[i]) + } + queue.Remove(currNode) + } + return children } -func resetConnectedParentGroupNodes(oldWF *wfv1.Workflow, newWF *wfv1.Workflow, currentNode wfv1.NodeStatus, resetParentGroupNodes []string) (*wfv1.Workflow, []string) { - currentNodeID := currentNode.ID +type resetFn func(string) +type deleteFn func(string) +type matchFn func(*dagNode) bool + +func matchNodeType(nodeType wfv1.NodeType) matchFn { + return func(n *dagNode) bool { + return n.n.Type == nodeType + } +} + +func resetUntil(n *dagNode, matchFunc matchFn, resetFunc resetFn) (*dagNode, error) { + curr := n for { - currentNode, err := oldWF.Status.Nodes.Get(currentNodeID) - if err != nil { - log.Panicf("dying due to inability to obtain node for %s, panicking", currentNodeID) + if curr == nil { + return nil, fmt.Errorf("was seeking node but ran out of nodes to explore") + } + + if match := matchFunc(curr); match { + resetFunc(curr.n.ID) + return curr, nil + } + curr = curr.parent + } +} + +func matchBoundaryID(boundaryID string) matchFn { + return func(n *dagNode) bool { + return n.n.ID == boundaryID + } +} + +func resetBoundaries(n *dagNode, resetFunc resetFn) (*dagNode, error) { + curr := n + for { + if curr == nil { + return curr, nil } - if !containsNode(resetParentGroupNodes, currentNodeID) { - newWF.Status.Nodes.Set(currentNodeID, resetNode(*currentNode.DeepCopy())) - resetParentGroupNodes = append(resetParentGroupNodes, currentNodeID) - 
log.Debugf("Reset connected group node %s", currentNode.Name) + if curr.parent != nil && curr.parent.n.Type == wfv1.NodeTypeRetry { + resetFunc(curr.parent.n.ID) + curr = curr.parent } - if currentNode.BoundaryID != "" && currentNode.BoundaryID != oldWF.ObjectMeta.Name { - parentNode, err := oldWF.Status.Nodes.Get(currentNode.BoundaryID) + if curr.parent != nil && curr.parent.n.Type == wfv1.NodeTypeStepGroup { + resetFunc(curr.parent.n.ID) + } + seekingBoundaryID := curr.n.BoundaryID + if seekingBoundaryID == "" { + return curr.parent, nil + } + var err error + curr, err = resetUntil(curr, matchBoundaryID(seekingBoundaryID), resetFunc) + if err != nil { + return nil, err + } + } +} + +// resetPod is only called in the event a Container was found. This implies that there is a parent pod. +func resetPod(n *dagNode, resetFunc resetFn, addToDelete deleteFn) (*dagNode, error) { + // this sets to reset but resets are overridden by deletes in the final FormulateRetryWorkflow logic. + curr, err := resetUntil(n, matchNodeType(wfv1.NodeTypePod), resetFunc) + if err != nil { + return nil, err + } + addToDelete(curr.n.ID) + children := getChildren(curr) + for childID := range children { + addToDelete(childID) + } + return curr, nil +} + +func resetPath(allNodes []*dagNode, startNode string) (map[string]bool, map[string]bool, error) { + nodes, err := singularPath(allNodes, startNode) + + curr := nodes[len(nodes)-1] + if len(nodes) > 0 { + // remove startNode + nodes = nodes[:len(nodes)-1] + } + + nodesToDelete := getChildren(curr) + nodesToDelete[curr.n.ID] = true + + nodesToReset := make(map[string]bool) + + if err != nil { + return nil, nil, err + } + l := len(nodes) + if l <= 0 { + return nodesToReset, nodesToDelete, nil + } + + // safe to reset the startNode since deletions + // override resets. 
+ addToReset := func(nodeID string) { + nodesToReset[nodeID] = true + } + + addToDelete := func(nodeID string) { + nodesToDelete[nodeID] = true + } + + for curr != nil { + switch { + case isGroupNodeType(curr.n.Type): + addToReset(curr.n.ID) + curr, err = resetBoundaries(curr, addToReset) if err != nil { - log.Panicf("unable to obtain node for %s, panicking", currentNode.BoundaryID) + return nil, nil, err } - if isGroupNode(*parentNode) { - currentNodeID = parentNode.ID - } else { - break + continue + case curr.n.Type == wfv1.NodeTypeRetry: + addToReset(curr.n.ID) + case curr.n.Type == wfv1.NodeTypeContainer: + curr, err = resetPod(curr, addToReset, addToDelete) + if err != nil { + return nil, nil, err } - } else { + continue + } + + curr = curr.parent + } + return nodesToReset, nodesToDelete, nil +} + +func setUnion[T comparable](m1 map[T]bool, m2 map[T]bool) map[T]bool { + res := make(map[T]bool) + + for k, v := range m1 { + res[k] = v + } + + for k, v := range m2 { + if _, ok := m1[k]; !ok { + res[k] = v + } + } + return res +} + +func isGroupNodeType(nodeType wfv1.NodeType) bool { + return nodeType == wfv1.NodeTypeDAG || nodeType == wfv1.NodeTypeTaskGroup || nodeType == wfv1.NodeTypeStepGroup || nodeType == wfv1.NodeTypeSteps +} + +func isExecutionNodeType(nodeType wfv1.NodeType) bool { + return nodeType == wfv1.NodeTypeContainer || nodeType == wfv1.NodeTypePod || nodeType == wfv1.NodeTypeHTTP || nodeType == wfv1.NodeTypePlugin +} + +// dagSortedNodes sorts the nodes based on topological order, omits onExitNode +func dagSortedNodes(nodes []*dagNode, rootNodeName string) []*dagNode { + sortedNodes := make([]*dagNode, 0) + + if len(nodes) == 0 { + return sortedNodes + } + + queue := make([]*dagNode, 0) + + for _, n := range nodes { + if n.n.Name == rootNodeName { + queue = append(queue, n) break } } - return newWF, resetParentGroupNodes + + if len(queue) != 1 { + panic("couldn't find root node") + } + + for len(queue) > 0 { + curr := queue[0] + sortedNodes = 
append(sortedNodes, curr) + queue = queue[1:] + queue = append(queue, curr.children...) + } + + return sortedNodes } -// FormulateRetryWorkflow formulates a previous workflow to be retried, deleting all failed steps as well as the onExit node (and children) +// FormulateRetryWorkflow attempts to retry a workflow +// The logic is as follows: +// create a DAG +// topological sort +// iterate through all must delete nodes: iterator $node +// obtain singular path to each $node +// reset all "reset points" to $node func FormulateRetryWorkflow(ctx context.Context, wf *wfv1.Workflow, restartSuccessful bool, nodeFieldSelector string, parameters []string) (*wfv1.Workflow, []string, error) { + switch wf.Status.Phase { case wfv1.WorkflowFailed, wfv1.WorkflowError: case wfv1.WorkflowSucceeded: - if !(restartSuccessful && len(nodeFieldSelector) > 0) { + if !restartSuccessful || len(nodeFieldSelector) <= 0 { return nil, nil, errors.Errorf(errors.CodeBadRequest, "To retry a succeeded workflow, set the options restartSuccessful and nodeFieldSelector") } default: return nil, nil, errors.Errorf(errors.CodeBadRequest, "Cannot retry a workflow in phase %s", wf.Status.Phase) } - newWF := wf.DeepCopy() + onExitNodeName := wf.Name + ".onExit" - // Delete/reset fields which indicate workflow completed - delete(newWF.Labels, common.LabelKeyCompleted) - delete(newWF.Labels, common.LabelKeyWorkflowArchivingStatus) - newWF.Status.Conditions.UpsertCondition(wfv1.Condition{Status: metav1.ConditionFalse, Type: wfv1.ConditionTypeCompleted}) - newWF.ObjectMeta.Labels[common.LabelKeyPhase] = string(wfv1.NodeRunning) - newWF.Status.Phase = wfv1.WorkflowRunning - newWF.Status.Nodes = make(wfv1.Nodes) - newWF.Status.Message = "" - newWF.Status.StartedAt = metav1.Time{Time: time.Now().UTC()} - newWF.Status.FinishedAt = metav1.Time{} - if newWF.Status.StoredWorkflowSpec != nil { - newWF.Status.StoredWorkflowSpec.Shutdown = "" + newWf, err := createNewRetryWorkflow(wf, parameters) + if err != nil { + 
return nil, nil, err } - newWF.Spec.Shutdown = "" - newWF.Status.PersistentVolumeClaims = []apiv1.Volume{} - if newWF.Spec.ActiveDeadlineSeconds != nil && *newWF.Spec.ActiveDeadlineSeconds == 0 { - // if it was terminated, unset the deadline - newWF.Spec.ActiveDeadlineSeconds = nil + + deleteNodesMap, err := getNodeIDsToReset(restartSuccessful, nodeFieldSelector, wf.Status.Nodes) + if err != nil { + return nil, nil, err } - // Override parameters - if parameters != nil { - if _, ok := wf.ObjectMeta.Labels[common.LabelKeyPreviousWorkflowName]; ok { - log.Warnln("Overriding parameters on resubmitted workflows may have unexpected results") + + failed := make(map[string]bool) + for nodeID, node := range wf.Status.Nodes { + if node.FailedOrError() && isExecutionNodeType(node.Type) { + // Check its parent if current node is retry node + if node.NodeFlag != nil && node.NodeFlag.Retried { + node = *wf.Status.Nodes.Find(func(nodeStatus wfv1.NodeStatus) bool { + return nodeStatus.HasChild(node.ID) + }) + } + if !isDescendantNodeSucceeded(wf, node, deleteNodesMap) { + failed[nodeID] = true + } } - err := overrideParameters(newWF, parameters) + } + for failedNode := range failed { + deleteNodesMap[failedNode] = true + } + + nodes, err := newWorkflowsDag(wf) + if err != nil { + return nil, nil, err + } + + toReset := make(map[string]bool) + toDelete := make(map[string]bool) + + nodesMap := make(map[string]*dagNode) + for i := range nodes { + nodesMap[nodes[i].n.ID] = nodes[i] + } + + nodes = dagSortedNodes(nodes, wf.Name) + + deleteNodes := make([]*dagNode, 0) + + // deleteNodes will not contain an exit node + for i := range nodes { + if _, ok := deleteNodesMap[nodes[i].n.ID]; ok { + deleteNodes = append(deleteNodes, nodes[i]) + } + } + + // this is kind of complex + // we rely on deleteNodes being topologically sorted. + // this is done via a breadth first search on the dag via `dagSortedNodes` + // This is because nodes at the top take precedence over nodes at the bottom. 
+ // if a failed node was declared to be scheduled for deletion, we should + // never execute resetPath on that node. + // we ensure this behaviour via calling resetPath in topological order. + for i := range deleteNodes { + currNode := deleteNodes[i] + shouldDelete := toDelete[currNode.n.ID] + if shouldDelete { + continue + } + pathToReset, pathToDelete, err := resetPath(nodes, currNode.n.ID) if err != nil { return nil, nil, err } + toReset = setUnion(toReset, pathToReset) + toDelete = setUnion(toDelete, pathToDelete) } - onExitNodeName := wf.ObjectMeta.Name + ".onExit" - // Get all children of nodes that match filter - nodeIDsToReset, err := getNodeIDsToReset(restartSuccessful, nodeFieldSelector, wf.Status.Nodes) - if err != nil { - return nil, nil, err + for nodeID := range toReset { + // avoid resetting nodes that are marked for deletion + if in := toDelete[nodeID]; in { + continue + } + + n := wf.Status.Nodes[nodeID] + + newWf.Status.Nodes.Set(nodeID, resetNode(*n.DeepCopy())) } - // Iterate the previous nodes. If it was successful Pod carry it forward - deletedNodes := make(map[string]bool) deletedPods := make(map[string]bool) - var podsToDelete []string - var resetParentGroupNodes []string - for _, node := range wf.Status.Nodes { - doForceResetNode := false - if _, present := nodeIDsToReset[node.ID]; present { - // if we are resetting this node then don't carry it across regardless of its phase - doForceResetNode = true - } - switch node.Phase { - case wfv1.NodeSucceeded, wfv1.NodeSkipped: - if strings.HasPrefix(node.Name, onExitNodeName) || doForceResetNode { - log.Debugf("Force reset for node: %s", node.Name) - // Reset parent node if this node is a step/task group or DAG. 
- if isGroupNode(node) && node.BoundaryID != "" { - if node.ID != wf.ObjectMeta.Name { // Skip root node - descendantNodeIDs := getDescendantNodeIDs(wf, node) - var nodeGroupNeedsReset bool - // Only reset DAG that's in the same branch as the nodeIDsToReset - for _, child := range descendantNodeIDs { - childNode, err := wf.Status.Nodes.Get(child) - if err != nil { - log.Warnf("was unable to obtain node for %s due to %s", child, err) - return nil, nil, fmt.Errorf("Was unable to obtain node for %s due to %s", child, err) - } - if _, present := nodeIDsToReset[child]; present { - log.Debugf("Group node %s needs to reset since its child %s is in the force reset path", node.Name, childNode.Name) - nodeGroupNeedsReset = true - break - } - } - if nodeGroupNeedsReset { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - } - } - } else { - if node.Type == wfv1.NodeTypePod || node.Type == wfv1.NodeTypeSuspend || node.Type == wfv1.NodeTypeSkipped { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - // Only remove the descendants of a suspended node but not the suspended node itself. The descendants - // of a suspended node need to be removed since the conditions should be re-evaluated based on - // the modified supplied parameter values. 
- if node.Type != wfv1.NodeTypeSuspend { - deletedNodes[node.ID] = true - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, node, deletedPods, podsToDelete) - log.Debugf("Deleted pod node: %s", node.Name) - } + podsToDelete := []string{} - descendantNodeIDs := getDescendantNodeIDs(wf, node) - for _, descendantNodeID := range descendantNodeIDs { - deletedNodes[descendantNodeID] = true - descendantNode, err := wf.Status.Nodes.Get(descendantNodeID) - if err != nil { - log.Warnf("Was unable to obtain node for %s due to %s", descendantNodeID, err) - return nil, nil, fmt.Errorf("Was unable to obtain node for %s due to %s", descendantNodeID, err) - } - if descendantNode.Type == wfv1.NodeTypePod { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, *descendantNode, deletedPods, podsToDelete) - log.Debugf("Deleted pod node %s since it belongs to node %s", descendantNode.Name, node.Name) - } - } - } else { - log.Debugf("Reset non-pod/suspend/skipped node %s", node.Name) - newNode := node.DeepCopy() - newWF.Status.Nodes.Set(newNode.ID, resetNode(*newNode)) - } - } - } else { - if !containsNode(resetParentGroupNodes, node.ID) { - log.Debugf("Node %s remains as is", node.Name) - newWF.Status.Nodes.Set(node.ID, node) + for nodeID := range toDelete { + n := wf.Status.Nodes[nodeID] + if n.Type == wfv1.NodeTypePod { + deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, n, deletedPods, podsToDelete) + } + } + + for id, n := range wf.Status.Nodes { + shouldDelete := toDelete[id] || strings.HasPrefix(n.Name, onExitNodeName) + if _, err := newWf.Status.Nodes.Get(id); err != nil && !shouldDelete { + newWf.Status.Nodes.Set(id, *n.DeepCopy()) + } + if n.Name == onExitNodeName { + queue := list.New() + queue.PushBack(&n) + for { + currNode := queue.Front() + if currNode == nil { + break } - } - case wfv1.NodeError, wfv1.NodeFailed, 
wfv1.NodeOmitted: - if isGroupNode(node) { - newNode := node.DeepCopy() - newWF.Status.Nodes.Set(newNode.ID, resetNode(*newNode)) - log.Debugf("Reset %s node %s since it's a group node", node.Name, string(node.Phase)) - continue - } else { - if node.Type != wfv1.NodeTypeRetry && isDescendantNodeSucceeded(wf, node, nodeIDsToReset) { - log.Debugf("Node %s remains as is since it has succeed child nodes.", node.Name) - newWF.Status.Nodes.Set(node.ID, node) - continue + curr := currNode.Value.(*wfv1.NodeStatus) + deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, *curr, deletedPods, podsToDelete) + for i := range curr.Children { + child, err := wf.Status.Nodes.Get(curr.Children[i]) + if err != nil { + return nil, nil, err + } + queue.PushBack(child) } - log.Debugf("Deleted %s node %s since it's not a group node", node.Name, string(node.Phase)) - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, node, deletedPods, podsToDelete) - log.Debugf("Deleted pod node: %s", node.Name) - deletedNodes[node.ID] = true + queue.Remove(currNode) } - // do not add this status to the node. pretend as if this node never existed. 
- default: - // Do not allow retry of workflows with pods in Running/Pending phase - return nil, nil, errors.InternalErrorf("Workflow cannot be retried with node %s in %s phase", node.Name, node.Phase) } } + for id, oldWfNode := range wf.Status.Nodes { - if len(deletedNodes) > 0 { - for _, node := range newWF.Status.Nodes { - if deletedNodes[node.ID] { - log.Debugf("Removed node: %s", node.Name) - newWF.Status.Nodes.Delete(node.ID) - continue - } + if !newWf.Status.Nodes.Has(id) { + continue + } - var newChildren []string - for _, child := range node.Children { - if !deletedNodes[child] { - newChildren = append(newChildren, child) - } + newChildren := []string{} + for _, childID := range oldWfNode.Children { + if toDelete[childID] { + continue } - node.Children = newChildren + newChildren = append(newChildren, childID) + } + newOutboundNodes := []string{} - var outboundNodes []string - for _, outboundNode := range node.OutboundNodes { - if !deletedNodes[outboundNode] { - outboundNodes = append(outboundNodes, outboundNode) - } + for _, outBoundNodeID := range oldWfNode.OutboundNodes { + if toDelete[outBoundNodeID] { + continue } - node.OutboundNodes = outboundNodes - - newWF.Status.Nodes.Set(node.ID, node) + newOutboundNodes = append(newOutboundNodes, outBoundNodeID) } - } - newWF.Status.StoredTemplates = make(map[string]wfv1.Template) - for id, tmpl := range wf.Status.StoredTemplates { - newWF.Status.StoredTemplates[id] = tmpl + wfNode := newWf.Status.Nodes[id] + wfNode.Children = newChildren + wfNode.OutboundNodes = newOutboundNodes + newWf.Status.Nodes.Set(id, *wfNode.DeepCopy()) } - return newWF, podsToDelete, nil + return newWf, podsToDelete, nil } func resetNode(node wfv1.NodeStatus) wfv1.NodeStatus { @@ -1082,25 +1344,13 @@ func getNodeIDsToReset(restartSuccessful bool, nodeFieldSelector string, nodes w selector, err := fields.ParseSelector(nodeFieldSelector) if err != nil { return nil, err - } else { - for _, node := range nodes { - if 
SelectorMatchesNode(selector, node) { - // traverse all children of the node - var queue []string - queue = append(queue, node.ID) - - for len(queue) > 0 { - childNode := queue[0] - // if the child isn't already in nodeIDsToReset then we add it and traverse its children - if _, present := nodeIDsToReset[childNode]; !present { - nodeIDsToReset[childNode] = true - queue = append(queue, nodes[childNode].Children...) - } - queue = queue[1:] - } - } + } + for _, node := range nodes { + if SelectorMatchesNode(selector, node) { + nodeIDsToReset[node.ID] = true } } + return nodeIDsToReset, nil } diff --git a/workflow/util/util_test.go b/workflow/util/util_test.go index 67b1b2e84374..542ff7aecbf0 100644 --- a/workflow/util/util_test.go +++ b/workflow/util/util_test.go @@ -7,7 +7,6 @@ import ( "path/filepath" "strings" "testing" - "time" "github.com/go-jose/go-jose/v3/jwt" "github.com/stretchr/testify/assert" @@ -41,7 +40,7 @@ spec: entrypoint: whalesay templates: - name: whalesay - container: + container: image: docker/whalesay:latest command: [cowsay] args: ["hello world"] @@ -77,7 +76,7 @@ func TestResubmitWorkflowWithOnExit(t *testing.T) { wf.Status.Nodes.Set(onExitID, onExitNode) newWF, err := FormulateResubmitWorkflow(context.Background(), &wf, true, nil) require.NoError(t, err) - newWFOnExitName := newWF.ObjectMeta.Name + ".onExit" + newWFOnExitName := newWF.Name + ".onExit" newWFOneExitID := newWF.NodeID(newWFOnExitName) _, ok := newWF.Status.Nodes[newWFOneExitID] assert.False(t, ok) @@ -113,7 +112,7 @@ func TestReadFromSingleorMultiplePath(t *testing.T) { } } body, err := ReadFromFilePathsOrUrls(filePaths...) 
- assert.Equal(t, len(body), len(filePaths)) + assert.Len(t, filePaths, len(body)) require.NoError(t, err) for i := range body { assert.Equal(t, body[i], []byte(tc.contents[i])) @@ -181,19 +180,19 @@ metadata: selfLink: /apis/argoproj.io/v1alpha1/namespaces/argo/workflows/suspend uid: 4f08d325-dc5a-43a3-9986-259e259e6ea3 spec: - + entrypoint: suspend templates: - - + - inputs: {} metadata: {} name: suspend outputs: {} steps: - - - + - - name: approve template: approve - - + - inputs: {} metadata: {} name: approve @@ -267,7 +266,7 @@ func TestResumeWorkflowByNodeName(t *testing.T) { wf, err = wfIf.Get(ctx, "suspend", metav1.GetOptions{}) require.NoError(t, err) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByDisplayName("approve").Phase) - assert.Equal(t, "", wf.Status.Nodes.FindByDisplayName("approve").Message) + assert.Empty(t, wf.Status.Nodes.FindByDisplayName("approve").Message) }) t.Run("With user info", func(t *testing.T) { @@ -365,16 +364,16 @@ kind: Workflow metadata: name: suspend-template spec: - + entrypoint: suspend templates: - - + - inputs: {} metadata: {} name: suspend outputs: {} steps: - - - + - - name: approve template: approve - - arguments: @@ -383,7 +382,7 @@ spec: value: '{{steps.approve.outputs.parameters.message}}' name: release template: whalesay - - + - inputs: {} metadata: {} name: approve @@ -393,7 +392,7 @@ spec: valueFrom: supplied: {} suspend: {} - - + - container: args: - '{{inputs.parameters.message}}' @@ -742,22 +741,22 @@ metadata: selfLink: /apis/argoproj.io/v1alpha1/namespaces/argo/workflows/steps-9fkqc uid: 241a39ef-4ff1-487f-8461-98df5d2b50fb spec: - + entrypoint: foo templates: - - + - inputs: {} metadata: {} name: foo outputs: {} steps: - - - + - - name: pass template: pass - - - + - - name: fail template: fail - - + - container: args: - exit 0 @@ -771,7 +770,7 @@ spec: metadata: {} name: pass outputs: {} - - + - container: args: - exit 1 @@ -1026,51 +1025,6 @@ func TestRetryExitHandler(t *testing.T) { func 
TestFormulateRetryWorkflow(t *testing.T) { ctx := context.Background() wfClient := argofake.NewSimpleClientset().ArgoprojV1alpha1().Workflows("my-ns") - createdTime := metav1.Time{Time: time.Now().Add(-1 * time.Second).UTC()} - finishedTime := metav1.Time{Time: createdTime.Add(time.Second * 2)} - t.Run("Steps", func(t *testing.T) { - wf := &wfv1.Workflow{ - ObjectMeta: metav1.ObjectMeta{ - Name: "my-steps", - Labels: map[string]string{ - common.LabelKeyCompleted: "true", - common.LabelKeyWorkflowArchivingStatus: "Pending", - }, - }, - Status: wfv1.WorkflowStatus{ - Phase: wfv1.WorkflowFailed, - StartedAt: createdTime, - FinishedAt: finishedTime, - Nodes: map[string]wfv1.NodeStatus{ - "failed-node": {Name: "failed-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: wfv1.NodeFailed, Message: "failed"}, - "succeeded-node": {Name: "succeeded-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: wfv1.NodeSucceeded, Message: "succeeded"}}, - }, - } - _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) - require.NoError(t, err) - wf, _, err = FormulateRetryWorkflow(ctx, wf, false, "", nil) - require.NoError(t, err) - assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) - assert.Equal(t, metav1.Time{}, wf.Status.FinishedAt) - assert.True(t, wf.Status.StartedAt.After(createdTime.Time)) - assert.NotContains(t, wf.Labels, common.LabelKeyCompleted) - assert.NotContains(t, wf.Labels, common.LabelKeyWorkflowArchivingStatus) - for _, node := range wf.Status.Nodes { - switch node.Phase { - case wfv1.NodeSucceeded: - assert.Equal(t, "succeeded", node.Message) - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.Equal(t, createdTime, node.StartedAt) - assert.Equal(t, finishedTime, node.FinishedAt) - case wfv1.NodeFailed: - assert.Equal(t, "", node.Message) - assert.Equal(t, wfv1.NodeRunning, node.Phase) - assert.Equal(t, metav1.Time{}, node.FinishedAt) - assert.True(t, node.StartedAt.After(createdTime.Time)) - } - } - - }) t.Run("DAG", func(t 
*testing.T) { wf := &wfv1.Workflow{ ObjectMeta: metav1.ObjectMeta{ @@ -1080,15 +1034,14 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "": {Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeTaskGroup}}, + "my-dag": {Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Name: "my-dag", ID: "my-dag"}}, }, } _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) wf, _, err = FormulateRetryWorkflow(ctx, wf, false, "", nil) require.NoError(t, err) - require.Len(t, wf.Status.Nodes, 1) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes[""].Phase) + assert.Len(t, wf.Status.Nodes, 1) }) t.Run("Skipped and Suspended Nodes", func(t *testing.T) { wf := &wfv1.Workflow{ @@ -1099,12 +1052,12 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "entrypoint": {ID: "entrypoint", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"suspended", "skipped"}}, + "wf-with-skipped-and-suspended-nodes": {ID: "wf-with-skipped-and-suspended-nodes", Name: "wf-with-skipped-and-suspended-nodes", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"suspended", "skipped"}}, "suspended": { ID: "suspended", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeSuspend, - BoundaryID: "entrypoint", + BoundaryID: "wf-with-skipped-and-suspended-nodes", Children: []string{"child"}, Outputs: &wfv1.Outputs{Parameters: []wfv1.Parameter{{ Name: "param-1", @@ -1119,14 +1072,8 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) wf, _, err = FormulateRetryWorkflow(ctx, wf, true, "id=suspended", nil) require.NoError(t, err) - require.Len(t, wf.Status.Nodes, 3) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["entrypoint"].Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["suspended"].Phase) - assert.Equal(t, wfv1.Parameter{ - Name: "param-1", 
- Value: nil, - ValueFrom: &wfv1.ValueFrom{Supplied: &wfv1.SuppliedValueFrom{}}, - }, wf.Status.Nodes["suspended"].Outputs.Parameters[0]) + require.Len(t, wf.Status.Nodes, 2) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["wf-with-skipped-and-suspended-nodes"].Phase) assert.Equal(t, wfv1.NodeSkipped, wf.Status.Nodes["skipped"].Phase) }) t.Run("Nested DAG with Non-group Node Selected", func(t *testing.T) { @@ -1138,7 +1085,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "my-nested-dag-1": {ID: "my-nested-dag-1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, + "my-nested-dag-1": {ID: "my-nested-dag-1", Name: "my-nested-dag-1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "my-nested-dag-1", Children: []string{"2", "4"}}, "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, @@ -1151,7 +1098,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #3, #4 are deleted and will be recreated so only 3 nodes left in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 3) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["my-nested-dag-1"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["my-nested-dag-1"].Phase) // The parent group nodes should be running. 
assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["2"].Phase) @@ -1165,9 +1112,9 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "my-nested-dag-2": {ID: "my-nested-dag-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, + "my-nested-dag-2": {ID: "my-nested-dag-2", Name: "my-nested-dag-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "my-nested-dag-2", Children: []string{"2", "4"}}, - "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, + "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1", Children: []string{"3"}}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, "4": {ID: "4", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, }, @@ -1178,12 +1125,10 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #2, #3, and #4 are deleted and will be recreated so only 2 nodes left in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 4) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["my-nested-dag-2"].Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["my-nested-dag-2"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["2"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["3"].Phase) - assert.Equal(t, "", string(wf.Status.Nodes["4"].Phase)) - }) t.Run("OverrideParams", func(t *testing.T) { wf := &wfv1.Workflow{ @@ -1199,7 +1144,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: 
map[string]wfv1.NodeStatus{ - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, + "override-param-wf": {ID: "override-param-wf", Name: "override-param-wf", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG}, }}, } wf, _, err := FormulateRetryWorkflow(context.Background(), wf, false, "", []string{"message=modified"}) @@ -1222,7 +1167,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, + "override-param-wf": {ID: "override-param-wf", Name: "override-param-wf", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, }, StoredWorkflowSpec: &wfv1.WorkflowSpec{Arguments: wfv1.Arguments{ Parameters: []wfv1.Parameter{ @@ -1300,11 +1245,11 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowSucceeded, Nodes: map[string]wfv1.NodeStatus{ - "successful-workflow-2": {ID: "successful-workflow-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "successful-workflow-2", Children: []string{"2", "4"}}, - "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, - "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, - "4": {ID: "4", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, + "successful-workflow-2": {ID: "successful-workflow-2", Name: "successful-workflow-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, + "1": {ID: "1", Name: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "successful-workflow-2", Children: []string{"2", "4"}}, + "2": {ID: "2", Name: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, + 
"3": {ID: "3", Name: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, + "4": {ID: "4", Name: "4", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, }, } _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) @@ -1313,7 +1258,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #4 is deleted and will be recreated so only 4 nodes left in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 4) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["successful-workflow-2"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["successful-workflow-2"].Phase) // The parent group nodes should be running. assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["2"].Phase) @@ -1329,7 +1274,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "continue-on-failed-workflow": {ID: "continue-on-failed-workflow", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, + "continue-on-failed-workflow": {ID: "continue-on-failed-workflow", Name: "continue-on-failed-workflow", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Children: []string{"2", "4"}, Name: "node1"}, "2": {ID: "2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Children: []string{"3"}, Name: "node2"}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Name: "node3"}, @@ -1342,9 +1287,8 @@ func TestFormulateRetryWorkflow(t *testing.T) { wf, podsToDelete, err := FormulateRetryWorkflow(ctx, wf, false, "", nil) require.NoError(t, err) require.Len(t, wf.Status.Nodes, 4) - 
assert.Equal(t, wfv1.NodeFailed, wf.Status.Nodes["2"].Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["3"].Phase) - assert.Len(t, podsToDelete, 2) + assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) + assert.Len(t, podsToDelete, 1) }) t.Run("Retry continue on failed workflow with restartSuccessful and nodeFieldSelector", func(t *testing.T) { @@ -1356,7 +1300,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "continue-on-failed-workflow-2": {ID: "continue-on-failed-workflow-2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, + "continue-on-failed-workflow-2": {ID: "continue-on-failed-workflow-2", Name: "continue-on-failed-workflow-2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Children: []string{"2", "4"}, Name: "node1"}, "2": {ID: "2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Children: []string{"3"}, Name: "node2"}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Name: "node3"}, @@ -1371,7 +1315,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.Len(t, wf.Status.Nodes, 2) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["continue-on-failed-workflow-2"].Phase) - assert.Len(t, podsToDelete, 4) + assert.Len(t, podsToDelete, 3) }) } @@ -1916,24 +1860,23 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { wf, podsToDelete, err := FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step1", nil) require.NoError(t, err) assert.Len(t, wf.Status.Nodes, 1) - assert.Equal(t, 
wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Len(t, podsToDelete, 6) // Retry top individual suspend node wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 3) + require.Len(t, wf.Status.Nodes, 2) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) assert.Len(t, podsToDelete, 5) // Retry the starting on first DAG in one of the branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + + assert.Len(t, wf.Status.Nodes, 9) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1944,17 +1887,13 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - // The nodes in 
the retrying branch are reset - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the starting on second DAG in one of the branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + assert.Len(t, wf.Status.Nodes, 10) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1967,15 +1906,13 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) // The nodes in the retrying branch are reset assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the first individual node (suspended node) connecting to the second DAG in one of the branches wf = 
wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + assert.Len(t, wf.Status.Nodes, 11) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1989,7 +1926,6 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { // The nodes in the retrying branch are reset (parent DAGs are marked as running) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the second individual node (pod node) connecting to the second DAG in one of the branches @@ -2041,7 +1977,7 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 15) + assert.Len(t, wf.Status.Nodes, 14) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, 
wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -2054,34 +1990,20 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) // The nodes in the retrying branch are reset (parent DAGs are marked as running) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) // The suspended node remains succeeded assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) assert.Len(t, podsToDelete, 1) // Retry the node that connects the two branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step4", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 16) + assert.Len(t, wf.Status.Nodes, 15) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) - // All nodes in two branches remains 
succeeded - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step4").Phase) assert.Len(t, podsToDelete, 1) // Retry the last node (failing node) @@ -2092,18 +2014,2165 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, 
wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) - // All nodes in two branches remains succeeded - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step4").Phase) assert.Len(t, podsToDelete, 1) } + +const stepsRetryFormulate = `apiVersion: 
argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-09-19T02:41:51Z" + generateName: steps- + generation: 29 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + name: steps-4k5vn + namespace: argo + resourceVersion: "50080" + uid: 0e7608c7-4555-46a4-8697-3be04eec428b +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: hello-hello-hello + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - inputs: {} + metadata: {} + name: hello-hello-hello + outputs: {} + steps: + - - arguments: + parameters: + - name: message + value: hello1 + name: hello1 + template: whalesay + - - arguments: + parameters: + - name: message + value: hello2a + name: hello2a + template: whalesay + - arguments: + parameters: + - name: message + value: hello2b + name: hello2b + template: whalesay + - container: + args: + - '{{inputs.parameters.message}}' + command: + - cowsay + image: docker/whalesay + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: whalesay + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-09-19T02:43:44Z" + nodes: + steps-4k5vn: + children: + - steps-4k5vn-899690889 + displayName: steps-4k5vn + finishedAt: "2024-09-19T02:43:44Z" + id: steps-4k5vn + name: steps-4k5vn + outboundNodes: + - steps-4k5vn-2627784879 + - steps-4k5vn-2644562498 + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:43:30Z" + 
templateName: hello-hello-hello + templateScope: local/steps-4k5vn + type: Steps + steps-4k5vn-899690889: + boundaryID: steps-4k5vn + children: + - steps-4k5vn-1044844302 + displayName: '[0]' + finishedAt: "2024-09-19T02:42:12Z" + id: steps-4k5vn-899690889 + name: steps-4k5vn[0] + nodeFlag: {} + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:41:51Z" + templateScope: local/steps-4k5vn + type: StepGroup + steps-4k5vn-1044844302: + boundaryID: steps-4k5vn + children: + - steps-4k5vn-4053927188 + displayName: hello1 + finishedAt: "2024-09-19T02:42:09Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-1044844302 + inputs: + parameters: + - name: message + value: hello1 + name: steps-4k5vn[0].hello1 + outputs: + artifacts: + - name: main-logs + s3: + key: steps-4k5vn/steps-4k5vn-whalesay-1044844302/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 1 + memory: 11 + startedAt: "2024-09-19T02:41:51Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-2627784879: + boundaryID: steps-4k5vn + displayName: hello2a + finishedAt: "2024-09-19T02:43:39Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-2627784879 + inputs: + parameters: + - name: message + value: hello2a + name: steps-4k5vn[1].hello2a + outputs: + artifacts: + - name: main-logs + s3: + key: steps-4k5vn/steps-4k5vn-whalesay-2627784879/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 5 + startedAt: "2024-09-19T02:43:30Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-2644562498: + boundaryID: steps-4k5vn + displayName: hello2b + finishedAt: "2024-09-19T02:43:41Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-2644562498 + inputs: + parameters: + - name: message + value: hello2b + name: steps-4k5vn[1].hello2b + outputs: + artifacts: + - name: main-logs + s3: + key: 
steps-4k5vn/steps-4k5vn-whalesay-2644562498/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2024-09-19T02:43:30Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-4053927188: + boundaryID: steps-4k5vn + children: + - steps-4k5vn-2627784879 + - steps-4k5vn-2644562498 + displayName: '[1]' + finishedAt: "2024-09-19T02:43:44Z" + id: steps-4k5vn-4053927188 + name: steps-4k5vn[1] + nodeFlag: {} + phase: Succeeded + progress: 2/2 + resourcesDuration: + cpu: 0 + memory: 11 + startedAt: "2024-09-19T02:43:30Z" + templateScope: local/steps-4k5vn + type: StepGroup + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:43:30Z" + taskResultsCompletionStatus: + steps-4k5vn-1044844302: true + steps-4k5vn-2627784879: true + steps-4k5vn-2644562498: true + +` + +func TestStepsRetryWorkflow(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(stepsRetryFormulate) + selectorStr := "id=steps-4k5vn-2627784879" + + running := map[string]bool{ + "steps-4k5vn-4053927188": true, + "steps-4k5vn": true, + } + + deleted := map[string]bool{ + "steps-4k5vn-2627784879": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, selectorStr, []string{}) + require.NoError(err) + assert.Len(podsToDelete, 1) + assert.Len(newWf.Status.Nodes, 5) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +} + +func TestDagConversion(t *testing.T) { + assert := assert.New(t) + require 
:= require.New(t) + wf := wfv1.MustUnmarshalWorkflow(stepsRetryFormulate) + + nodes, err := newWorkflowsDag(wf) + require.NoError(err) + assert.Len(nodes, len(wf.Status.Nodes)) + + numNilParent := 0 + for _, n := range nodes { + if n.parent == nil { + numNilParent++ + } + } + assert.Equal(1, numNilParent) +} + +const dagDiamondRetry = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-01T04:27:23Z" + generateName: dag-diamond- + generation: 16 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + name: dag-diamond-82q7s + namespace: argo + resourceVersion: "4633" + uid: dd3d2674-43d8-446a-afdf-17ec95afade2 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: diamond + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - dag: + tasks: + - arguments: + parameters: + - name: message + value: A + name: A + template: echo + - arguments: + parameters: + - name: message + value: B + depends: A + name: B + template: echo + - arguments: + parameters: + - name: message + value: C + depends: A + name: C + template: echo + - arguments: + parameters: + - name: message + value: D + depends: B && C + name: D + template: echo + inputs: {} + metadata: {} + name: diamond + outputs: {} + - container: + command: + - echo + - '{{inputs.parameters.message}}' + image: alpine:3.7 + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: echo + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + 
type: Completed + finishedAt: "2024-10-01T04:27:42Z" + nodes: + dag-diamond-82q7s: + children: + - dag-diamond-82q7s-1310542453 + displayName: dag-diamond-82q7s + finishedAt: "2024-10-01T04:27:42Z" + id: dag-diamond-82q7s + name: dag-diamond-82q7s + outboundNodes: + - dag-diamond-82q7s-1226654358 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 8 + startedAt: "2024-10-01T04:27:23Z" + templateName: diamond + templateScope: local/dag-diamond-82q7s + type: DAG + dag-diamond-82q7s-1226654358: + boundaryID: dag-diamond-82q7s + displayName: D + finishedAt: "2024-10-01T04:27:39Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1226654358 + inputs: + parameters: + - name: message + value: D + name: dag-diamond-82q7s.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1226654358/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:36Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1260209596: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1226654358 + displayName: B + finishedAt: "2024-10-01T04:27:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1260209596 + inputs: + parameters: + - name: message + value: B + name: dag-diamond-82q7s.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1260209596/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:30Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1276987215: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1226654358 + displayName: C + finishedAt: "2024-10-01T04:27:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1276987215 + inputs: + parameters: + - name: 
message + value: C + name: dag-diamond-82q7s.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1276987215/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:30Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1310542453: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1260209596 + - dag-diamond-82q7s-1276987215 + displayName: A + finishedAt: "2024-10-01T04:27:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1310542453 + inputs: + parameters: + - name: message + value: A + name: dag-diamond-82q7s.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1310542453/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:23Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 8 + startedAt: "2024-10-01T04:27:23Z" + taskResultsCompletionStatus: + dag-diamond-82q7s-1226654358: true + dag-diamond-82q7s-1260209596: true + dag-diamond-82q7s-1276987215: true + dag-diamond-82q7s-1310542453: true + +` + +func TestDAGDiamondRetryWorkflow(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(dagDiamondRetry) + selectorStr := "id=dag-diamond-82q7s-1260209596" + + running := map[string]bool{ + "dag-diamond-82q7s": true, + } + + deleted := map[string]bool{ + "dag-diamond-82q7s-1226654358": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, 
selectorStr, []string{}) + + require.NoError(err) + assert.Len(podsToDelete, 2) + assert.Len(newWf.Status.Nodes, 3) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } +} + +const onExitWorkflowRetry = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-02T05:54:00Z" + generateName: work-avoidance- + generation: 25 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + workflows.argoproj.io/resubmitted-from-workflow: work-avoidance-xghlj + name: work-avoidance-trkkq + namespace: argo + resourceVersion: "2661" + uid: 0271624e-0096-428a-81da-643dbbd69440 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: main + onExit: save-markers + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - inputs: {} + metadata: {} + name: main + outputs: {} + steps: + - - arguments: {} + name: load-markers + template: load-markers + - - arguments: + parameters: + - name: num + value: '{{item}}' + name: echo + template: echo + withSequence: + count: "3" + - container: + command: + - mkdir + - -p + - /work/markers + image: docker/whalesay:latest + name: "" + resources: {} + volumeMounts: + - mountPath: /work + name: work + inputs: + artifacts: + - name: markers + optional: true + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + metadata: {} + name: load-markers + outputs: {} + - inputs: + parameters: + - name: num + metadata: {} + name: echo + outputs: {} + script: + command: + - bash + - -eux + image: docker/whalesay:latest + name: "" + resources: {} + source: | + 
marker=/work/markers/$(date +%Y-%m-%d)-echo-{{inputs.parameters.num}} + if [ -e ${marker} ]; then + echo "work already done" + exit 0 + fi + echo "working very hard" + # toss a virtual coin and exit 1 if 1 + if [ $(($(($RANDOM%10))%2)) -eq 1 ]; then + echo "oh no!" + exit 1 + fi + touch ${marker} + volumeMounts: + - mountPath: /work + name: work + - container: + command: + - "true" + image: docker/whalesay:latest + name: "" + resources: {} + volumeMounts: + - mountPath: /work + name: work + inputs: {} + metadata: {} + name: save-markers + outputs: + artifacts: + - name: markers + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + volumeClaimTemplates: + - metadata: + creationTimestamp: null + name: work + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 10Mi + status: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-02T05:54:41Z" + nodes: + work-avoidance-trkkq: + children: + - work-avoidance-trkkq-88427725 + displayName: work-avoidance-trkkq + finishedAt: "2024-10-02T05:54:30Z" + id: work-avoidance-trkkq + name: work-avoidance-trkkq + outboundNodes: + - work-avoidance-trkkq-4180283560 + - work-avoidance-trkkq-605537244 + - work-avoidance-trkkq-4183398008 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-10-02T05:54:00Z" + templateName: main + templateScope: local/work-avoidance-trkkq 
+ type: Steps + work-avoidance-trkkq-21464344: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-4180283560 + - work-avoidance-trkkq-605537244 + - work-avoidance-trkkq-4183398008 + displayName: '[1]' + finishedAt: "2024-10-02T05:54:30Z" + id: work-avoidance-trkkq-21464344 + name: work-avoidance-trkkq[1] + nodeFlag: {} + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 18 + startedAt: "2024-10-02T05:54:14Z" + templateScope: local/work-avoidance-trkkq + type: StepGroup + work-avoidance-trkkq-88427725: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-3329426915 + displayName: '[0]' + finishedAt: "2024-10-02T05:54:14Z" + id: work-avoidance-trkkq-88427725 + name: work-avoidance-trkkq[0] + nodeFlag: {} + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-10-02T05:54:00Z" + templateScope: local/work-avoidance-trkkq + type: StepGroup + work-avoidance-trkkq-605537244: + boundaryID: work-avoidance-trkkq + displayName: echo(1:1) + finishedAt: "2024-10-02T05:54:24Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-605537244 + inputs: + parameters: + - name: num + value: "1" + name: work-avoidance-trkkq[1].echo(1:1) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-605537244/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-1461956272: + displayName: work-avoidance-trkkq.onExit + finishedAt: "2024-10-02T05:54:38Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-1461956272 + name: work-avoidance-trkkq.onExit + nodeFlag: + hooked: true + outputs: + artifacts: + - name: markers + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: 
minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-save-markers-1461956272/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 5 + startedAt: "2024-10-02T05:54:30Z" + templateName: save-markers + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-3329426915: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-21464344 + displayName: load-markers + finishedAt: "2024-10-02T05:54:12Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-3329426915 + inputs: + artifacts: + - name: markers + optional: true + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + name: work-avoidance-trkkq[0].load-markers + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-load-markers-3329426915/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-02T05:54:00Z" + templateName: load-markers + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-4180283560: + boundaryID: work-avoidance-trkkq + displayName: echo(0:0) + finishedAt: "2024-10-02T05:54:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-4180283560 + inputs: + parameters: + - name: num + value: "0" + name: work-avoidance-trkkq[1].echo(0:0) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-4180283560/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 1 + memory: 8 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: 
local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-4183398008: + boundaryID: work-avoidance-trkkq + displayName: echo(2:2) + finishedAt: "2024-10-02T05:54:21Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-4183398008 + inputs: + parameters: + - name: num + value: "2" + name: work-avoidance-trkkq[1].echo(2:2) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-4183398008/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: local/work-avoidance-trkkq + type: Pod + phase: Succeeded + progress: 5/5 + resourcesDuration: + cpu: 1 + memory: 27 + startedAt: "2024-10-02T05:54:00Z" + taskResultsCompletionStatus: + work-avoidance-trkkq-605537244: true + work-avoidance-trkkq-1461956272: true + work-avoidance-trkkq-3329426915: true + work-avoidance-trkkq-4180283560: true + work-avoidance-trkkq-4183398008: true +` + +func TestOnExitWorkflowRetry(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(onExitWorkflowRetry) + running := map[string]bool{ + "work-avoidance-trkkq-21464344": true, + "work-avoidance-trkkq": true, + } + deleted := map[string]bool{ + "work-avoidance-trkkq-1461956272": true, + "work-avoidance-trkkq-4183398008": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + + selectorStr := "id=work-avoidance-trkkq-4183398008" + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, selectorStr, []string{}) + require.NoError(err) + assert.Len(newWf.Status.Nodes, 6) + assert.Len(podsToDelete, 2) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, 
node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +} + +const onExitWorkflow = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-14T09:21:14Z" + generation: 10 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Failed + name: retry-workflow-with-failed-exit-handler + namespace: argo + resourceVersion: "13510" + uid: f72bf6f7-3d8c-4b31-893b-ef03d4718959 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: hello + onExit: exit-handler + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - container: + args: + - echo hello + command: + - sh + - -c + image: alpine:3.18 + name: "" + resources: {} + inputs: {} + metadata: {} + name: hello + outputs: {} + - container: + args: + - exit 1 + command: + - sh + - -c + image: alpine:3.18 + name: "" + resources: {} + inputs: {} + metadata: {} + name: exit-handler + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-14T09:21:27Z" + message: Error (exit code 1) + nodes: + retry-workflow-with-failed-exit-handler: + displayName: retry-workflow-with-failed-exit-handler + finishedAt: "2024-10-14T09:21:18Z" + hostNodeName: k3d-k3s-default-server-0 + id: retry-workflow-with-failed-exit-handler + name: retry-workflow-with-failed-exit-handler + outputs: + artifacts: + - name: main-logs + s3: + key: 
retry-workflow-with-failed-exit-handler/retry-workflow-with-failed-exit-handler/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-14T09:21:14Z" + templateName: hello + templateScope: local/retry-workflow-with-failed-exit-handler + type: Pod + retry-workflow-with-failed-exit-handler-512308683: + displayName: retry-workflow-with-failed-exit-handler.onExit + finishedAt: "2024-10-14T09:21:24Z" + hostNodeName: k3d-k3s-default-server-0 + id: retry-workflow-with-failed-exit-handler-512308683 + message: Error (exit code 1) + name: retry-workflow-with-failed-exit-handler.onExit + nodeFlag: + hooked: true + outputs: + artifacts: + - name: main-logs + s3: + key: retry-workflow-with-failed-exit-handler/retry-workflow-with-failed-exit-handler-exit-handler-512308683/main.log + exitCode: "1" + phase: Failed + progress: 0/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-14T09:21:21Z" + templateName: exit-handler + templateScope: local/retry-workflow-with-failed-exit-handler + type: Pod + phase: Failed + progress: 1/2 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-14T09:21:14Z" + taskResultsCompletionStatus: + retry-workflow-with-failed-exit-handler: true + retry-workflow-with-failed-exit-handler-512308683: true +` + +func TestOnExitWorkflow(t *testing.T) { + require := require.New(t) + assert := assert.New(t) + wf := wfv1.MustUnmarshalWorkflow(onExitWorkflow) + + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, false, "", []string{}) + require.NoError(err) + assert.Len(podsToDelete, 1) + assert.Len(newWf.Status.Nodes, 1) + assert.Equal(wfv1.NodeSucceeded, newWf.Status.Nodes["retry-workflow-with-failed-exit-handler"].Phase) + +} + +const nestedDAG = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-16T04:12:51Z" + generateName: dag-nested- + 
generation: 39 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + workflows.argoproj.io/resubmitted-from-workflow: dag-nested-52l5t + name: dag-nested-zxlc2 + namespace: argo + resourceVersion: "11348" + uid: 402ed1f0-0dbf-42fd-92b8-b7858ba2979c +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: diamond + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - container: + command: + - echo + - '{{inputs.parameters.message}}' + image: alpine:3.7 + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: echo + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: message + value: A + name: A + template: nested-diamond + - arguments: + parameters: + - name: message + value: B + depends: A + name: B + template: nested-diamond + - arguments: + parameters: + - name: message + value: C + depends: A + name: C + template: nested-diamond + - arguments: + parameters: + - name: message + value: D + depends: B && C + name: D + template: nested-diamond + inputs: {} + metadata: {} + name: diamond + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}A' + name: A + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}B' + depends: A + name: B + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}C' + depends: A + name: C + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}D' + depends: B && C + name: D + template: echo + inputs: + parameters: + - name: message + metadata: {} + name: nested-diamond + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + 
secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-16T04:13:49Z" + nodes: + dag-nested-zxlc2: + children: + - dag-nested-zxlc2-1970677234 + displayName: dag-nested-zxlc2 + finishedAt: "2024-10-16T04:13:49Z" + id: dag-nested-zxlc2 + name: dag-nested-zxlc2 + outboundNodes: + - dag-nested-zxlc2-644277987 + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + templateName: diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-644277987: + boundaryID: dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:46Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-644277987 + inputs: + parameters: + - name: message + value: DD + name: dag-nested-zxlc2.D.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-644277987/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:43Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-694610844: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-744943701 + - dag-nested-zxlc2-728166082 + displayName: A + finishedAt: "2024-10-16T04:13:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-694610844 + inputs: + parameters: + - name: message + value: DA + name: dag-nested-zxlc2.D.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-694610844/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 1 + startedAt: "2024-10-16T04:13:30Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-725087280: + 
boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-1953899615 + - dag-nested-zxlc2-1937121996 + displayName: D + finishedAt: "2024-10-16T04:13:07Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-725087280 + inputs: + parameters: + - name: message + value: AD + name: dag-nested-zxlc2.A.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-725087280/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:03Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-728166082: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-644277987 + displayName: C + finishedAt: "2024-10-16T04:13:40Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-728166082 + inputs: + parameters: + - name: message + value: DC + name: dag-nested-zxlc2.D.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-728166082/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:36Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-744943701: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-644277987 + displayName: B + finishedAt: "2024-10-16T04:13:40Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-744943701 + inputs: + parameters: + - name: message + value: DB + name: dag-nested-zxlc2.D.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-744943701/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:36Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-808975375: + boundaryID: 
dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-825752994 + - dag-nested-zxlc2-842530613 + displayName: A + finishedAt: "2024-10-16T04:12:54Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-808975375 + inputs: + parameters: + - name: message + value: AA + name: dag-nested-zxlc2.A.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-808975375/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 1 + startedAt: "2024-10-16T04:12:51Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-825752994: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-725087280 + displayName: B + finishedAt: "2024-10-16T04:13:00Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-825752994 + inputs: + parameters: + - name: message + value: AB + name: dag-nested-zxlc2.A.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-825752994/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:12:57Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-842530613: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-725087280 + displayName: C + finishedAt: "2024-10-16T04:13:00Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-842530613 + inputs: + parameters: + - name: message + value: AC + name: dag-nested-zxlc2.A.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-842530613/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:12:57Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-903321510: + boundaryID: dag-nested-zxlc2-1937121996 + children: 
+ - dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-903321510 + inputs: + parameters: + - name: message + value: CD + name: dag-nested-zxlc2.C.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-903321510/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:23Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-936876748: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-903321510 + displayName: B + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-936876748 + inputs: + parameters: + - name: message + value: CB + name: dag-nested-zxlc2.C.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-936876748/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-953654367: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-903321510 + displayName: C + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-953654367 + inputs: + parameters: + - name: message + value: CC + name: dag-nested-zxlc2.C.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-953654367/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-987209605: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-936876748 + - dag-nested-zxlc2-953654367 + 
displayName: A + finishedAt: "2024-10-16T04:13:13Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-987209605 + inputs: + parameters: + - name: message + value: CA + name: dag-nested-zxlc2.C.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-987209605/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:10Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-1920344377: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-694610844 + displayName: D + finishedAt: "2024-10-16T04:13:49Z" + id: dag-nested-zxlc2-1920344377 + inputs: + parameters: + - name: message + value: D + name: dag-nested-zxlc2.D + outboundNodes: + - dag-nested-zxlc2-644277987 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 7 + startedAt: "2024-10-16T04:13:30Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1937121996: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-987209605 + displayName: C + finishedAt: "2024-10-16T04:13:30Z" + id: dag-nested-zxlc2-1937121996 + inputs: + parameters: + - name: message + value: C + name: dag-nested-zxlc2.C + outboundNodes: + - dag-nested-zxlc2-903321510 + phase: Succeeded + progress: 8/8 + resourcesDuration: + cpu: 0 + memory: 15 + startedAt: "2024-10-16T04:13:10Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1953899615: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-3753141766 + displayName: B + finishedAt: "2024-10-16T04:13:30Z" + id: dag-nested-zxlc2-1953899615 + inputs: + parameters: + - name: message + value: B + name: dag-nested-zxlc2.B + outboundNodes: + - dag-nested-zxlc2-3837029861 + phase: Succeeded + progress: 8/8 + resourcesDuration: + cpu: 0 + memory: 15 + startedAt: "2024-10-16T04:13:10Z" + 
templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1970677234: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-808975375 + displayName: A + finishedAt: "2024-10-16T04:13:10Z" + id: dag-nested-zxlc2-1970677234 + inputs: + parameters: + - name: message + value: A + name: dag-nested-zxlc2.A + outboundNodes: + - dag-nested-zxlc2-725087280 + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-3719586528: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-3837029861 + displayName: C + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3719586528 + inputs: + parameters: + - name: message + value: BC + name: dag-nested-zxlc2.B.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3719586528/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3736364147: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-3837029861 + displayName: B + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3736364147 + inputs: + parameters: + - name: message + value: BB + name: dag-nested-zxlc2.B.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3736364147/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3753141766: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - 
dag-nested-zxlc2-3736364147 + - dag-nested-zxlc2-3719586528 + displayName: A + finishedAt: "2024-10-16T04:13:13Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3753141766 + inputs: + parameters: + - name: message + value: BA + name: dag-nested-zxlc2.B.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3753141766/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:10Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3837029861: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3837029861 + inputs: + parameters: + - name: message + value: BD + name: dag-nested-zxlc2.B.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3837029861/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:23Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + taskResultsCompletionStatus: + dag-nested-zxlc2-644277987: true + dag-nested-zxlc2-694610844: true + dag-nested-zxlc2-725087280: true + dag-nested-zxlc2-728166082: true + dag-nested-zxlc2-744943701: true + dag-nested-zxlc2-808975375: true + dag-nested-zxlc2-825752994: true + dag-nested-zxlc2-842530613: true + dag-nested-zxlc2-903321510: true + dag-nested-zxlc2-936876748: true + dag-nested-zxlc2-953654367: true + dag-nested-zxlc2-987209605: true + dag-nested-zxlc2-3719586528: true + dag-nested-zxlc2-3736364147: true + dag-nested-zxlc2-3753141766: true + dag-nested-zxlc2-3837029861: true + +` + +func TestNestedDAG(t *testing.T) { + 
require := require.New(t) + assert := assert.New(t) + wf := wfv1.MustUnmarshalWorkflow(nestedDAG) + + running := map[string]bool{ + "dag-nested-zxlc2-1920344377": true, + "dag-nested-zxlc2-1970677234 ": true, + "dag-nested-zxlc2": true, + } + deleted := map[string]bool{ + "dag-nested-zxlc2-744943701": true, + "dag-nested-zxlc2-644277987": true, + } + + succeeded := map[string]bool{} + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, "id=dag-nested-zxlc2-744943701", []string{}) + require.NoError(err) + assert.Len(podsToDelete, 2) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +} + +const onExitPanic = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2025-02-11T05:25:47Z" + generateName: exit-handlers- + generation: 21 + labels: + default-label: thisLabelIsFromWorkflowDefaults + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Failed + name: exit-handlers-n7s4n + namespace: argo + resourceVersion: "2255" + uid: 7b2f1451-9a9a-4f66-a0d9-0364f814d948 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: intentional-fail + onExit: exit-handler + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - container: + args: + - echo intentional failure; exit 1 + command: + - sh + - -c + image: alpine:latest + name: "" + resources: {} + inputs: {} + metadata: {} + name: intentional-fail + outputs: {} + - inputs: {} + metadata: {} + name: exit-handler + outputs: {} + steps: + - - arguments: {} + name: notify + template: send-email + - arguments: {} + 
name: celebrate + template: celebrate + when: '{{workflow.status}} == Succeeded' + - arguments: {} + name: cry + template: cry + when: '{{workflow.status}} != Succeeded' + - container: + args: + - 'echo send e-mail: {{workflow.name}} {{workflow.status}} {{workflow.duration}}. + Failed steps {{workflow.failures}}' + command: + - sh + - -c + image: alpine:latest + name: "" + resources: {} + inputs: {} + metadata: {} + name: send-email + outputs: {} + - container: + args: + - echo hooray! + command: + - sh + - -c + image: alpine:latest + name: "" + resources: {} + inputs: {} + metadata: {} + name: celebrate + outputs: {} + - container: + args: + - echo boohoo! + command: + - sh + - -c + image: alpine:latest + name: "" + resources: {} + inputs: {} + metadata: {} + name: cry + outputs: {} + workflowMetadata: + labels: + default-label: thisLabelIsFromWorkflowDefaults +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2025-02-11T05:31:30Z" + message: 'main: Error (exit code 1)' + nodes: + exit-handlers-n7s4n: + displayName: exit-handlers-n7s4n + finishedAt: "2025-02-11T05:31:18Z" + hostNodeName: k3d-k3s-default-server-0 + id: exit-handlers-n7s4n + message: 'main: Error (exit code 1)' + name: exit-handlers-n7s4n + outputs: + artifacts: + - name: main-logs + s3: + key: exit-handlers-n7s4n/exit-handlers-n7s4n/main.log + exitCode: "1" + phase: Failed + progress: 0/1 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2025-02-11T05:31:12Z" + templateName: intentional-fail + templateScope: local/exit-handlers-n7s4n + type: Pod + 
exit-handlers-n7s4n-134905866: + boundaryID: exit-handlers-n7s4n-1410405845 + displayName: celebrate + finishedAt: "2025-02-11T05:31:21Z" + id: exit-handlers-n7s4n-134905866 + message: when 'Failed == Succeeded' evaluated false + name: exit-handlers-n7s4n.onExit[0].celebrate + nodeFlag: {} + phase: Skipped + startedAt: "2025-02-11T05:31:21Z" + templateName: celebrate + templateScope: local/exit-handlers-n7s4n + type: Skipped + exit-handlers-n7s4n-975057257: + boundaryID: exit-handlers-n7s4n-1410405845 + children: + - exit-handlers-n7s4n-3201878844 + - exit-handlers-n7s4n-134905866 + - exit-handlers-n7s4n-2699669595 + displayName: '[0]' + finishedAt: "2025-02-11T05:31:30Z" + id: exit-handlers-n7s4n-975057257 + name: exit-handlers-n7s4n.onExit[0] + nodeFlag: {} + phase: Succeeded + progress: 2/2 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2025-02-11T05:31:21Z" + templateScope: local/exit-handlers-n7s4n + type: StepGroup + exit-handlers-n7s4n-1410405845: + children: + - exit-handlers-n7s4n-975057257 + displayName: exit-handlers-n7s4n.onExit + finishedAt: "2025-02-11T05:31:30Z" + id: exit-handlers-n7s4n-1410405845 + name: exit-handlers-n7s4n.onExit + nodeFlag: + hooked: true + outboundNodes: + - exit-handlers-n7s4n-3201878844 + - exit-handlers-n7s4n-134905866 + - exit-handlers-n7s4n-2699669595 + phase: Succeeded + progress: 2/2 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2025-02-11T05:31:21Z" + templateName: exit-handler + templateScope: local/exit-handlers-n7s4n + type: Steps + exit-handlers-n7s4n-2699669595: + boundaryID: exit-handlers-n7s4n-1410405845 + displayName: cry + finishedAt: "2025-02-11T05:31:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: exit-handlers-n7s4n-2699669595 + name: exit-handlers-n7s4n.onExit[0].cry + outputs: + artifacts: + - name: main-logs + s3: + key: exit-handlers-n7s4n/exit-handlers-n7s4n-cry-2699669595/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 3 + 
startedAt: "2025-02-11T05:31:21Z" + templateName: cry + templateScope: local/exit-handlers-n7s4n + type: Pod + exit-handlers-n7s4n-3201878844: + boundaryID: exit-handlers-n7s4n-1410405845 + displayName: notify + finishedAt: "2025-02-11T05:31:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: exit-handlers-n7s4n-3201878844 + name: exit-handlers-n7s4n.onExit[0].notify + outputs: + artifacts: + - name: main-logs + s3: + key: exit-handlers-n7s4n/exit-handlers-n7s4n-send-email-3201878844/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 3 + startedAt: "2025-02-11T05:31:21Z" + templateName: send-email + templateScope: local/exit-handlers-n7s4n + type: Pod + phase: Failed + progress: 2/3 + resourcesDuration: + cpu: 0 + memory: 10 + startedAt: "2025-02-11T05:31:12Z" + taskResultsCompletionStatus: + exit-handlers-n7s4n: true + exit-handlers-n7s4n-2699669595: true + exit-handlers-n7s4n-3201878844: true +` + +func TestRegressions(t *testing.T) { + t.Run("exit handler", func(t *testing.T) { + wf := wfv1.MustUnmarshalWorkflow(onExitPanic) + newWf, _, err := FormulateRetryWorkflow(context.Background(), wf, true, "id=exit-handlers-n7s4n-975057257", []string{}) + require.NoError(t, err) + // we can't really handle exit handlers granually yet + assert.Empty(t, newWf.Status.Nodes) + }) +} diff --git a/workflow/validate/validate.go b/workflow/validate/validate.go index 30574f2b0508..5310c1c9cb2b 100644 --- a/workflow/validate/validate.go +++ b/workflow/validate/validate.go @@ -206,8 +206,8 @@ func ValidateWorkflow(wftmplGetter templateresolution.WorkflowTemplateNamespaced } } - annotationSources := [][]string{maps.Keys(wf.ObjectMeta.Annotations)} - labelSources := [][]string{maps.Keys(wf.ObjectMeta.Labels)} + annotationSources := [][]string{maps.Keys(wf.Annotations)} + labelSources := [][]string{maps.Keys(wf.Labels)} if wf.Spec.WorkflowMetadata != nil { annotationSources = append(annotationSources, 
maps.Keys(wf.Spec.WorkflowMetadata.Annotations)) labelSources = append(labelSources, maps.Keys(wf.Spec.WorkflowMetadata.Labels), maps.Keys(wf.Spec.WorkflowMetadata.LabelsFrom)) @@ -338,8 +338,8 @@ func ValidateWorkflowTemplate(wftmplGetter templateresolution.WorkflowTemplateNa wf := &wfv1.Workflow{ ObjectMeta: v1.ObjectMeta{ - Labels: wftmpl.ObjectMeta.Labels, - Annotations: wftmpl.ObjectMeta.Annotations, + Labels: wftmpl.Labels, + Annotations: wftmpl.Annotations, }, Spec: wftmpl.Spec, } @@ -356,8 +356,8 @@ func ValidateClusterWorkflowTemplate(wftmplGetter templateresolution.WorkflowTem wf := &wfv1.Workflow{ ObjectMeta: v1.ObjectMeta{ - Labels: cwftmpl.ObjectMeta.Labels, - Annotations: cwftmpl.ObjectMeta.Annotations, + Labels: cwftmpl.Labels, + Annotations: cwftmpl.Annotations, }, Spec: cwftmpl.Spec, } @@ -406,7 +406,7 @@ func ValidateCronWorkflow(ctx context.Context, wftmplGetter templateresolution.W func (ctx *templateValidationCtx) validateInitContainers(containers []wfv1.UserContainer) error { for _, container := range containers { - if len(container.Container.Name) == 0 { + if len(container.Name) == 0 { return errors.Errorf(errors.CodeBadRequest, "initContainers must all have container name") } } @@ -755,11 +755,11 @@ func (ctx *templateValidationCtx) validateLeaf(scope map[string]interface{}, tmp if tmpl.Container.Image == "" { switch baseTemplate := tmplCtx.GetCurrentTemplateBase().(type) { case *wfv1.Workflow: - if !(baseTemplate.Spec.TemplateDefaults != nil && baseTemplate.Spec.TemplateDefaults.Container != nil && baseTemplate.Spec.TemplateDefaults.Container.Image != "") { + if baseTemplate.Spec.TemplateDefaults == nil || baseTemplate.Spec.TemplateDefaults.Container == nil || baseTemplate.Spec.TemplateDefaults.Container.Image == "" { return errors.Errorf(errors.CodeBadRequest, "templates.%s.container.image may not be empty", tmpl.Name) } case *wfv1.WorkflowTemplate: - if !(baseTemplate.Spec.TemplateDefaults != nil && 
baseTemplate.Spec.TemplateDefaults.Container != nil && baseTemplate.Spec.TemplateDefaults.Container.Image != "") { + if baseTemplate.Spec.TemplateDefaults == nil || baseTemplate.Spec.TemplateDefaults.Container == nil || baseTemplate.Spec.TemplateDefaults.Container.Image == "" { return errors.Errorf(errors.CodeBadRequest, "templates.%s.container.image may not be empty", tmpl.Name) } default: @@ -826,11 +826,11 @@ func (ctx *templateValidationCtx) validateLeaf(scope map[string]interface{}, tmp if tmpl.Script.Image == "" { switch baseTemplate := tmplCtx.GetCurrentTemplateBase().(type) { case *wfv1.Workflow: - if !(baseTemplate.Spec.TemplateDefaults != nil && baseTemplate.Spec.TemplateDefaults.Script != nil && baseTemplate.Spec.TemplateDefaults.Script.Image != "") { + if baseTemplate.Spec.TemplateDefaults == nil || baseTemplate.Spec.TemplateDefaults.Script == nil || baseTemplate.Spec.TemplateDefaults.Script.Image == "" { return errors.Errorf(errors.CodeBadRequest, "templates.%s.script.image may not be empty", tmpl.Name) } case *wfv1.WorkflowTemplate: - if !(baseTemplate.Spec.TemplateDefaults != nil && baseTemplate.Spec.TemplateDefaults.Script != nil && baseTemplate.Spec.TemplateDefaults.Script.Image != "") { + if baseTemplate.Spec.TemplateDefaults == nil || baseTemplate.Spec.TemplateDefaults.Script == nil || baseTemplate.Spec.TemplateDefaults.Script.Image == "" { return errors.Errorf(errors.CodeBadRequest, "templates.%s.script.image may not be empty", tmpl.Name) } default: