diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index c60668e82..000000000 --- a/.dockerignore +++ /dev/null @@ -1,8 +0,0 @@ -# General - -# Backend -server/build/libs - -# UI -**/node_modules -ui/build \ No newline at end of file diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 6e4598114..000000000 --- a/.gitattributes +++ /dev/null @@ -1,4 +0,0 @@ -docs/* linguist-documentation -server/src/main/resources/swagger-ui/* linguist-vendored - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 6ee796ba4..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: "" -labels: 'type: bug' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**Details** -Conductor version: -Persistence implementation: Cassandra, Postgres, MySQL, Dynomite etc -Queue implementation: Postgres, MySQL, Dynoqueues etc -Lock: Redis or Zookeeper? -Workflow definition: -Task definition: -Event handler definition: - - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Additional context** -Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index 3ba13e0ce..000000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1 +0,0 @@ -blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md deleted file mode 100644 index 790cd31e1..000000000 --- a/.github/ISSUE_TEMPLATE/documentation.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -name: Documentation -about: Something in the documentation that needs improvement -title: "[DOC]: " -labels: 'type: docs' -assignees: '' - ---- - -## What are you missing in the docs - -## Proposed text diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 659e4a8cd..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Propose a new feature -title: "[FEATURE]: " -labels: 'type: feature' -assignees: '' - ---- - -Please read our [contributor guide](https://github.com/Netflix/conductor/blob/main/CONTRIBUTING.md) before creating an issue. -Also consider discussing your idea on the [discussion forum](https://github.com/Netflix/conductor/discussions) first. 
- -## Describe the Feature Request -_A clear and concise description of what the feature request is._ - -## Describe Preferred Solution -_A clear and concise description of what you want to happen._ - -## Describe Alternatives -_A clear and concise description of any alternative solutions or features you've considered._ diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index ef87e0b82..000000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: 2 -updates: - - package-ecosystem: "gradle" - directory: "/" - schedule: - interval: "weekly" - reviewers: - - "aravindanr" - - "jxu-nflx" - - "apanicker-nflx" - - package-ecosystem: "github-actions" - directory: "/" - schedule: - interval: "weekly" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index 35e6d8221..000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,21 +0,0 @@ -Pull Request type ----- -- [ ] Bugfix -- [ ] Feature -- [ ] Refactoring (no functional changes, no api changes) -- [ ] Build related changes (Please run `./gradlew generateLock saveLock` to refresh dependencies) -- [ ] WHOSUSING.md -- [ ] Other (please describe): - -Please remember to run `./gradlew :conductor-java-sdk:spotlessApply` to fix any format violations. 
- -Changes in this PR ----- - -_Describe the new behavior from this PR, and why it's needed_ -Issue # - -Alternatives considered ----- - -_Describe alternative implementation you have considered_ diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml deleted file mode 100644 index aea460e5e..000000000 --- a/.github/release-drafter.yml +++ /dev/null @@ -1,39 +0,0 @@ -template: | - ## What’s Changed - - $CHANGES - -name-template: 'v$RESOLVED_VERSION' -tag-template: 'v$RESOLVED_VERSION' - -categories: - - title: 'IMPORTANT' - label: 'type: important' - - title: 'New' - label: 'type: feature' - - title: 'Bug Fixes' - label: 'type: bug' - - title: 'Refactor' - label: 'type: maintenance' - - title: 'Documentation' - label: 'type: docs' - - title: 'Dependency Updates' - label: 'type: dependencies' - -version-resolver: - minor: - labels: - - 'type: important' - - patch: - labels: - - 'type: bug' - - 'type: maintenance' - - 'type: docs' - - 'type: dependencies' - - 'type: feature' - -exclude-labels: - - 'skip-changelog' - - 'gradle-wrapper' - - 'github_actions' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 70b2df207..000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,81 +0,0 @@ -name: CI - -on: [ push, pull_request ] - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 0 - - # MicroSensor Analysis - - - name: Perform static source code analysis - uses: microsensorproject/microsensor@development - - - name: Gradle wrapper validation - uses: gradle/wrapper-validation-action@v1 - - name: Set up Zulu JDK 11 - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: '11' - - name: Cache SonarCloud packages - uses: actions/cache@v3 - with: - path: ~/.sonar/cache - key: ${{ runner.os }}-sonar - restore-keys: ${{ runner.os }}-sonar - - name: Cache Gradle packages - uses: actions/cache@v3 - 
with: - path: | - ~/.gradle/caches - ~/.gradle/wrapper - key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} - restore-keys: ${{ runner.os }}-gradle- - - name: Build with Gradle - if: github.ref != 'refs/heads/main' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: | - ./gradlew build --scan - - name: Build and Publish snapshot - if: github.event_name != 'pull_request' && github.ref == 'refs/heads/main' - run: | - echo "Running build for commit ${{ github.sha }}" - ./gradlew build snapshot --scan - env: - NETFLIX_OSS_SIGNING_KEY: ${{ secrets.ORG_SIGNING_KEY }} - NETFLIX_OSS_SIGNING_PASSWORD: ${{ secrets.ORG_SIGNING_PASSWORD }} - NETFLIX_OSS_REPO_USERNAME: ${{ secrets.ORG_NETFLIXOSS_USERNAME }} - NETFLIX_OSS_REPO_PASSWORD: ${{ secrets.ORG_NETFLIXOSS_PASSWORD }} - - name: Upload build artifacts - uses: actions/upload-artifact@v3 - with: - name: build-artifacts - path: '**/build/reports' - - name: Store Buildscan URL - uses: actions/upload-artifact@v3 - with: - name: build-scan - path: 'buildscan.log' - build-ui: - runs-on: ubuntu-latest - defaults: - run: - working-directory: ui - steps: - - uses: actions/checkout@v3 - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: '14.x' - - name: Install dependencies - run: yarn install - - run: yarn run build - diff --git a/.github/workflows/microsensor.yaml b/.github/workflows/microsensor.yaml deleted file mode 100644 index 90a05f63d..000000000 --- a/.github/workflows/microsensor.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: MicroSensor - -on: [push] - -jobs: - microsensor: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Perform static source code analysis - uses: microsensorproject/microsensor@development diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index e8d5a5ca0..000000000 --- a/.github/workflows/publish.yml +++ 
/dev/null @@ -1,61 +0,0 @@ -name: Publish to NetflixOSS and Maven Central -on: - release: - types: - - released - - prereleased - -permissions: - contents: read - -jobs: - publish: - runs-on: ubuntu-latest - name: Gradle Build and Publish - steps: - - uses: actions/checkout@v3 - - name: Set up Zulu JDK 11 - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: '11' - - name: Cache Gradle packages - uses: actions/cache@v3 - with: - path: | - ~/.gradle/caches - ~/.gradle/wrapper - key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} - restore-keys: | - ${{ runner.os }}-gradle- - - name: Publish candidate - if: startsWith(github.ref, 'refs/tags/v') && contains(github.ref, '-rc.') - run: ./gradlew -Prelease.useLastTag=true candidate --scan - env: - NETFLIX_OSS_SONATYPE_USERNAME: ${{ secrets.ORG_SONATYPE_USERNAME }} - NETFLIX_OSS_SONATYPE_PASSWORD: ${{ secrets.ORG_SONATYPE_PASSWORD }} - NETFLIX_OSS_SIGNING_KEY: ${{ secrets.ORG_SIGNING_KEY }} - NETFLIX_OSS_SIGNING_PASSWORD: ${{ secrets.ORG_SIGNING_PASSWORD }} - NETFLIX_OSS_REPO_USERNAME: ${{ secrets.ORG_NETFLIXOSS_USERNAME }} - NETFLIX_OSS_REPO_PASSWORD: ${{ secrets.ORG_NETFLIXOSS_PASSWORD }} - - name: Publish release - if: startsWith(github.ref, 'refs/tags/v') && (!contains(github.ref, '-rc.')) - run: ./gradlew -Prelease.useLastTag=true final --scan - env: - NETFLIX_OSS_SONATYPE_USERNAME: ${{ secrets.ORG_SONATYPE_USERNAME }} - NETFLIX_OSS_SONATYPE_PASSWORD: ${{ secrets.ORG_SONATYPE_PASSWORD }} - NETFLIX_OSS_SIGNING_KEY: ${{ secrets.ORG_SIGNING_KEY }} - NETFLIX_OSS_SIGNING_PASSWORD: ${{ secrets.ORG_SIGNING_PASSWORD }} - NETFLIX_OSS_REPO_USERNAME: ${{ secrets.ORG_NETFLIXOSS_USERNAME }} - NETFLIX_OSS_REPO_PASSWORD: ${{ secrets.ORG_NETFLIXOSS_PASSWORD }} - - name: Publish tag to community repo - if: startsWith(github.ref, 'refs/tags/v') - run: | - export TAG=$(git describe --tags --abbrev=0) - echo "Current release version is $TAG" - echo "Triggering community 
build" - curl \ - -H "Accept: application/vnd.github.v3+json" \ - -H "Authorization: Bearer ${{ secrets.COMMUNITY_REPO_TRIGGER }}" \ - -X POST https://api.github.com/repos/Netflix/conductor-community/dispatches \ - -d '{"event_type": "publish_build","client_payload": {"tag":"'"$TAG"'"}}' diff --git a/.github/workflows/release_draft.yml b/.github/workflows/release_draft.yml deleted file mode 100644 index 2f185417d..000000000 --- a/.github/workflows/release_draft.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Release Drafter - -on: - push: - branches: - - main - -permissions: - contents: read - -jobs: - update_release_draft: - permissions: - contents: write # for release-drafter/release-drafter to create a github release - pull-requests: write # for release-drafter/release-drafter to add label to PR - runs-on: ubuntu-latest - steps: - - uses: release-drafter/release-drafter@v5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 7143e8a7a..000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Close stale issues and pull requests - -on: - schedule: - - cron: "0 0 * * *" - -permissions: - contents: read - -jobs: - stale: - permissions: - issues: write # for actions/stale to close stale issues - pull-requests: write # for actions/stale to close stale PRs - runs-on: ubuntu-latest - steps: - - uses: actions/stale@v5 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - stale-issue-message: 'This issue is stale, because it has been open for 45 days with no activity. Remove the stale label or comment, or this will be closed in 7 days.' - close-issue-message: 'This issue was closed, because it has been stalled for 7 days with no activity.' - stale-pr-message: 'This PR is stale, because it has been open for 45 days with no activity. Remove the stale label or comment, or this will be closed in 7 days.' 
- close-pr-message: 'This PR was closed, because it has been stalled for 7 days with no activity.' - days-before-issue-stale: 45 - days-before-issue-close: 7 - days-before-pr-stale: 45 - days-before-pr-close: 7 - exempt-issue-labels: 'type: bug,enhancement,work_in_progress,help_wanted' diff --git a/.github/workflows/update-gradle-wrapper.yml b/.github/workflows/update-gradle-wrapper.yml deleted file mode 100644 index db3a8b1e2..000000000 --- a/.github/workflows/update-gradle-wrapper.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Update Gradle Wrapper - -on: - schedule: - - cron: "0 0 * * *" - workflow_dispatch: - -jobs: - update-gradle-wrapper: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Zulu JDK 11 - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: '11' - - name: Update Gradle Wrapper - uses: gradle-update/update-gradle-wrapper-action@v1 diff --git a/.gitignore b/.gitignore deleted file mode 100644 index f79fb99da..000000000 --- a/.gitignore +++ /dev/null @@ -1,34 +0,0 @@ -# Java Build -.gradle -.classpath -dump.rdb -out -bin -target -buildscan.log -/docs/site - -# Python -/polyglot-clients/python/conductor.egg-info -*.pyc - -# OS & IDE -.DS_Store -.settings -.vscode -.idea -.project -*.iml - -# JS & UI Related -node_modules -/ui/build - -# publishing secrets -secrets/signing-key - -# local builds -lib/ -build/ -*/build/ - diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index beb97c8ef..000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,369 +0,0 @@ -Conductor has been upgraded to use the SpringBoot framework and requires Java11 or above. -#### NOTE: The java clients (conductor-client, conductor-client-spring, conductor-grpc-client) are still compiled using Java8 to ensure backward compatibility and smoother migration. 
- -## Removals/Deprecations -- Removed support for EmbeddedElasticSearch -- Removed deprecated constructors in DynoQueueDAO -- Removed deprecated methods in the Worker interface -- Removed OAuth Support in HTTP task (Looking for contributions for OAuth/OAuth2.0) -- Removed deprecated fields and methods in the Workflow object -- Removed deprecated fields and methods in the Task object -- Removed deprecated fields and methods in the WorkflowTask object - -Removed unused methods from QueueDAO: -- List pop(String, int, int, long) -- List pollMessages(String, int, int, long) - -Removed APIs: -- GET /tasks/in_progress/{tasktype} -- GET /tasks/in_progress/{workflowId}/{taskRefName} -- POST /tasks/{taskId}/ack -- POST /tasks/queue/requeue -- DELETE /queue/{taskType}/{taskId} - - -- GET /event/queues -- GET /event/queues/providers - - -- void restart(String) in workflow client -- List getPendingTasksByType(String, String, Integer) in task client -- Task getPendingTaskForWorkflow(String, String) in task client -- boolean preAck(Task) in Worker -- int getPollCount() in Worker - -## What's changed -Changes to configurations: - -### `azureblob-storage` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.external.payload.storage.azure_blob.connection_string | conductor.external-payload-storage.azureblob.connectionString | null | -| workflow.external.payload.storage.azure_blob.container_name | conductor.external-payload-storage.azureblob.containerName | conductor-payloads | -| workflow.external.payload.storage.azure_blob.endpoint | conductor.external-payload-storage.azureblob.endpoint | null | -| workflow.external.payload.storage.azure_blob.sas_token | conductor.external-payload-storage.azureblob.sasToken | null | -| workflow.external.payload.storage.azure_blob.signedurlexpirationseconds | conductor.external-payload-storage.azureblob.signedUrlExpirationDuration | 5s | -| workflow.external.payload.storage.azure_blob.workflow_input_path | 
conductor.external-payload-storage.azureblob.workflowInputPath | workflow/input/ | -| workflow.external.payload.storage.azure_blob.workflow_output_path | conductor.external-payload-storage.azureblob.workflowOutputPath | workflow/output/ | -| workflow.external.payload.storage.azure_blob.task_input_path | conductor.external-payload-storage.azureblob.taskInputPath | task/input/ | -| workflow.external.payload.storage.azure_blob.task_output_path | conductor.external-payload-storage.azureblob.taskOutputPath | task/output/ | - -### `cassandra-persistence` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.cassandra.host | conductor.cassandra.hostAddress | 127.0.0.1 | -| workflow.cassandra.port | conductor.cassandra.port | 9142 | -| workflow.cassandra.cluster | conductor.cassandra.cluster | "" | -| workflow.cassandra.keyspace | conductor.cassandra.keyspace | conductor | -| workflow.cassandra.shard.size | conductor.cassandra.shardSize | 100 | -| workflow.cassandra.replication.strategy | conductor.cassandra.replicationStrategy | SimpleStrategy | -| workflow.cassandra.replication.factor.key | conductor.cassandra.replicationFactorKey | replication_factor | -| workflow.cassandra.replication.factor.value | conductor.cassandra.replicationFactorValue | 3 | -| workflow.cassandra.read.consistency.level | conductor.cassandra.readConsistencyLevel | LOCAL_QUORUM | -| workflow.cassandra.write.consistency.level | conductor.cassandra.writeConsistencyLevel | LOCAL_QUORUM | -| conductor.taskdef.cache.refresh.time.seconds | conductor.cassandra.taskDefCacheRefreshInterval | 60s | -| conductor.eventhandler.cache.refresh.time.seconds | conductor.cassandra.eventHandlerCacheRefreshInterval | 60s | -| workflow.event.execution.persistence.ttl.seconds | conductor.cassandra.eventExecutionPersistenceTTL | 0s | - -### `contribs` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.archival.ttl.seconds | conductor.workflow-status-listener.archival.ttlDuration | 0s | -| 
workflow.archival.delay.queue.worker.thread.count | conductor.workflow-status-listener.archival.delayQueueWorkerThreadCount | 5 | -| workflow.archival.delay.seconds | conductor.workflow-status-listener.archival.delaySeconds | 60 | -| | | -| workflowstatuslistener.publisher.success.queue | conductor.workflow-status-listener.queue-publisher.successQueue | _callbackSuccessQueue | -| workflowstatuslistener.publisher.failure.queue | conductor.workflow-status-listener.queue-publisher.failureQueue | _callbackFailureQueue | -| | | | -| com.netflix.conductor.contribs.metrics.LoggingMetricsModule.reportPeriodSeconds | conductor.metrics-logger.reportInterval | 30s | -| | | | -| workflow.event.queues.amqp.batchSize | conductor.event-queues.amqp.batchSize | 1 | -| workflow.event.queues.amqp.pollTimeInMs | conductor.event-queues.amqp.pollTimeDuration | 100ms | -| workflow.event.queues.amqp.hosts | conductor.event-queues.amqp.hosts | localhost | -| workflow.event.queues.amqp.username | conductor.event-queues.amqp.username | guest | -| workflow.event.queues.amqp.password | conductor.event-queues.amqp.password | guest | -| workflow.event.queues.amqp.virtualHost | conductor.event-queues.amqp.virtualHost | / | -| workflow.event.queues.amqp.port | conductor.event-queues.amqp.port.port | 5672 | -| workflow.event.queues.amqp.connectionTimeout | conductor.event-queues.amqp.connectionTimeout | 60000ms | -| workflow.event.queues.amqp.useNio | conductor.event-queues.amqp.useNio | false | -| workflow.event.queues.amqp.durable | conductor.event-queues.amqp.durable | true | -| workflow.event.queues.amqp.exclusive | conductor.event-queues.amqp.exclusive | false | -| workflow.event.queues.amqp.autoDelete | conductor.event-queues.amqp.autoDelete | false | -| workflow.event.queues.amqp.contentType | conductor.event-queues.amqp.contentType | application/json | -| workflow.event.queues.amqp.contentEncoding | conductor.event-queues.amqp.contentEncoding | UTF-8 | -| 
workflow.event.queues.amqp.amqp_exchange | conductor.event-queues.amqp.exchangeType | topic | -| workflow.event.queues.amqp.deliveryMode | conductor.event-queues.amqp.deliveryMode | 2 | -| workflow.listener.queue.useExchange | conductor.event-queues.amqp.useExchange | true | -| workflow.listener.queue.prefix | conductor.event-queues.amqp.listenerQueuePrefix | "" | -| | | | -| io.nats.streaming.clusterId | conductor.event-queues.nats-stream.clusterId | test-cluster | -| io.nats.streaming.durableName | conductor.event-queues.nats-stream.durableName | null | -| io.nats.streaming.url | conductor.event-queues.nats-stream.url | nats://localhost:4222 | -| | | | -| workflow.event.queues.sqs.batchSize | conductor.event-queues.sqs.batchSize | 1 | -| workflow.event.queues.sqs.pollTimeInMS | conductor.event-queues.sqs.pollTimeDuration | 100ms | -| workflow.event.queues.sqs.visibilityTimeoutInSeconds | conductor.event-queues.sqs.visibilityTimeout | 60s | -| workflow.listener.queue.prefix | conductor.event-queues.sqs.listenerQueuePrefix | "" | -| workflow.listener.queue.authorizedAccounts | conductor.event-queues.sqs.authorizedAccounts | "" | -| | | | -| workflow.external.payload.storage.s3.bucket | conductor.external-payload-storage.s3.bucketName | conductor_payloads | -| workflow.external.payload.storage.s3.signedurlexpirationseconds | conductor.external-payload-storage.s3.signedUrlExpirationDuration | 5s | -| workflow.external.payload.storage.s3.region | conductor.external-payload-storage.s3.region | us-east-1 | -| | | | -| http.task.read.timeout | conductor.tasks.http.readTimeout | 150ms | -| http.task.connect.timeout | conductor.tasks.http.connectTimeout | 100ms | -| | | | -| kafka.publish.request.timeout.ms | conductor.tasks.kafka-publish.requestTimeout | 100ms | -| kafka.publish.max.block.ms | conductor.tasks.kafka-publish.maxBlock | 500ms | -| kafka.publish.producer.cache.size | conductor.tasks.kafka-publish.cacheSize | 10 | -| kafka.publish.producer.cache.time.ms | 
conductor.tasks.kafka-publish.cacheTime | 120000ms | - -### `core` module: - -| Old | New | Default | -| --- | --- | --- | -| environment | _removed_ | | -| STACK | conductor.app.stack | test | -| APP_ID | conductor.app.appId | conductor | -| workflow.executor.service.max.threads | conductor.app.executorServiceMaxThreadCount | 50 | -| decider.sweep.frequency.seconds | conductor.app.sweepFrequency | 30s | -| workflow.sweeper.thread.count | conductor.app.sweeperThreadCount | 5 | -| workflow.event.processor.thread.count | conductor.app.eventProcessorThreadCount | 2 | -| workflow.event.message.indexing.enabled | conductor.app.eventMessageIndexingEnabled | true | -| workflow.event.execution.indexing.enabled | conductor.app.eventExecutionIndexingEnabled | true | -| workflow.decider.locking.enabled | conductor.app.workflowExecutionLockEnabled | false | -| workflow.locking.lease.time.ms | conductor.app.lockLeaseTime | 60000ms | -| workflow.locking.time.to.try.ms | conductor.app.lockTimeToTry | 500ms | -| tasks.active.worker.lastpoll | conductor.app.activeWorkerLastPollTimeout | 10s | -| task.queue.message.postponeSeconds | conductor.app.taskExecutionPostponeDuration | 60s | -| workflow.taskExecLog.indexing.enabled | conductor.app.taskExecLogIndexingEnabled | true | -| async.indexing.enabled | conductor.app.asyncIndexingEnabled | false | -| workflow.system.task.worker.thread.count | conductor.app.systemTaskWorkerThreadCount | # available processors * 2 | -| workflow.system.task.worker.callback.seconds | conductor.app.systemTaskWorkerCallbackDuration | 30s | -| workflow.system.task.worker.poll.interval | conductor.app.systemTaskWorkerPollInterval | 50s | -| workflow.system.task.worker.executionNameSpace | conductor.app.systemTaskWorkerExecutionNamespace | "" | -| workflow.isolated.system.task.worker.thread.count | conductor.app.isolatedSystemTaskWorkerThreadCount | 1 | -| workflow.system.task.queue.pollCount | conductor.app.systemTaskMaxPollCount | 1 | -| 
async.update.short.workflow.duration.seconds | conductor.app.asyncUpdateShortRunningWorkflowDuration | 30s | -| async.update.delay.seconds | conductor.app.asyncUpdateDelay | 60s | -| summary.input.output.json.serialization.enabled | conductor.app.summary-input-output-json-serialization.enabled | false | -| workflow.owner.email.mandatory | conductor.app.ownerEmailMandatory | true | -| workflow.repairservice.enabled | conductor.app.workflowRepairServiceEnabled | false | -| workflow.event.queue.scheduler.poll.thread.count | conductor.app.eventSchedulerPollThreadCount | # CPU cores | -| workflow.dyno.queues.pollingInterval | conductor.app.eventQueuePollInterval | 100ms | -| workflow.dyno.queues.pollCount | conductor.app.eventQueuePollCount | 10 | -| workflow.dyno.queues.longPollTimeout | conductor.app.eventQueueLongPollTimeout | 1000ms | -| conductor.workflow.input.payload.threshold.kb | conductor.app.workflowInputPayloadSizeThreshold | 5120KB | -| conductor.max.workflow.input.payload.threshold.kb | conductor.app.maxWorkflowInputPayloadSizeThreshold | 10240KB | -| conductor.workflow.output.payload.threshold.kb | conductor.app.workflowOutputPayloadSizeThreshold | 5120KB | -| conductor.max.workflow.output.payload.threshold.kb | conductor.app.maxWorkflowOutputPayloadSizeThreshold | 10240KB | -| conductor.task.input.payload.threshold.kb | conductor.app.taskInputPayloadSizeThreshold | 3072KB | -| conductor.max.task.input.payload.threshold.kb | conductor.app.maxTaskInputPayloadSizeThreshold | 10240KB | -| conductor.task.output.payload.threshold.kb | conductor.app.taskOutputPayloadSizeThreshold | 3072KB | -| conductor.max.task.output.payload.threshold.kb | conductor.app.maxTaskOutputPayloadSizeThreshold | 10240KB | -| conductor.max.workflow.variables.payload.threshold.kb | conductor.app.maxWorkflowVariablesPayloadSizeThreshold | 256KB | -| | | | -| workflow.isolated.system.task.enable | conductor.app.isolatedSystemTaskEnabled | false | -| 
workflow.isolated.system.task.poll.time.secs | conductor.app.isolatedSystemTaskQueuePollInterval | 10s | -| | | | -| workflow.task.pending.time.threshold.minutes | conductor.app.taskPendingTimeThreshold | 60m | -| | | | -| workflow.monitor.metadata.refresh.counter | conductor.workflow-monitor.metadataRefreshInterval | 10 | -| workflow.monitor.stats.freq.seconds | conductor.workflow-monitor.statsFrequency | 60s | - -### `es6-persistence` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.elasticsearch.version | conductor.elasticsearch.version | 6 | -| workflow.elasticsearch.url | conductor.elasticsearch.url | localhost:9300 | -| workflow.elasticsearch.index.name | conductor.elasticsearch.indexPrefix | conductor | -| workflow.elasticsearch.tasklog.index.name | _removed_ | | -| workflow.elasticsearch.cluster.health.color | conductor.elasticsearch.clusterHealthColor | green | -| workflow.elasticsearch.archive.search.batchSize | _removed_ | | -| workflow.elasticsearch.index.batchSize | conductor.elasticsearch.indexBatchSize | 1 | -| workflow.elasticsearch.async.dao.worker.queue.size | conductor.elasticsearch.asyncWorkerQueueSize | 100 | -| workflow.elasticsearch.async.dao.max.pool.size | conductor.elasticsearch.asyncMaxPoolSize | 12 | -| workflow.elasticsearch.async.buffer.flush.timeout.seconds | conductor.elasticsearch.asyncBufferFlushTimeout | 10s | -| workflow.elasticsearch.index.shard.count | conductor.elasticsearch.indexShardCount | 5 | -| workflow.elasticsearch.index.replicas.count | conductor.elasticsearch.indexReplicasCount | 1 | -| tasklog.elasticsearch.query.size | conductor.elasticsearch.taskLogResultLimit | 10 | -| workflow.elasticsearch.rest.client.connectionRequestTimeout.milliseconds | conductor.elasticsearch.restClientConnectionRequestTimeout | -1 | -| workflow.elasticsearch.auto.index.management.enabled | conductor.elasticsearch.autoIndexManagementEnabled | true | -| workflow.elasticsearch.document.type.override | 
conductor.elasticsearch.documentTypeOverride | "" | - -### `es7-persistence` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.elasticsearch.version | conductor.elasticsearch.version | 7 | -| workflow.elasticsearch.url | conductor.elasticsearch.url | localhost:9300 | -| workflow.elasticsearch.index.name | conductor.elasticsearch.indexPrefix | conductor | -| workflow.elasticsearch.tasklog.index.name | _removed_ | | -| workflow.elasticsearch.cluster.health.color | conductor.elasticsearch.clusterHealthColor | green | -| workflow.elasticsearch.archive.search.batchSize | _removed_ | | -| workflow.elasticsearch.index.batchSize | conductor.elasticsearch.indexBatchSize | 1 | -| workflow.elasticsearch.async.dao.worker.queue.size | conductor.elasticsearch.asyncWorkerQueueSize | 100 | -| workflow.elasticsearch.async.dao.max.pool.size | conductor.elasticsearch.asyncMaxPoolSize | 12 | -| workflow.elasticsearch.async.buffer.flush.timeout.seconds | conductor.elasticsearch.asyncBufferFlushTimeout | 10s | -| workflow.elasticsearch.index.shard.count | conductor.elasticsearch.indexShardCount | 5 | -| workflow.elasticsearch.index.replicas.count | conductor.elasticsearch.indexReplicasCount | 1 | -| tasklog.elasticsearch.query.size | conductor.elasticsearch.taskLogResultLimit | 10 | -| workflow.elasticsearch.rest.client.connectionRequestTimeout.milliseconds | conductor.elasticsearch.restClientConnectionRequestTimeout | -1 | -| workflow.elasticsearch.auto.index.management.enabled | conductor.elasticsearch.autoIndexManagementEnabled | true | -| workflow.elasticsearch.document.type.override | conductor.elasticsearch.documentTypeOverride | "" | -| workflow.elasticsearch.basic.auth.username | conductor.elasticsearch.username | "" | -| workflow.elasticsearch.basic.auth.password | conductor.elasticsearch.password | "" | - -### `grpc-server` module: - -| Old | New | Default | -| --- | --- | --- | -| conductor.grpc.server.port | conductor.grpc-server.port | 8090 | -| 
conductor.grpc.server.reflectionEnabled | conductor.grpc-server.reflectionEnabled | true | - -### `mysql-persistence` module (v3.0.0 - v3.0.5): - -| Old | New | Default | -| --- | --- | --- | -| jdbc.url | conductor.mysql.jdbcUrl | jdbc:mysql://localhost:3306/conductor | -| jdbc.username | conductor.mysql.jdbcUsername | conductor | -| jdbc.password | conductor.mysql.jdbcPassword | password | -| flyway.enabled | conductor.mysql.flywayEnabled | true | -| flyway.table | conductor.mysql.flywayTable | null | -| conductor.mysql.connection.pool.size.max | conductor.mysql.connectionPoolMaxSize | -1 | -| conductor.mysql.connection.pool.idle.min | conductor.mysql.connectionPoolMinIdle | -1 | -| conductor.mysql.connection.lifetime.max | conductor.mysql.connectionMaxLifetime | 30m | -| conductor.mysql.connection.idle.timeout | conductor.mysql.connectionIdleTimeout | 10m | -| conductor.mysql.connection.timeout | conductor.mysql.connectionTimeout | 30s | -| conductor.mysql.transaction.isolation.level | conductor.mysql.transactionIsolationLevel | "" | -| conductor.mysql.autocommit | conductor.mysql.autoCommit | false | -| conductor.taskdef.cache.refresh.time.seconds | conductor.mysql.taskDefCacheRefreshInterval | 60s | - -### `mysql-persistence` module (v3.0.5+): - -| Old | New | -| --- | --- | -| jdbc.url | spring.datasource.url | -| jdbc.username | spring.datasource.username | -| jdbc.password | spring.datasource.password | -| flyway.enabled | spring.flyway.enabled | -| flyway.table | spring.flyway.table | -| conductor.mysql.connection.pool.size.max | spring.datasource.hikari.maximum-pool-size | -| conductor.mysql.connection.pool.idle.min | spring.datasource.hikari.minimum-idle | -| conductor.mysql.connection.lifetime.max | spring.datasource.hikari.max-lifetime | -| conductor.mysql.connection.idle.timeout | spring.datasource.hikari.idle-timeout | -| conductor.mysql.connection.timeout | spring.datasource.hikari.connection-timeout | -| conductor.mysql.transaction.isolation.level 
| spring.datasource.hikari.transaction-isolation | -| conductor.mysql.autocommit | spring.datasource.hikari.auto-commit | -| conductor.taskdef.cache.refresh.time.seconds | conductor.mysql.taskDefCacheRefreshInterval | - -* for more properties and default values: https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#application-properties.data.spring.datasource.hikari - -### `postgres-persistence` module (v3.0.0 - v3.0.5): - -| Old | New | Default | -| --- | --- | --- | -| jdbc.url | conductor.postgres.jdbcUrl | jdbc:postgresql://localhost:5432/conductor | -| jdbc.username | conductor.postgres.jdbcUsername | conductor | -| jdbc.password | conductor.postgres.jdbcPassword | password | -| flyway.enabled | conductor.postgres.flywayEnabled | true | -| flyway.table | conductor.postgres.flywayTable | null | -| conductor.postgres.connection.pool.size.max | conductor.postgres.connectionPoolMaxSize | -1 | -| conductor.postgres.connection.pool.idle.min | conductor.postgres.connectionPoolMinIdle | -1 | -| conductor.postgres.connection.lifetime.max | conductor.postgres.connectionMaxLifetime | 30m | -| conductor.postgres.connection.idle.timeout | conductor.postgres.connectionIdleTimeout | 10m | -| conductor.postgres.connection.timeout | conductor.postgres.connectionTimeout | 30s | -| conductor.postgres.transaction.isolation.level | conductor.postgres.transactionIsolationLevel | "" | -| conductor.postgres.autocommit | conductor.postgres.autoCommit | false | -| conductor.taskdef.cache.refresh.time.seconds | conductor.postgres.taskDefCacheRefreshInterval | 60s | - -### `postgres-persistence` module (v3.0.5+): - -| Old | New | -| --- | --- | -| jdbc.url | spring.datasource.url | -| jdbc.username | spring.datasource.username | -| jdbc.password | spring.datasource.password | -| flyway.enabled | spring.flyway.enabled | -| flyway.table | spring.flyway.table | -| conductor.postgres.connection.pool.size.max | spring.datasource.hikari.maximum-pool-size | -| 
conductor.postgres.connection.pool.idle.min | spring.datasource.hikari.minimum-idle | -| conductor.postgres.connection.lifetime.max | spring.datasource.hikari.max-lifetime | -| conductor.postgres.connection.idle.timeout | spring.datasource.hikari.idle-timeout | -| conductor.postgres.connection.timeout | spring.datasource.hikari.connection-timeout | -| conductor.postgres.transaction.isolation.level | spring.datasource.hikari.transaction-isolation | -| conductor.postgres.autocommit | spring.datasource.hikari.auto-commit | -| conductor.taskdef.cache.refresh.time.seconds | conductor.postgres.taskDefCacheRefreshInterval | - -* for more properties and default values: https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#application-properties.data.spring.datasource.hikari - -### `redis-lock` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.redis.locking.server.type | conductor.redis-lock.serverType | single | -| workflow.redis.locking.server.address | conductor.redis-lock.serverAddress | redis://127.0.0.1:6379 | -| workflow.redis.locking.server.password | conductor.redis-lock.serverPassword | null | -| workflow.redis.locking.server.master.name | conductor.redis-lock.serverMasterName | master | -| workflow.decider.locking.namespace | conductor.redis-lock.namespace | "" | -| workflow.decider.locking.exceptions.ignore | conductor.redis-lock.ignoreLockingExceptions | false | - -### `redis-persistence` module: - -| Old | New | Default | -| --- | --- | --- | -| EC2_REGION | conductor.redis.dataCenterRegion | us-east-1 | -| EC2_AVAILABILITY_ZONE | conductor.redis.availabilityZone | us-east-1c | -| workflow.dynomite.cluster | _removed_ | -| workflow.dynomite.cluster.name | conductor.redis.clusterName | "" | -| workflow.dynomite.cluster.hosts | conductor.redis.hosts | null | -| workflow.namespace.prefix | conductor.redis.workflowNamespacePrefix | null | -| workflow.namespace.queue.prefix | conductor.redis.queueNamespacePrefix | null | -| 
workflow.dyno.keyspace.domain | conductor.redis.keyspaceDomain | null | -| workflow.dynomite.connection.maxConnsPerHost | conductor.redis.maxConnectionsPerHost | 10 | -| workflow.dynomite.connection.max.retry.attempt | conductor.redis.maxRetryAttempts | 0 | -| workflow.dynomite.connection.max.timeout.exhausted.ms | conductor.redis.maxTimeoutWhenExhausted | 800ms | -| queues.dynomite.nonQuorum.port | conductor.redis.queuesNonQuorumPort | 22122 | -| workflow.dyno.queue.sharding.strategy | conductor.redis.queueShardingStrategy | roundRobin | -| conductor.taskdef.cache.refresh.time.seconds | conductor.redis.taskDefCacheRefreshInterval | 60s | -| workflow.event.execution.persistence.ttl.seconds | conductor.redis.eventExecutionPersistenceTTL | 60s | - -### `zookeeper-lock` module: - -| Old | New | Default | -| --- | --- | --- | -| workflow.zookeeper.lock.connection | conductor.zookeeper-lock.connectionString | localhost:2181 | -| workflow.zookeeper.lock.sessionTimeoutMs | conductor.zookeeper-lock.sessionTimeout | 60000ms | -| workflow.zookeeper.lock.connectionTimeoutMs | conductor.zookeeper-lock.connectionTimeout | 15000ms | -| workflow.decider.locking.namespace | conductor.zookeeper-lock.namespace | "" | - -### Component configuration: - -| Old | New | Default | -| --- | --- | --- | -| db | conductor.db.type | "" | -| workflow.indexing.enabled | conductor.indexing.enabled | true | -| conductor.disable.async.workers | conductor.system-task-workers.enabled | true | -| decider.sweep.disable | conductor.workflow-reconciler.enabled | true | -| conductor.grpc.server.enabled | conductor.grpc-server.enabled | false | -| workflow.external.payload.storage | conductor.external-payload-storage.type | dummy | -| workflow.default.event.processor.enabled | conductor.default-event-processor.enabled | true | -| workflow.events.default.queue.type | conductor.default-event-queue.type | sqs | -| workflow.status.listener.type | conductor.workflow-status-listener.type | stub | -| 
workflow.decider.locking.server | conductor.workflow-execution-lock.type | noop_lock | -| | | | -| workflow.default.event.queue.enabled | conductor.event-queues.default.enabled | true | -| workflow.sqs.event.queue.enabled | conductor.event-queues.sqs.enabled | false | -| workflow.amqp.event.queue.enabled | conductor.event-queues.amqp.enabled | false | -| workflow.nats.event.queue.enabled | conductor.event-queues.nats.enabled | false | -| workflow.nats_stream.event.queue.enabled | conductor.event-queues.nats-stream.enabled | false | -| | | | -| - | conductor.metrics-logger.enabled | false | -| - | conductor.metrics-prometheus.enabled | false | -| - | conductor.metrics-datadog.enable | false | -| - | conductor.metrics-datadog.api-key | | - diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index b9c30c329..000000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1 +0,0 @@ -[Code of Conduct](docs/docs/resources/code-of-conduct.md) \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index ba24e0019..000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -[Contributing](docs/docs/resources/contributing.md) \ No newline at end of file diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 6a1d025d8..000000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright {yyyy} Netflix, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/OSSMETADATA b/OSSMETADATA deleted file mode 100644 index b96d4a4df..000000000 --- a/OSSMETADATA +++ /dev/null @@ -1 +0,0 @@ -osslifecycle=active diff --git a/README.md b/README.md deleted file mode 100644 index bde5d0180..000000000 --- a/README.md +++ /dev/null @@ -1,91 +0,0 @@ -![Conductor](docs/docs/img/logo.png) - -# Conductor -Conductor is a platform created by Netflix to orchestrate workflows that span across microservices. - -[![Github release](https://img.shields.io/github/v/release/Netflix/conductor.svg)](https://GitHub.com/Netflix/conductor/releases) -[![CI](https://github.com/Netflix/conductor/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/Netflix/conductor/actions/workflows/ci.yml) -[![License](https://img.shields.io/github/license/Netflix/conductor.svg)](http://www.apache.org/licenses/LICENSE-2.0) -[![NetflixOSS Lifecycle](https://img.shields.io/osslifecycle/Netflix/conductor.svg)]() - - -## Documentation -[Main Documentation Site](http://conductor.netflix.com/) - -## Releases -The latest version is [![Github release](https://img.shields.io/github/v/release/Netflix/conductor.svg)](https://GitHub.com/Netflix/conductor/releases) - -[2.31.8](https://github.com/Netflix/conductor/releases/tag/v2.31.8) is the **final** release of `2.31` branch. As of Feb 2022, `1.x` & `2.x` versions are no longer supported. 
- -## Community Contributions -The modules contributed by the community are housed at [conductor-community](https://github.com/Netflix/conductor-community). Compatible versions of the community modules are released simultaneously with releases of the main modules. - -[Discussion Forum](https://github.com/Netflix/conductor/discussions) Please use the forum for questions and discussing ideas and join the community. - -[List of Conductor community projects](/docs/docs/resources/related.md) - Backup tool, Cron like workflow starter, Docker containers... - -## Getting Started - Building & Running Conductor -### Docker -The easiest way to get started is with Docker containers. Please follow the instructions [here](https://conductor.netflix.com/gettingstarted/docker.html). - -### From Source -Conductor Server is a [Spring Boot](https://spring.io/projects/spring-boot) project and follows all applicable conventions. See instructions [here](http://conductor.netflix.com/gettingstarted/source.html). - - -## Published Artifacts -Binaries are available from [Netflix OSS Maven](https://artifacts.netflix.net/netflixoss/com/netflix/conductor/) repository, or the [Maven Central Repository](https://search.maven.org/search?q=g:com.netflix.conductor). 
- -| Artifact | Description | -|---------------------------------|-------------------------------------------------------------------------------------------------| -| conductor-common | Common models used by various conductor modules | -| conductor-core | Core Conductor module | -| conductor-redis-persistence | Persistence and queue using Redis/Dynomite | -| conductor-cassandra-persistence | Persistence using Cassandra | -| conductor-es6-persistence | Indexing using Elasticsearch 6.X | -| conductor-rest | Spring MVC resources for the core services | -| conductor-ui | node.js based UI for Conductor | -| conductor-client | Java client for Conductor that includes helpers for running worker tasks | -| conductor-client-spring | Client starter kit for Spring | -| conductor-server | Spring Boot Web Application | -| conductor-redis-lock | Workflow execution lock implementation using Redis | -| conductor-awss3-storage | External payload storage implementation using AWS S3 | -| conductor-awssqs-event-queue | Event queue implementation using AWS SQS | -| conductor-http-task | Workflow system task implementation to send make requests | -| conductor-json-jq-task | Workflow system task implementation to evaluate JSON using [jq](https://stedolan.github.io/jq/) | -| conductor-grpc | Protobuf models used by the server and client | -| conductor-grpc-client | gRPC server Application | -| conductor-grpc-server | gRPC client to interact with the gRPC server | -| conductor-test-harness | Integration and regression tests | - -## Database Requirements - -* The default persistence used is [Dynomite](https://github.com/Netflix/dynomite) -* For queues, we are relying on [dyno-queues](https://github.com/Netflix/dyno-queues) -* The indexing backend is [Elasticsearch](https://www.elastic.co/) (6.x) - -## Other Requirements -* JDK 11+ -* UI requires Node 14 to build. Earlier Node versions may work but is untested. 
- -## Get Support -Conductor is maintained by Media Workflow Infrastructure team at Netflix. Use Github issue tracking for filing issues and [Discussion Forum](https://github.com/Netflix/conductor/discussions) for any other questions, ideas or support requests. - -## Contributions -Whether it is a small documentation correction, bug fix or new features, contributions are highly appreciated. We just ask to follow standard oss guidelines. [Discussion Forum](https://github.com/Netflix/conductor/discussions) is a good place to ask questions, discuss new features and explore ideas. Please check with us before spending too much time, only to find later that someone else is already working on a similar feature. - -`main` branch is the current working branch. Please send your PR's to `main` branch, making sure that it builds on your local system successfully. Also, please make sure all the conflicts are resolved. - -## License -Copyright 2022 Netflix, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/RELATED.md b/RELATED.md deleted file mode 100644 index b7adabea3..000000000 --- a/RELATED.md +++ /dev/null @@ -1 +0,0 @@ -[Related Projects](docs/docs/resources/related.md) diff --git a/WHOSUSING.md b/WHOSUSING.md deleted file mode 100644 index e93606723..000000000 --- a/WHOSUSING.md +++ /dev/null @@ -1,15 +0,0 @@ - -## Who uses Conductor? - -We would like to keep track of whose using Conductor. Please send a pull request with your company name and Github handle. 
- -* [Netflix](www.netflix.com) [[@aravindanr](https://github.com/aravindanr)] -* [Florida Blue](www.bcbsfl.com) [[@rickfish](https://github.com/rickfish)] -* [UWM](www.uwm.com)[[@zergrushjoe](https://github.com/ZergRushJoe)] -* [Deutsche Telekom Digital Labs](https://dtdl.in) [[@jas34](https://github.com/jas34)] [[@deoramanas](https://github.com/deoramanas)] -* [VMware](www.vmware.com) [[@taojwmware](https://github.com/taojwmware)] [[@venkag](https://github.com/venkag)] -* [JP Morgan Chase](www.chase.com) [[@maheshyaddanapudi](https://github.com/maheshyaddanapudi)] -* [Orkes ](www.orkes.io)[[@CherishSantoshi](https://github.com/CherishSantoshi)] -* [313X](https://313x.com.br)[[@dalmoveras](https://github.com/dalmoveras)] -* [Supercharge](https://supercharge.io)[[@team-supercharge](https://github.com/team-supercharge)] - diff --git a/annotations-processor/README.md b/annotations-processor/README.md deleted file mode 100644 index 13ec4a3d0..000000000 --- a/annotations-processor/README.md +++ /dev/null @@ -1 +0,0 @@ -[Annotations Processor](docs/docs/reference-docs/annotations-processor.md) \ No newline at end of file diff --git a/annotations-processor/build.gradle b/annotations-processor/build.gradle deleted file mode 100644 index ac6ae8863..000000000 --- a/annotations-processor/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ - -sourceSets { - example -} - -dependencies { - implementation project(':conductor-annotations') - api 'com.google.guava:guava:31.1-jre' - api 'com.squareup:javapoet:1.13.+' - api 'com.github.jknack:handlebars:4.3.+' - api 'com.google.protobuf:protobuf-java:3.21.1' - api 'javax.annotation:javax.annotation-api:1.3.2' - api gradleApi() - - exampleImplementation sourceSets.main.output - exampleImplementation project(':conductor-annotations') -} - -task exampleJar(type: Jar) { - archiveFileName = 'example.jar' - from sourceSets.example.output.classesDirs -} - -testClasses.finalizedBy(exampleJar) diff --git a/annotations-processor/dependencies.lock 
b/annotations-processor/dependencies.lock deleted file mode 100644 index 1472ab8c8..000000000 --- a/annotations-processor/dependencies.lock +++ /dev/null @@ -1,235 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.github.jknack:handlebars": { - "locked": "4.3.0" - }, - "com.google.guava:guava": { - "locked": "31.1-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.21.1" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "com.squareup:javapoet": { - "locked": "1.13.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "exampleCompileClasspath": { - "com.netflix.conductor:conductor-annotations": { - "project": true - } - }, - "exampleRuntimeClasspath": { - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - 
], - "locked": "2.17.2" - } - }, - "runtimeClasspath": { - "com.github.jknack:handlebars": { - "locked": "4.3.0" - }, - "com.google.guava:guava": { - "locked": "31.1-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.21.1" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "com.squareup:javapoet": { - "locked": "1.13.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.github.jknack:handlebars": { - "locked": "4.3.0" - }, - "com.google.guava:guava": { - "locked": "31.1-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.21.1" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "com.squareup:javapoet": { - "locked": "1.13.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" 
- }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.github.jknack:handlebars": { - "locked": "4.3.0" - }, - "com.google.guava:guava": { - "locked": "31.1-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.21.1" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "com.squareup:javapoet": { - "locked": "1.13.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/annotations-processor/src/example/java/com/example/Example.java b/annotations-processor/src/example/java/com/example/Example.java deleted file mode 100644 index b3c7befe8..000000000 --- a/annotations-processor/src/example/java/com/example/Example.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.example; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -@ProtoMessage -public class Example { - @ProtoField(id = 1) - public String name; - - @ProtoField(id = 2) - public Long count; -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java deleted file mode 100644 index bc92d901f..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType; -import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeSpec; - -public abstract class AbstractMessage { - protected Class clazz; - protected MessageType type; - protected List fields = new ArrayList(); - protected List nested = new ArrayList<>(); - - public AbstractMessage(Class cls, MessageType parentType) { - assert cls.isAnnotationPresent(ProtoMessage.class) - || cls.isAnnotationPresent(ProtoEnum.class); - - this.clazz = cls; - this.type = TypeMapper.INSTANCE.declare(cls, parentType); - - for (Class nested : clazz.getDeclaredClasses()) { - if (nested.isEnum()) addNestedEnum(nested); - else addNestedClass(nested); - } - } - - private void addNestedEnum(Class cls) { - ProtoEnum ann = (ProtoEnum) cls.getAnnotation(ProtoEnum.class); - if (ann != null) { - nested.add(new Enum(cls, this.type)); - } - } - - private void addNestedClass(Class cls) { - ProtoMessage ann = (ProtoMessage) cls.getAnnotation(ProtoMessage.class); - if (ann != null) { - nested.add(new Message(cls, this.type)); - } - } - - public abstract String getProtoClass(); - - protected abstract void javaMapToProto(TypeSpec.Builder builder); - - protected abstract void javaMapFromProto(TypeSpec.Builder builder); - - public void 
generateJavaMapper(TypeSpec.Builder builder) { - javaMapToProto(builder); - javaMapFromProto(builder); - - for (AbstractMessage abstractMessage : this.nested) { - abstractMessage.generateJavaMapper(builder); - } - } - - public void generateAbstractMethods(Set specs) { - for (Field field : fields) { - field.generateAbstractMethods(specs); - } - - for (AbstractMessage elem : nested) { - elem.generateAbstractMethods(specs); - } - } - - public void findDependencies(Set dependencies) { - for (Field field : fields) { - field.getDependencies(dependencies); - } - - for (AbstractMessage elem : nested) { - elem.findDependencies(dependencies); - } - } - - public List getNested() { - return nested; - } - - public List getFields() { - return fields; - } - - public String getName() { - return clazz.getSimpleName(); - } - - public abstract static class Field { - protected int protoIndex; - protected java.lang.reflect.Field field; - - protected Field(int index, java.lang.reflect.Field field) { - this.protoIndex = index; - this.field = field; - } - - public abstract String getProtoTypeDeclaration(); - - public int getProtoIndex() { - return protoIndex; - } - - public String getName() { - return field.getName(); - } - - public String getProtoName() { - return field.getName().toUpperCase(); - } - - public void getDependencies(Set deps) {} - - public void generateAbstractMethods(Set specs) {} - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Enum.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Enum.java deleted file mode 100644 index 3944bafb1..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Enum.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import javax.lang.model.element.Modifier; - -import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; -import com.squareup.javapoet.TypeSpec; - -public class Enum extends AbstractMessage { - public enum MapType { - FROM_PROTO("fromProto"), - TO_PROTO("toProto"); - - private final String methodName; - - MapType(String m) { - methodName = m; - } - - public String getMethodName() { - return methodName; - } - } - - public Enum(Class cls, MessageType parent) { - super(cls, parent); - - int protoIndex = 0; - for (java.lang.reflect.Field field : cls.getDeclaredFields()) { - if (field.isEnumConstant()) fields.add(new EnumField(protoIndex++, field)); - } - } - - @Override - public String getProtoClass() { - return "enum"; - } - - private MethodSpec javaMap(MapType mt, TypeName from, TypeName to) { - MethodSpec.Builder method = MethodSpec.methodBuilder(mt.getMethodName()); - method.addModifiers(Modifier.PUBLIC); - method.returns(to); - method.addParameter(from, "from"); - - method.addStatement("$T to", to); - method.beginControlFlow("switch (from)"); - - for (Field field : fields) { - String fromName = (mt == MapType.TO_PROTO) ? field.getName() : field.getProtoName(); - String toName = (mt == MapType.TO_PROTO) ? 
field.getProtoName() : field.getName(); - method.addStatement("case $L: to = $T.$L; break", fromName, to, toName); - } - - method.addStatement( - "default: throw new $T(\"Unexpected enum constant: \" + from)", - IllegalArgumentException.class); - method.endControlFlow(); - method.addStatement("return to"); - return method.build(); - } - - @Override - protected void javaMapFromProto(TypeSpec.Builder type) { - type.addMethod( - javaMap( - MapType.FROM_PROTO, - this.type.getJavaProtoType(), - TypeName.get(this.clazz))); - } - - @Override - protected void javaMapToProto(TypeSpec.Builder type) { - type.addMethod( - javaMap(MapType.TO_PROTO, TypeName.get(this.clazz), this.type.getJavaProtoType())); - } - - public class EnumField extends Field { - protected EnumField(int index, java.lang.reflect.Field field) { - super(index, field); - } - - @Override - public String getProtoTypeDeclaration() { - return String.format("%s = %d", getProtoName(), getProtoIndex()); - } - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java deleted file mode 100644 index 9dfaf2883..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.lang.model.element.Modifier; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.annotationsprocessor.protogen.types.AbstractType; -import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType; -import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeSpec; - -public class Message extends AbstractMessage { - public Message(Class cls, MessageType parent) { - super(cls, parent); - - for (java.lang.reflect.Field field : clazz.getDeclaredFields()) { - ProtoField ann = field.getAnnotation(ProtoField.class); - if (ann == null) continue; - - fields.add(new MessageField(ann.id(), field)); - } - } - - protected ProtoMessage getAnnotation() { - return (ProtoMessage) this.clazz.getAnnotation(ProtoMessage.class); - } - - @Override - public String getProtoClass() { - return "message"; - } - - @Override - protected void javaMapToProto(TypeSpec.Builder type) { - if (!getAnnotation().toProto() || getAnnotation().wrapper()) return; - - ClassName javaProtoType = (ClassName) this.type.getJavaProtoType(); - MethodSpec.Builder method = MethodSpec.methodBuilder("toProto"); - method.addModifiers(Modifier.PUBLIC); - method.returns(javaProtoType); - method.addParameter(this.clazz, "from"); - - method.addStatement( - "$T to = $T.newBuilder()", 
javaProtoType.nestedClass("Builder"), javaProtoType); - - for (Field field : this.fields) { - if (field instanceof MessageField) { - AbstractType fieldType = ((MessageField) field).getAbstractType(); - fieldType.mapToProto(field.getName(), method); - } - } - - method.addStatement("return to.build()"); - type.addMethod(method.build()); - } - - @Override - protected void javaMapFromProto(TypeSpec.Builder type) { - if (!getAnnotation().fromProto() || getAnnotation().wrapper()) return; - - MethodSpec.Builder method = MethodSpec.methodBuilder("fromProto"); - method.addModifiers(Modifier.PUBLIC); - method.returns(this.clazz); - method.addParameter(this.type.getJavaProtoType(), "from"); - - method.addStatement("$T to = new $T()", this.clazz, this.clazz); - - for (Field field : this.fields) { - if (field instanceof MessageField) { - AbstractType fieldType = ((MessageField) field).getAbstractType(); - fieldType.mapFromProto(field.getName(), method); - } - } - - method.addStatement("return to"); - type.addMethod(method.build()); - } - - public static class MessageField extends Field { - protected AbstractType type; - - protected MessageField(int index, java.lang.reflect.Field field) { - super(index, field); - } - - public AbstractType getAbstractType() { - if (type == null) { - type = TypeMapper.INSTANCE.get(field.getGenericType()); - } - return type; - } - - private static Pattern CAMEL_CASE_RE = Pattern.compile("(?<=[a-z])[A-Z]"); - - private static String toUnderscoreCase(String input) { - Matcher m = CAMEL_CASE_RE.matcher(input); - StringBuilder sb = new StringBuilder(); - while (m.find()) { - m.appendReplacement(sb, "_" + m.group()); - } - m.appendTail(sb); - return sb.toString().toLowerCase(); - } - - @Override - public String getProtoTypeDeclaration() { - return String.format( - "%s %s = %d", - getAbstractType().getProtoType(), toUnderscoreCase(getName()), getProtoIndex()); - } - - @Override - public void getDependencies(Set deps) { - 
getAbstractType().getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - getAbstractType().generateAbstractMethods(specs); - } - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoFile.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoFile.java deleted file mode 100644 index 1bd543a60..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoFile.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.util.HashSet; -import java.util.Set; - -import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper; - -import com.squareup.javapoet.ClassName; - -public class ProtoFile { - public static String PROTO_SUFFIX = "Pb"; - - private ClassName baseClass; - private AbstractMessage message; - private String filePath; - - private String protoPackageName; - private String javaPackageName; - private String goPackageName; - - public ProtoFile( - Class object, - String protoPackageName, - String javaPackageName, - String goPackageName) { - this.protoPackageName = protoPackageName; - this.javaPackageName = javaPackageName; - this.goPackageName = goPackageName; - - String className = object.getSimpleName() + PROTO_SUFFIX; - this.filePath = "model/" + object.getSimpleName().toLowerCase() + ".proto"; - this.baseClass = ClassName.get(this.javaPackageName, className); - this.message = new Message(object, TypeMapper.INSTANCE.baseClass(baseClass, filePath)); - } - - public String getJavaClassName() { - return baseClass.simpleName(); - } - - public String getFilePath() { - return filePath; - } - - public String getProtoPackageName() { - return protoPackageName; - } - - public String getJavaPackageName() { - return javaPackageName; - } - - public String getGoPackageName() { - return goPackageName; - } - - public AbstractMessage getMessage() { - return message; - } - - public Set getIncludes() { - Set includes = new HashSet<>(); - message.findDependencies(includes); - includes.remove(this.getFilePath()); - return includes; - } -} diff --git 
a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGen.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGen.java deleted file mode 100644 index a2550d369..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGen.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Writer; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.*; - -import javax.annotation.Generated; -import javax.lang.model.element.Modifier; - -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -import com.github.jknack.handlebars.EscapingStrategy; -import com.github.jknack.handlebars.Handlebars; -import com.github.jknack.handlebars.Template; -import com.github.jknack.handlebars.io.ClassPathTemplateLoader; -import com.github.jknack.handlebars.io.TemplateLoader; -import com.google.common.reflect.ClassPath; -import com.squareup.javapoet.AnnotationSpec; -import com.squareup.javapoet.JavaFile; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeSpec; - -public class ProtoGen { - private static final String GENERATOR_NAME = - "com.netflix.conductor.annotationsprocessor.protogen"; - - private String protoPackageName; - private String javaPackageName; - private String goPackageName; - private List protoFiles = new ArrayList<>(); - - public ProtoGen(String protoPackageName, String javaPackageName, String goPackageName) { - this.protoPackageName = protoPackageName; - this.javaPackageName = javaPackageName; - this.goPackageName = goPackageName; - } - - public void writeMapper(File root, String mapperPackageName) throws IOException { - TypeSpec.Builder protoMapper = - TypeSpec.classBuilder("AbstractProtoMapper") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .addAnnotation( - AnnotationSpec.builder(Generated.class) - 
.addMember("value", "$S", GENERATOR_NAME) - .build()); - - Set abstractMethods = new HashSet<>(); - - protoFiles.sort( - new Comparator() { - public int compare(ProtoFile p1, ProtoFile p2) { - String n1 = p1.getMessage().getName(); - String n2 = p2.getMessage().getName(); - return n1.compareTo(n2); - } - }); - - for (ProtoFile protoFile : protoFiles) { - AbstractMessage elem = protoFile.getMessage(); - elem.generateJavaMapper(protoMapper); - elem.generateAbstractMethods(abstractMethods); - } - - protoMapper.addMethods(abstractMethods); - - JavaFile javaFile = - JavaFile.builder(mapperPackageName, protoMapper.build()).indent(" ").build(); - File filename = new File(root, "AbstractProtoMapper.java"); - try (Writer writer = new FileWriter(filename.toString())) { - System.out.printf("protogen: writing '%s'...\n", filename); - javaFile.writeTo(writer); - } - } - - public void writeProtos(File root) throws IOException { - TemplateLoader loader = new ClassPathTemplateLoader("/templates", ".proto"); - Handlebars handlebars = - new Handlebars(loader) - .infiniteLoops(true) - .prettyPrint(true) - .with(EscapingStrategy.NOOP); - - Template protoFile = handlebars.compile("file"); - - for (ProtoFile file : protoFiles) { - File filename = new File(root, file.getFilePath()); - try (Writer writer = new FileWriter(filename)) { - System.out.printf("protogen: writing '%s'...\n", filename); - protoFile.apply(file, writer); - } - } - } - - public void processPackage(File jarFile, String packageName) throws IOException { - if (!jarFile.isFile()) throw new IOException("missing Jar file " + jarFile); - - URL[] urls = new URL[] {jarFile.toURI().toURL()}; - ClassLoader loader = - new URLClassLoader(urls, Thread.currentThread().getContextClassLoader()); - ClassPath cp = ClassPath.from(loader); - - System.out.printf("protogen: processing Jar '%s'\n", jarFile); - for (ClassPath.ClassInfo info : cp.getTopLevelClassesRecursive(packageName)) { - try { - processClass(info.load()); - } catch 
(NoClassDefFoundError ignored) { - } - } - } - - public void processClass(Class obj) { - if (obj.isAnnotationPresent(ProtoMessage.class)) { - System.out.printf("protogen: found %s\n", obj.getCanonicalName()); - protoFiles.add(new ProtoFile(obj, protoPackageName, javaPackageName, goPackageName)); - } - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTask.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTask.java deleted file mode 100644 index fb411fc4f..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTask.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.io.File; -import java.io.IOException; - -public class ProtoGenTask { - private String protoPackage; - private String javaPackage; - private String goPackage; - - private File protosDir; - private File mapperDir; - private String mapperPackage; - - private File sourceJar; - private String sourcePackage; - - public String getProtoPackage() { - return protoPackage; - } - - public void setProtoPackage(String protoPackage) { - this.protoPackage = protoPackage; - } - - public String getJavaPackage() { - return javaPackage; - } - - public void setJavaPackage(String javaPackage) { - this.javaPackage = javaPackage; - } - - public String getGoPackage() { - return goPackage; - } - - public void setGoPackage(String goPackage) { - this.goPackage = goPackage; - } - - public File getProtosDir() { - return protosDir; - } - - public void setProtosDir(File protosDir) { - this.protosDir = protosDir; - } - - public File getMapperDir() { - return mapperDir; - } - - public void setMapperDir(File mapperDir) { - this.mapperDir = mapperDir; - } - - public String getMapperPackage() { - return mapperPackage; - } - - public void setMapperPackage(String mapperPackage) { - this.mapperPackage = mapperPackage; - } - - public File getSourceJar() { - return sourceJar; - } - - public void setSourceJar(File sourceJar) { - this.sourceJar = sourceJar; - } - - public String getSourcePackage() { - return sourcePackage; - } - - public void setSourcePackage(String sourcePackage) { - this.sourcePackage = sourcePackage; - } - - public void generate() { - ProtoGen generator = new ProtoGen(protoPackage, javaPackage, 
goPackage); - try { - generator.processPackage(sourceJar, sourcePackage); - generator.writeMapper(mapperDir, mapperPackage); - generator.writeProtos(protosDir); - } catch (IOException e) { - System.err.printf("protogen: failed with %s\n", e); - } - } - - public static void main(String[] args) { - if (args == null || args.length < 8) { - throw new RuntimeException( - "protogen configuration incomplete, please provide all required (8) inputs"); - } - ProtoGenTask task = new ProtoGenTask(); - int argsId = 0; - task.setProtoPackage(args[argsId++]); - task.setJavaPackage(args[argsId++]); - task.setGoPackage(args[argsId++]); - task.setProtosDir(new File(args[argsId++])); - task.setMapperDir(new File(args[argsId++])); - task.setMapperPackage(args[argsId++]); - task.setSourceJar(new File(args[argsId++])); - task.setSourcePackage(args[argsId]); - System.out.println("Running protogen with arguments: " + task); - task.generate(); - System.out.println("protogen completed."); - } - - @Override - public String toString() { - return "ProtoGenTask{" - + "protoPackage='" - + protoPackage - + '\'' - + ", javaPackage='" - + javaPackage - + '\'' - + ", goPackage='" - + goPackage - + '\'' - + ", protosDir=" - + protosDir - + ", mapperDir=" - + mapperDir - + ", mapperPackage='" - + mapperPackage - + '\'' - + ", sourceJar=" - + sourceJar - + ", sourcePackage='" - + sourcePackage - + '\'' - + '}'; - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java deleted file mode 100644 index fbfa8e72c..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.Set; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -public abstract class AbstractType { - Type javaType; - TypeName javaProtoType; - - AbstractType(Type javaType, TypeName javaProtoType) { - this.javaType = javaType; - this.javaProtoType = javaProtoType; - } - - public Type getJavaType() { - return javaType; - } - - public TypeName getJavaProtoType() { - return javaProtoType; - } - - public abstract String getProtoType(); - - public abstract TypeName getRawJavaType(); - - public abstract void mapToProto(String field, MethodSpec.Builder method); - - public abstract void mapFromProto(String field, MethodSpec.Builder method); - - public abstract void getDependencies(Set deps); - - public abstract void generateAbstractMethods(Set specs); - - protected String javaMethodName(String m, String field) { - String fieldName = field.substring(0, 1).toUpperCase() + field.substring(1); - return m + fieldName; - } - - private static class ProtoCase { - static String convert(String s) { - StringBuilder out = new StringBuilder(s.length()); - final int len = s.length(); - int i = 0; - int j = -1; - while ((j = findWordBoundary(s, ++j)) != -1) { - out.append(normalizeWord(s.substring(i, j))); - if (j < len && s.charAt(j) == '_') j++; - i = j; - } - if (i == 0) return normalizeWord(s); - if (i < len) out.append(normalizeWord(s.substring(i))); - return out.toString(); - } - - private static boolean isWordBoundary(char c) { - return (c >= 'A' && c <= 'Z'); - } - - private static int findWordBoundary(CharSequence 
sequence, int start) { - int length = sequence.length(); - if (start >= length) return -1; - - if (isWordBoundary(sequence.charAt(start))) { - int i = start; - while (i < length && isWordBoundary(sequence.charAt(i))) i++; - return i; - } else { - for (int i = start; i < length; i++) { - final char c = sequence.charAt(i); - if (c == '_' || isWordBoundary(c)) return i; - } - return -1; - } - } - - private static String normalizeWord(String word) { - if (word.length() < 2) return word.toUpperCase(); - return word.substring(0, 1).toUpperCase() + word.substring(1).toLowerCase(); - } - } - - protected String protoMethodName(String m, String field) { - return m + ProtoCase.convert(field); - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ExternMessageType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ExternMessageType.java deleted file mode 100644 index ed7eaae24..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ExternMessageType.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.Set; - -import javax.lang.model.element.Modifier; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; - -public class ExternMessageType extends MessageType { - private String externProtoType; - - public ExternMessageType( - Type javaType, ClassName javaProtoType, String externProtoType, String protoFilePath) { - super(javaType, javaProtoType, protoFilePath); - this.externProtoType = externProtoType; - } - - @Override - public String getProtoType() { - return externProtoType; - } - - @Override - public void generateAbstractMethods(Set specs) { - MethodSpec fromProto = - MethodSpec.methodBuilder("fromProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.getJavaType()) - .addParameter(this.getJavaProtoType(), "in") - .build(); - - MethodSpec toProto = - MethodSpec.methodBuilder("toProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.getJavaProtoType()) - .addParameter(this.getJavaType(), "in") - .build(); - - specs.add(fromProto); - specs.add(toProto); - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/GenericType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/GenericType.java deleted file mode 100644 index 5bad20a2f..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/GenericType.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.Set; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -abstract class GenericType extends AbstractType { - public GenericType(Type type) { - super(type, null); - } - - protected Class getRawType() { - ParameterizedType tt = (ParameterizedType) this.getJavaType(); - return (Class) tt.getRawType(); - } - - protected AbstractType resolveGenericParam(int idx) { - ParameterizedType tt = (ParameterizedType) this.getJavaType(); - Type[] types = tt.getActualTypeArguments(); - - AbstractType abstractType = TypeMapper.INSTANCE.get(types[idx]); - if (abstractType instanceof GenericType) { - return WrappedType.wrap((GenericType) abstractType); - } - return abstractType; - } - - public abstract String getWrapperSuffix(); - - public abstract AbstractType getValueType(); - - public abstract TypeName resolveJavaProtoType(); - - @Override - public TypeName getRawJavaType() { - return ClassName.get(getRawType()); - } - - @Override - public void getDependencies(Set deps) { - getValueType().getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - getValueType().generateAbstractMethods(specs); - } - - @Override - public TypeName getJavaProtoType() { - if (javaProtoType == null) { - javaProtoType = resolveJavaProtoType(); - } - return javaProtoType; - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ListType.java 
b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ListType.java deleted file mode 100644 index 921594391..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ListType.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.stream.Collectors; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.ParameterizedTypeName; -import com.squareup.javapoet.TypeName; - -public class ListType extends GenericType { - private AbstractType valueType; - - public ListType(Type type) { - super(type); - } - - @Override - public String getWrapperSuffix() { - return "List"; - } - - @Override - public AbstractType getValueType() { - if (valueType == null) { - valueType = resolveGenericParam(0); - } - return valueType; - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - AbstractType subtype = getValueType(); - if (subtype instanceof ScalarType) { - method.addStatement( - "to.$L( from.$L() )", - protoMethodName("addAll", field), - javaMethodName("get", field)); - } else { - method.beginControlFlow( - "for ($T elem : from.$L())", - subtype.getJavaType(), - javaMethodName("get", field)); - method.addStatement("to.$L( toProto(elem) )", protoMethodName("add", field)); - method.endControlFlow(); - } - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - AbstractType subtype = getValueType(); - Type entryType = subtype.getJavaType(); - Class collector = TypeMapper.PROTO_LIST_TYPES.get(getRawType()); - - if (subtype instanceof ScalarType) { - if (entryType.equals(String.class)) { - method.addStatement( - "to.$L( from.$L().stream().collect($T.toCollection($T::new)) )", - javaMethodName("set", field), - protoMethodName("get", field) + "List", - 
Collectors.class, - collector); - } else { - method.addStatement( - "to.$L( from.$L() )", - javaMethodName("set", field), - protoMethodName("get", field) + "List"); - } - } else { - method.addStatement( - "to.$L( from.$L().stream().map(this::fromProto).collect($T.toCollection($T::new)) )", - javaMethodName("set", field), - protoMethodName("get", field) + "List", - Collectors.class, - collector); - } - } - - @Override - public TypeName resolveJavaProtoType() { - return ParameterizedTypeName.get( - (ClassName) getRawJavaType(), getValueType().getJavaProtoType()); - } - - @Override - public String getProtoType() { - return "repeated " + getValueType().getProtoType(); - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java deleted file mode 100644 index fe642fdec..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.HashMap; -import java.util.Map; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.ParameterizedTypeName; -import com.squareup.javapoet.TypeName; - -public class MapType extends GenericType { - private AbstractType keyType; - private AbstractType valueType; - - public MapType(Type type) { - super(type); - } - - @Override - public String getWrapperSuffix() { - return "Map"; - } - - @Override - public AbstractType getValueType() { - if (valueType == null) { - valueType = resolveGenericParam(1); - } - return valueType; - } - - public AbstractType getKeyType() { - if (keyType == null) { - keyType = resolveGenericParam(0); - } - return keyType; - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - AbstractType valueType = getValueType(); - if (valueType instanceof ScalarType) { - method.addStatement( - "to.$L( from.$L() )", - protoMethodName("putAll", field), - javaMethodName("get", field)); - } else { - TypeName typeName = - ParameterizedTypeName.get( - Map.Entry.class, - getKeyType().getJavaType(), - getValueType().getJavaType()); - method.beginControlFlow( - "for ($T pair : from.$L().entrySet())", typeName, javaMethodName("get", field)); - method.addStatement( - "to.$L( pair.getKey(), toProto( pair.getValue() ) )", - protoMethodName("put", field)); - method.endControlFlow(); - } - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - AbstractType valueType = getValueType(); - if (valueType instanceof 
ScalarType) { - method.addStatement( - "to.$L( from.$L() )", - javaMethodName("set", field), - protoMethodName("get", field) + "Map"); - } else { - Type keyType = getKeyType().getJavaType(); - Type valueTypeJava = getValueType().getJavaType(); - TypeName valueTypePb = getValueType().getJavaProtoType(); - - ParameterizedTypeName entryType = - ParameterizedTypeName.get( - ClassName.get(Map.Entry.class), TypeName.get(keyType), valueTypePb); - ParameterizedTypeName mapType = - ParameterizedTypeName.get(Map.class, keyType, valueTypeJava); - ParameterizedTypeName hashMapType = - ParameterizedTypeName.get(HashMap.class, keyType, valueTypeJava); - String mapName = field + "Map"; - - method.addStatement("$T $L = new $T()", mapType, mapName, hashMapType); - method.beginControlFlow( - "for ($T pair : from.$L().entrySet())", - entryType, - protoMethodName("get", field) + "Map"); - method.addStatement("$L.put( pair.getKey(), fromProto( pair.getValue() ) )", mapName); - method.endControlFlow(); - method.addStatement("to.$L($L)", javaMethodName("set", field), mapName); - } - } - - @Override - public TypeName resolveJavaProtoType() { - return ParameterizedTypeName.get( - (ClassName) getRawJavaType(), - getKeyType().getJavaProtoType(), - getValueType().getJavaProtoType()); - } - - @Override - public String getProtoType() { - AbstractType keyType = getKeyType(); - AbstractType valueType = getValueType(); - if (!(keyType instanceof ScalarType)) { - throw new IllegalArgumentException( - "cannot map non-scalar map key: " + this.getJavaType()); - } - return String.format("map<%s, %s>", keyType.getProtoType(), valueType.getProtoType()); - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java deleted file mode 100644 index d57228773..000000000 --- 
a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.List; -import java.util.Set; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -public class MessageType extends AbstractType { - private String protoFilePath; - - public MessageType(Type javaType, ClassName javaProtoType, String protoFilePath) { - super(javaType, javaProtoType); - this.protoFilePath = protoFilePath; - } - - @Override - public String getProtoType() { - List classes = ((ClassName) getJavaProtoType()).simpleNames(); - return String.join(".", classes.subList(1, classes.size())); - } - - public String getProtoFilePath() { - return protoFilePath; - } - - @Override - public TypeName getRawJavaType() { - return getJavaProtoType(); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - final String getter = javaMethodName("get", field); - method.beginControlFlow("if (from.$L() != null)", getter); - method.addStatement("to.$L( toProto( from.$L() ) )", protoMethodName("set", field), getter); - method.endControlFlow(); - } - - private boolean isEnum() { - Type clazz = getJavaType(); - return (clazz instanceof Class) && ((Class) clazz).isEnum(); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - if (!isEnum()) method.beginControlFlow("if (from.$L())", protoMethodName("has", field)); - - method.addStatement( - "to.$L( fromProto( from.$L() ) )", - javaMethodName("set", field), - protoMethodName("get", field)); - - if (!isEnum()) method.endControlFlow(); - } - - @Override - public 
void getDependencies(Set deps) { - deps.add(protoFilePath); - } - - @Override - public void generateAbstractMethods(Set specs) {} -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java deleted file mode 100644 index c6958bdd9..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.Set; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -public class ScalarType extends AbstractType { - private String protoType; - - public ScalarType(Type javaType, TypeName javaProtoType, String protoType) { - super(javaType, javaProtoType); - this.protoType = protoType; - } - - @Override - public String getProtoType() { - return protoType; - } - - @Override - public TypeName getRawJavaType() { - return getJavaProtoType(); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - method.addStatement( - "to.$L( from.$L() )", javaMethodName("set", field), protoMethodName("get", field)); - } - - private boolean isNullableType() { - final Type jt = getJavaType(); - return jt.equals(Boolean.class) - || jt.equals(Byte.class) - || jt.equals(Character.class) - || jt.equals(Short.class) - || jt.equals(Integer.class) - || jt.equals(Long.class) - || jt.equals(Double.class) - || jt.equals(Float.class) - || jt.equals(String.class); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - final boolean nullable = isNullableType(); - String getter = - (getJavaType().equals(boolean.class) || getJavaType().equals(Boolean.class)) - ? 
javaMethodName("is", field) - : javaMethodName("get", field); - - if (nullable) method.beginControlFlow("if (from.$L() != null)", getter); - - method.addStatement("to.$L( from.$L() )", protoMethodName("set", field), getter); - - if (nullable) method.endControlFlow(); - } - - @Override - public void getDependencies(Set deps) {} - - @Override - public void generateAbstractMethods(Set specs) {} -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java deleted file mode 100644 index 2363ed365..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.*; - -import com.google.protobuf.Any; -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.TypeName; - -public class TypeMapper { - static Map PROTO_LIST_TYPES = new HashMap<>(); - - static { - PROTO_LIST_TYPES.put(List.class, ArrayList.class); - PROTO_LIST_TYPES.put(Set.class, HashSet.class); - PROTO_LIST_TYPES.put(LinkedList.class, LinkedList.class); - } - - public static TypeMapper INSTANCE = new TypeMapper(); - - private Map types = new HashMap<>(); - - public void addScalarType(Type t, String protoType) { - types.put(t, new ScalarType(t, TypeName.get(t), protoType)); - } - - public void addMessageType(Class t, MessageType message) { - types.put(t, message); - } - - public TypeMapper() { - addScalarType(int.class, "int32"); - addScalarType(Integer.class, "int32"); - addScalarType(long.class, "int64"); - addScalarType(Long.class, "int64"); - addScalarType(String.class, "string"); - addScalarType(boolean.class, "bool"); - addScalarType(Boolean.class, "bool"); - - addMessageType( - Object.class, - new ExternMessageType( - Object.class, - ClassName.get("com.google.protobuf", "Value"), - "google.protobuf.Value", - "google/protobuf/struct.proto")); - - addMessageType( - Any.class, - new ExternMessageType( - Any.class, - ClassName.get(Any.class), - "google.protobuf.Any", - "google/protobuf/any.proto")); - } - - public AbstractType get(Type t) { - if (!types.containsKey(t)) { - if (t instanceof ParameterizedType) { - Type raw = ((ParameterizedType) t).getRawType(); - if 
(PROTO_LIST_TYPES.containsKey(raw)) { - types.put(t, new ListType(t)); - } else if (raw.equals(Map.class)) { - types.put(t, new MapType(t)); - } - } - } - if (!types.containsKey(t)) { - throw new IllegalArgumentException("Cannot map type: " + t); - } - return types.get(t); - } - - public MessageType get(String className) { - for (Map.Entry pair : types.entrySet()) { - AbstractType t = pair.getValue(); - if (t instanceof MessageType) { - if (((Class) t.getJavaType()).getSimpleName().equals(className)) - return (MessageType) t; - } - } - return null; - } - - public MessageType declare(Class type, MessageType parent) { - return declare(type, (ClassName) parent.getJavaProtoType(), parent.getProtoFilePath()); - } - - public MessageType declare(Class type, ClassName parentType, String protoFilePath) { - String simpleName = type.getSimpleName(); - MessageType t = new MessageType(type, parentType.nestedClass(simpleName), protoFilePath); - if (types.containsKey(type)) { - throw new IllegalArgumentException("duplicate type declaration: " + type); - } - types.put(type, t); - return t; - } - - public MessageType baseClass(ClassName className, String protoFilePath) { - return new MessageType(Object.class, className, protoFilePath); - } -} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java deleted file mode 100644 index c6d04e172..000000000 --- a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen.types; - -import java.lang.reflect.Type; -import java.util.Set; - -import javax.lang.model.element.Modifier; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -public class WrappedType extends AbstractType { - private AbstractType realType; - private MessageType wrappedType; - - public static WrappedType wrap(GenericType realType) { - Type valueType = realType.getValueType().getJavaType(); - if (!(valueType instanceof Class)) - throw new IllegalArgumentException("cannot wrap primitive type: " + valueType); - - String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix(); - MessageType wrappedType = TypeMapper.INSTANCE.get(className); - if (wrappedType == null) - throw new IllegalArgumentException("missing wrapper class: " + className); - return new WrappedType(realType, wrappedType); - } - - public WrappedType(AbstractType realType, MessageType wrappedType) { - super(realType.getJavaType(), wrappedType.getJavaProtoType()); - this.realType = realType; - this.wrappedType = wrappedType; - } - - @Override - public String getProtoType() { - return wrappedType.getProtoType(); - } - - @Override - public TypeName getRawJavaType() { - return realType.getRawJavaType(); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - wrappedType.mapToProto(field, method); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - wrappedType.mapFromProto(field, method); - } - - @Override - public void getDependencies(Set deps) { - this.realType.getDependencies(deps); - 
this.wrappedType.getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - MethodSpec fromProto = - MethodSpec.methodBuilder("fromProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.realType.getJavaType()) - .addParameter(this.wrappedType.getJavaProtoType(), "in") - .build(); - - MethodSpec toProto = - MethodSpec.methodBuilder("toProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.wrappedType.getJavaProtoType()) - .addParameter(this.realType.getJavaType(), "in") - .build(); - - specs.add(fromProto); - specs.add(toProto); - } -} diff --git a/annotations-processor/src/main/resources/templates/file.proto b/annotations-processor/src/main/resources/templates/file.proto deleted file mode 100644 index 292515407..000000000 --- a/annotations-processor/src/main/resources/templates/file.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; -package {{protoPackageName}}; - -{{#includes}} -import "{{this}}"; -{{/includes}} - -option java_package = "{{javaPackageName}}"; -option java_outer_classname = "{{javaClassName}}"; -option go_package = "{{goPackageName}}"; - -{{#message}} -{{>message}} -{{/message}} diff --git a/annotations-processor/src/main/resources/templates/message.proto b/annotations-processor/src/main/resources/templates/message.proto deleted file mode 100644 index 7de110162..000000000 --- a/annotations-processor/src/main/resources/templates/message.proto +++ /dev/null @@ -1,8 +0,0 @@ -{{protoClass}} {{name}} { -{{#nested}} - {{>message}} -{{/nested}} -{{#fields}} - {{protoTypeDeclaration}}; -{{/fields}} -} diff --git a/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java b/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java deleted file mode 100644 index 0fe7a243b..000000000 --- 
a/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotationsprocessor.protogen; - -import java.io.File; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.List; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import com.google.common.collect.Lists; -import com.google.common.io.Files; -import com.google.common.io.Resources; - -import static org.junit.Assert.*; - -public class ProtoGenTest { - private static final Charset charset = StandardCharsets.UTF_8; - - @Rule public TemporaryFolder folder = new TemporaryFolder(); - - @Test - public void happyPath() throws Exception { - File rootDir = folder.getRoot(); - String protoPackage = "protoPackage"; - String javaPackage = "abc.protogen.example"; - String goPackage = "goPackage"; - String sourcePackage = "com.example"; - String mapperPackage = "mapperPackage"; - - File jarFile = new File("./build/libs/example.jar"); - assertTrue(jarFile.exists()); - - File mapperDir = new File(rootDir, "mapperDir"); - mapperDir.mkdirs(); - - File protosDir = new File(rootDir, "protosDir"); - protosDir.mkdirs(); - - File modelDir = new File(protosDir, "model"); - modelDir.mkdirs(); - - ProtoGen generator = new ProtoGen(protoPackage, javaPackage, goPackage); - generator.processPackage(jarFile, sourcePackage); - generator.writeMapper(mapperDir, mapperPackage); - generator.writeProtos(protosDir); - - List models = Lists.newArrayList(modelDir.listFiles()); - assertEquals(1, models.size()); - File exampleProtoFile = - models.stream().filter(f -> f.getName().equals("example.proto")).findFirst().get(); - assertTrue(exampleProtoFile.length() > 0); - 
assertEquals( - Resources.asCharSource(Resources.getResource("example.proto.txt"), charset).read(), - Files.asCharSource(exampleProtoFile, charset).read()); - } -} diff --git a/annotations-processor/src/test/resources/example.proto.txt b/annotations-processor/src/test/resources/example.proto.txt deleted file mode 100644 index ac1379a53..000000000 --- a/annotations-processor/src/test/resources/example.proto.txt +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; -package protoPackage; - - -option java_package = "abc.protogen.example"; -option java_outer_classname = "ExamplePb"; -option go_package = "goPackage"; - -message Example { - string name = 1; - int64 count = 2; -} diff --git a/annotations/README.md b/annotations/README.md deleted file mode 100644 index aa9ae9fc6..000000000 --- a/annotations/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Annotations - -- `protogen` Annotations - - Original Author: Vicent Martí - https://github.com/vmg - - Original Repo: https://github.com/vmg/protogen - - diff --git a/annotations/build.gradle b/annotations/build.gradle deleted file mode 100644 index c3187c1db..000000000 --- a/annotations/build.gradle +++ /dev/null @@ -1,5 +0,0 @@ - - -dependencies { - -} \ No newline at end of file diff --git a/annotations/dependencies.lock b/annotations/dependencies.lock deleted file mode 100644 index b0fd3190f..000000000 --- a/annotations/dependencies.lock +++ /dev/null @@ -1,93 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "runtimeClasspath": { - "org.apache.logging.log4j:log4j-api": { 
- "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java deleted file mode 100644 index 1514a3ed8..000000000 --- a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations.protogen; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoEnum annotates an enum type that will be exposed via the GRPC API as a native Protocol - * Buffers enum. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ProtoEnum {} diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java deleted file mode 100644 index 25ab478c8..000000000 --- a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations.protogen; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoField annotates a field inside an struct with metadata on how to expose it on its - * corresponding Protocol Buffers struct. For a field to be exposed in a ProtoBuf struct, the - * containing struct must also be annotated with a {@link ProtoMessage} or {@link ProtoEnum} tag. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface ProtoField { - /** - * Mandatory. Sets the Protocol Buffer ID for this specific field. Once a field has been - * annotated with a given ID, the ID can never change to a different value or the resulting - * Protocol Buffer struct will not be backwards compatible. - * - * @return the numeric ID for the field - */ - int id(); -} diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java deleted file mode 100644 index d66e4aa43..000000000 --- a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations.protogen; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoMessage annotates a given Java class so it becomes exposed via the GRPC API as a native - * Protocol Buffers struct. The annotated class must be a POJO. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ProtoMessage { - /** - * Sets whether the generated mapping code will contain a helper to translate the POJO for this - * class into the equivalent ProtoBuf object. - * - * @return whether this class will generate a mapper to ProtoBuf objects - */ - boolean toProto() default true; - - /** - * Sets whether the generated mapping code will contain a helper to translate the ProtoBuf - * object for this class into the equivalent POJO. - * - * @return whether this class will generate a mapper from ProtoBuf objects - */ - boolean fromProto() default true; - - /** - * Sets whether this is a wrapper class that will be used to encapsulate complex nested type - * interfaces. Wrapper classes are not directly exposed by the ProtoBuf API and must be mapped - * manually. 
- * - * @return whether this is a wrapper class - */ - boolean wrapper() default false; -} diff --git a/assets/images/blunder_icon.png b/assets/images/blunder_icon.png new file mode 100644 index 000000000..a2d8795b5 Binary files /dev/null and b/assets/images/blunder_icon.png differ diff --git a/assets/images/success_icon.png b/assets/images/success_icon.png new file mode 100644 index 000000000..4f58aa5a3 Binary files /dev/null and b/assets/images/success_icon.png differ diff --git a/awss3-storage/README.md b/awss3-storage/README.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/awss3-storage/build.gradle b/awss3-storage/build.gradle deleted file mode 100644 index adda2bb42..000000000 --- a/awss3-storage/build.gradle +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "com.amazonaws:aws-java-sdk-s3:${revAwsSdk}" - implementation "org.apache.commons:commons-lang3" -} diff --git a/awss3-storage/dependencies.lock b/awss3-storage/dependencies.lock deleted file mode 100644 index 5e4642332..000000000 --- a/awss3-storage/dependencies.lock +++ /dev/null @@ -1,384 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "locked": "1.11.86" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "locked": "1.11.86" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - 
"com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - 
"com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Configuration.java b/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Configuration.java deleted file mode 100644 index a188c8504..000000000 --- a/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Configuration.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.s3.config; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.s3.storage.S3PayloadStorage; - -@Configuration -@EnableConfigurationProperties(S3Properties.class) -@ConditionalOnProperty(name = "conductor.external-payload-storage.type", havingValue = "s3") -public class S3Configuration { - - @Bean - public ExternalPayloadStorage s3ExternalPayloadStorage( - IDGenerator idGenerator, S3Properties properties) { - return new S3PayloadStorage(idGenerator, properties); - } -} diff --git a/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Properties.java b/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Properties.java deleted file mode 100644 index 94a515f72..000000000 --- a/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Properties.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.s3.config; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; - -@ConfigurationProperties("conductor.external-payload-storage.s3") -public class S3Properties { - - /** The s3 bucket name where the payloads will be stored */ - private String bucketName = "conductor_payloads"; - - /** The time (in seconds) for which the signed url will be valid */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration signedUrlExpirationDuration = Duration.ofSeconds(5); - - /** The AWS region of the s3 bucket */ - private String region = "us-east-1"; - - public String getBucketName() { - return bucketName; - } - - public void setBucketName(String bucketName) { - this.bucketName = bucketName; - } - - public Duration getSignedUrlExpirationDuration() { - return signedUrlExpirationDuration; - } - - public void setSignedUrlExpirationDuration(Duration signedUrlExpirationDuration) { - this.signedUrlExpirationDuration = signedUrlExpirationDuration; - } - - public String getRegion() { - return region; - } - - public void setRegion(String region) { - this.region = region; - } -} diff --git a/awss3-storage/src/main/java/com/netflix/conductor/s3/storage/S3PayloadStorage.java b/awss3-storage/src/main/java/com/netflix/conductor/s3/storage/S3PayloadStorage.java deleted file mode 100644 index 86e57e685..000000000 --- a/awss3-storage/src/main/java/com/netflix/conductor/s3/storage/S3PayloadStorage.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.s3.storage; - -import java.io.InputStream; -import java.net.URISyntaxException; -import java.util.Date; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.s3.config.S3Properties; - -import com.amazonaws.HttpMethod; -import com.amazonaws.SdkClientException; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.*; - -/** - * An implementation of {@link ExternalPayloadStorage} using AWS S3 for storing large JSON payload - * data. - * - *

NOTE: The S3 client assumes that access to S3 is configured on the instance. - * - * @see DefaultAWSCredentialsProviderChain - */ -public class S3PayloadStorage implements ExternalPayloadStorage { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3PayloadStorage.class); - private static final String CONTENT_TYPE = "application/json"; - - private final IDGenerator idGenerator; - private final AmazonS3 s3Client; - private final String bucketName; - private final long expirationSec; - - public S3PayloadStorage(IDGenerator idGenerator, S3Properties properties) { - this.idGenerator = idGenerator; - bucketName = properties.getBucketName(); - expirationSec = properties.getSignedUrlExpirationDuration().getSeconds(); - String region = properties.getRegion(); - s3Client = AmazonS3ClientBuilder.standard().withRegion(region).build(); - } - - /** - * @param operation the type of {@link Operation} to be performed - * @param payloadType the {@link PayloadType} that is being accessed - * @return a {@link ExternalStorageLocation} object which contains the pre-signed URL and the s3 - * object key for the json payload - */ - @Override - public ExternalStorageLocation getLocation( - Operation operation, PayloadType payloadType, String path) { - try { - ExternalStorageLocation externalStorageLocation = new ExternalStorageLocation(); - - Date expiration = new Date(); - long expTimeMillis = expiration.getTime() + 1000 * expirationSec; - expiration.setTime(expTimeMillis); - - HttpMethod httpMethod = HttpMethod.GET; - if (operation == Operation.WRITE) { - httpMethod = HttpMethod.PUT; - } - - String objectKey; - if (StringUtils.isNotBlank(path)) { - objectKey = path; - } else { - objectKey = getObjectKey(payloadType); - } - externalStorageLocation.setPath(objectKey); - - GeneratePresignedUrlRequest generatePresignedUrlRequest = - new GeneratePresignedUrlRequest(bucketName, objectKey) - .withMethod(httpMethod) - .withExpiration(expiration); - - externalStorageLocation.setUri( 
- s3Client.generatePresignedUrl(generatePresignedUrlRequest) - .toURI() - .toASCIIString()); - return externalStorageLocation; - } catch (SdkClientException e) { - String msg = - String.format( - "Error communicating with S3 - operation:%s, payloadType: %s, path: %s", - operation, payloadType, path); - LOGGER.error(msg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e); - } catch (URISyntaxException e) { - String msg = "Invalid URI Syntax"; - LOGGER.error(msg, e); - throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, msg, e); - } - } - - /** - * Uploads the payload to the given s3 object key. It is expected that the caller retrieves the - * object key using {@link #getLocation(Operation, PayloadType, String)} before making this - * call. - * - * @param path the s3 key of the object to be uploaded - * @param payload an {@link InputStream} containing the json payload which is to be uploaded - * @param payloadSize the size of the json payload in bytes - */ - @Override - public void upload(String path, InputStream payload, long payloadSize) { - try { - ObjectMetadata objectMetadata = new ObjectMetadata(); - objectMetadata.setContentType(CONTENT_TYPE); - objectMetadata.setContentLength(payloadSize); - PutObjectRequest request = - new PutObjectRequest(bucketName, path, payload, objectMetadata); - s3Client.putObject(request); - } catch (SdkClientException e) { - String msg = - String.format( - "Error uploading to S3 - path:%s, payloadSize: %d", path, payloadSize); - LOGGER.error(msg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e); - } - } - - /** - * Downloads the payload stored in the s3 object. - * - * @param path the S3 key of the object - * @return an input stream containing the contents of the object Caller is expected to close the - * input stream. 
- */ - @Override - public InputStream download(String path) { - try { - S3Object s3Object = s3Client.getObject(new GetObjectRequest(bucketName, path)); - return s3Object.getObjectContent(); - } catch (SdkClientException e) { - String msg = String.format("Error downloading from S3 - path:%s", path); - LOGGER.error(msg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e); - } - } - - private String getObjectKey(PayloadType payloadType) { - StringBuilder stringBuilder = new StringBuilder(); - switch (payloadType) { - case WORKFLOW_INPUT: - stringBuilder.append("workflow/input/"); - break; - case WORKFLOW_OUTPUT: - stringBuilder.append("workflow/output/"); - break; - case TASK_INPUT: - stringBuilder.append("task/input/"); - break; - case TASK_OUTPUT: - stringBuilder.append("task/output/"); - break; - } - stringBuilder.append(idGenerator.generate()).append(".json"); - return stringBuilder.toString(); - } -} diff --git a/awss3-storage/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/awss3-storage/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index d8c1f34e5..000000000 --- a/awss3-storage/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "hints": [ - { - "name": "conductor.external-payload-storage.type", - "values": [ - { - "value": "s3", - "description": "Use AWS S3 as the external payload storage." 
- } - ] - } - ] -} diff --git a/awssqs-event-queue/README.md b/awssqs-event-queue/README.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/awssqs-event-queue/build.gradle b/awssqs-event-queue/build.gradle deleted file mode 100644 index a795acf2c..000000000 --- a/awssqs-event-queue/build.gradle +++ /dev/null @@ -1,16 +0,0 @@ -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "org.apache.commons:commons-lang3" - // SBMTODO: remove guava dep - implementation "com.google.guava:guava:${revGuava}" - - implementation "com.amazonaws:aws-java-sdk-sqs:${revAwsSdk}" - - implementation "io.reactivex:rxjava:${revRxJava}" - - testImplementation 'org.springframework.boot:spring-boot-starter' - testImplementation project(':conductor-common').sourceSets.test.output -} \ No newline at end of file diff --git a/awssqs-event-queue/dependencies.lock b/awssqs-event-queue/dependencies.lock deleted file mode 100644 index dc5736a9f..000000000 --- a/awssqs-event-queue/dependencies.lock +++ /dev/null @@ -1,408 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.amazonaws:aws-java-sdk-sqs": { - "locked": "1.11.86" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-sqs": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": 
[ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" 
- ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.amazonaws:aws-java-sdk-sqs": { - "locked": "1.11.86" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-sqs": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueConfiguration.java b/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueConfiguration.java deleted file mode 100644 index 2ee425e7c..000000000 --- a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueConfiguration.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.config; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.lang3.StringUtils; -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.EventQueueProvider; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.model.TaskModel.Status; -import com.netflix.conductor.sqs.eventqueue.SQSObservableQueue.Builder; - -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.services.sqs.AmazonSQSClient; -import rx.Scheduler; - -@Configuration -@EnableConfigurationProperties(SQSEventQueueProperties.class) -@ConditionalOnProperty(name = "conductor.event-queues.sqs.enabled", havingValue = "true") -public class SQSEventQueueConfiguration { - - @ConditionalOnMissingBean - @Bean - public AmazonSQSClient getSQSClient(AWSCredentialsProvider credentialsProvider) { - return new AmazonSQSClient(credentialsProvider); - } - - @Bean - public EventQueueProvider sqsEventQueueProvider( - AmazonSQSClient sqsClient, SQSEventQueueProperties properties, Scheduler scheduler) { - return new SQSEventQueueProvider(sqsClient, properties, scheduler); - } - - @ConditionalOnProperty( - name = "conductor.default-event-queue.type", - havingValue = "sqs", - 
matchIfMissing = true) - @Bean - public Map getQueues( - ConductorProperties conductorProperties, - SQSEventQueueProperties properties, - AmazonSQSClient sqsClient) { - String stack = ""; - if (conductorProperties.getStack() != null && conductorProperties.getStack().length() > 0) { - stack = conductorProperties.getStack() + "_"; - } - Status[] statuses = new Status[] {Status.COMPLETED, Status.FAILED}; - Map queues = new HashMap<>(); - for (Status status : statuses) { - String queuePrefix = - StringUtils.isBlank(properties.getListenerQueuePrefix()) - ? conductorProperties.getAppId() + "_sqs_notify_" + stack - : properties.getListenerQueuePrefix(); - - String queueName = queuePrefix + status.name(); - - Builder builder = new Builder().withClient(sqsClient).withQueueName(queueName); - - String auth = properties.getAuthorizedAccounts(); - String[] accounts = auth.split(","); - for (String accountToAuthorize : accounts) { - accountToAuthorize = accountToAuthorize.trim(); - if (accountToAuthorize.length() > 0) { - builder.addAccountToAuthorize(accountToAuthorize.trim()); - } - } - ObservableQueue queue = builder.build(); - queues.put(status, queue); - } - - return queues; - } -} diff --git a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProperties.java b/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProperties.java deleted file mode 100644 index 7ce5a3e5d..000000000 --- a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProperties.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.config; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; - -@ConfigurationProperties("conductor.event-queues.sqs") -public class SQSEventQueueProperties { - - /** The maximum number of messages to be fetched from the queue in a single request */ - private int batchSize = 1; - - /** The polling interval (in milliseconds) */ - private Duration pollTimeDuration = Duration.ofMillis(100); - - /** The visibility timeout (in seconds) for the message on the queue */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration visibilityTimeout = Duration.ofSeconds(60); - - /** The prefix to be used for the default listener queues */ - private String listenerQueuePrefix = ""; - - /** The AWS account Ids authorized to send messages to the queues */ - private String authorizedAccounts = ""; - - public int getBatchSize() { - return batchSize; - } - - public void setBatchSize(int batchSize) { - this.batchSize = batchSize; - } - - public Duration getPollTimeDuration() { - return pollTimeDuration; - } - - public void setPollTimeDuration(Duration pollTimeDuration) { - this.pollTimeDuration = pollTimeDuration; - } - - public Duration getVisibilityTimeout() { - return visibilityTimeout; - } - - public void setVisibilityTimeout(Duration visibilityTimeout) { - this.visibilityTimeout = visibilityTimeout; - } - - public String getListenerQueuePrefix() { - return listenerQueuePrefix; - } - - public void setListenerQueuePrefix(String listenerQueuePrefix) { - this.listenerQueuePrefix = 
listenerQueuePrefix; - } - - public String getAuthorizedAccounts() { - return authorizedAccounts; - } - - public void setAuthorizedAccounts(String authorizedAccounts) { - this.authorizedAccounts = authorizedAccounts; - } -} diff --git a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProvider.java b/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProvider.java deleted file mode 100644 index 5ad88dc69..000000000 --- a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/config/SQSEventQueueProvider.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.config; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.lang.NonNull; - -import com.netflix.conductor.core.events.EventQueueProvider; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.sqs.eventqueue.SQSObservableQueue; - -import com.amazonaws.services.sqs.AmazonSQSClient; -import rx.Scheduler; - -public class SQSEventQueueProvider implements EventQueueProvider { - - private static final Logger LOGGER = LoggerFactory.getLogger(SQSEventQueueProvider.class); - private final Map queues = new ConcurrentHashMap<>(); - private final AmazonSQSClient client; - private final int batchSize; - private final long pollTimeInMS; - private final int visibilityTimeoutInSeconds; - private final Scheduler scheduler; - - public SQSEventQueueProvider( - AmazonSQSClient client, SQSEventQueueProperties properties, Scheduler scheduler) { - this.client = client; - this.batchSize = properties.getBatchSize(); - this.pollTimeInMS = properties.getPollTimeDuration().toMillis(); - this.visibilityTimeoutInSeconds = (int) properties.getVisibilityTimeout().getSeconds(); - this.scheduler = scheduler; - } - - @Override - public String getQueueType() { - return "sqs"; - } - - @Override - @NonNull - public ObservableQueue getQueue(String queueURI) { - return queues.computeIfAbsent( - queueURI, - q -> - new SQSObservableQueue.Builder() - .withBatchSize(this.batchSize) - .withClient(client) - .withPollTimeInMS(this.pollTimeInMS) - .withQueueName(queueURI) - 
.withVisibilityTimeout(this.visibilityTimeoutInSeconds) - .withScheduler(scheduler) - .build()); - } -} diff --git a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueue.java b/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueue.java deleted file mode 100644 index a65cff1d9..000000000 --- a/awssqs-event-queue/src/main/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueue.java +++ /dev/null @@ -1,379 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.eventqueue; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.metrics.Monitors; - -import com.amazonaws.auth.policy.Action; -import com.amazonaws.auth.policy.Policy; -import com.amazonaws.auth.policy.Principal; -import com.amazonaws.auth.policy.Resource; -import com.amazonaws.auth.policy.Statement; -import com.amazonaws.auth.policy.Statement.Effect; -import com.amazonaws.auth.policy.actions.SQSActions; -import com.amazonaws.services.sqs.AmazonSQSClient; -import com.amazonaws.services.sqs.model.BatchResultErrorEntry; -import com.amazonaws.services.sqs.model.ChangeMessageVisibilityRequest; -import com.amazonaws.services.sqs.model.CreateQueueRequest; -import com.amazonaws.services.sqs.model.CreateQueueResult; -import com.amazonaws.services.sqs.model.DeleteMessageBatchRequest; -import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry; -import com.amazonaws.services.sqs.model.DeleteMessageBatchResult; -import com.amazonaws.services.sqs.model.GetQueueAttributesResult; -import com.amazonaws.services.sqs.model.ListQueuesRequest; -import com.amazonaws.services.sqs.model.ListQueuesResult; -import com.amazonaws.services.sqs.model.ReceiveMessageRequest; -import 
com.amazonaws.services.sqs.model.ReceiveMessageResult; -import com.amazonaws.services.sqs.model.SendMessageBatchRequest; -import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry; -import com.amazonaws.services.sqs.model.SendMessageBatchResult; -import com.amazonaws.services.sqs.model.SetQueueAttributesResult; -import rx.Observable; -import rx.Observable.OnSubscribe; -import rx.Scheduler; - -public class SQSObservableQueue implements ObservableQueue { - - private static final Logger LOGGER = LoggerFactory.getLogger(SQSObservableQueue.class); - private static final String QUEUE_TYPE = "sqs"; - - private final String queueName; - private final int visibilityTimeoutInSeconds; - private final int batchSize; - private final AmazonSQSClient client; - private final long pollTimeInMS; - private final String queueURL; - private final Scheduler scheduler; - private volatile boolean running; - - private SQSObservableQueue( - String queueName, - AmazonSQSClient client, - int visibilityTimeoutInSeconds, - int batchSize, - long pollTimeInMS, - List accountsToAuthorize, - Scheduler scheduler) { - this.queueName = queueName; - this.client = client; - this.visibilityTimeoutInSeconds = visibilityTimeoutInSeconds; - this.batchSize = batchSize; - this.pollTimeInMS = pollTimeInMS; - this.queueURL = getOrCreateQueue(); - this.scheduler = scheduler; - addPolicy(accountsToAuthorize); - } - - @Override - public Observable observe() { - OnSubscribe subscriber = getOnSubscribe(); - return Observable.create(subscriber); - } - - @Override - public List ack(List messages) { - return delete(messages); - } - - @Override - public void publish(List messages) { - publishMessages(messages); - } - - @Override - public long size() { - GetQueueAttributesResult attributes = - client.getQueueAttributes( - queueURL, Collections.singletonList("ApproximateNumberOfMessages")); - String sizeAsStr = attributes.getAttributes().get("ApproximateNumberOfMessages"); - try { - return 
Long.parseLong(sizeAsStr); - } catch (Exception e) { - return -1; - } - } - - @Override - public void setUnackTimeout(Message message, long unackTimeout) { - int unackTimeoutInSeconds = (int) (unackTimeout / 1000); - ChangeMessageVisibilityRequest request = - new ChangeMessageVisibilityRequest( - queueURL, message.getReceipt(), unackTimeoutInSeconds); - client.changeMessageVisibility(request); - } - - @Override - public String getType() { - return QUEUE_TYPE; - } - - @Override - public String getName() { - return queueName; - } - - @Override - public String getURI() { - return queueURL; - } - - public long getPollTimeInMS() { - return pollTimeInMS; - } - - public int getBatchSize() { - return batchSize; - } - - public int getVisibilityTimeoutInSeconds() { - return visibilityTimeoutInSeconds; - } - - @Override - public void start() { - LOGGER.info("Started listening to {}:{}", getClass().getSimpleName(), queueName); - running = true; - } - - @Override - public void stop() { - LOGGER.info("Stopped listening to {}:{}", getClass().getSimpleName(), queueName); - running = false; - } - - @Override - public boolean isRunning() { - return running; - } - - public static class Builder { - - private String queueName; - private int visibilityTimeout = 30; // seconds - private int batchSize = 5; - private long pollTimeInMS = 100; - private AmazonSQSClient client; - private List accountsToAuthorize = new LinkedList<>(); - private Scheduler scheduler; - - public Builder withQueueName(String queueName) { - this.queueName = queueName; - return this; - } - - /** - * @param visibilityTimeout Visibility timeout for the message in SECONDS - * @return builder instance - */ - public Builder withVisibilityTimeout(int visibilityTimeout) { - this.visibilityTimeout = visibilityTimeout; - return this; - } - - public Builder withBatchSize(int batchSize) { - this.batchSize = batchSize; - return this; - } - - public Builder withClient(AmazonSQSClient client) { - this.client = client; - return 
this; - } - - public Builder withPollTimeInMS(long pollTimeInMS) { - this.pollTimeInMS = pollTimeInMS; - return this; - } - - public Builder withAccountsToAuthorize(List accountsToAuthorize) { - this.accountsToAuthorize = accountsToAuthorize; - return this; - } - - public Builder addAccountToAuthorize(String accountToAuthorize) { - this.accountsToAuthorize.add(accountToAuthorize); - return this; - } - - public Builder withScheduler(Scheduler scheduler) { - this.scheduler = scheduler; - return this; - } - - public SQSObservableQueue build() { - return new SQSObservableQueue( - queueName, - client, - visibilityTimeout, - batchSize, - pollTimeInMS, - accountsToAuthorize, - scheduler); - } - } - - // Private methods - String getOrCreateQueue() { - List queueUrls = listQueues(queueName); - if (queueUrls == null || queueUrls.isEmpty()) { - CreateQueueRequest createQueueRequest = - new CreateQueueRequest().withQueueName(queueName); - CreateQueueResult result = client.createQueue(createQueueRequest); - return result.getQueueUrl(); - } else { - return queueUrls.get(0); - } - } - - private String getQueueARN() { - GetQueueAttributesResult response = - client.getQueueAttributes(queueURL, Collections.singletonList("QueueArn")); - return response.getAttributes().get("QueueArn"); - } - - private void addPolicy(List accountsToAuthorize) { - if (accountsToAuthorize == null || accountsToAuthorize.isEmpty()) { - LOGGER.info("No additional security policies attached for the queue " + queueName); - return; - } - LOGGER.info("Authorizing " + accountsToAuthorize + " to the queue " + queueName); - Map attributes = new HashMap<>(); - attributes.put("Policy", getPolicy(accountsToAuthorize)); - SetQueueAttributesResult result = client.setQueueAttributes(queueURL, attributes); - LOGGER.info("policy attachment result: " + result); - LOGGER.info( - "policy attachment result: status=" - + result.getSdkHttpMetadata().getHttpStatusCode()); - } - - private String getPolicy(List accountIds) { - 
Policy policy = new Policy("AuthorizedWorkerAccessPolicy"); - Statement stmt = new Statement(Effect.Allow); - Action action = SQSActions.SendMessage; - stmt.getActions().add(action); - stmt.setResources(new LinkedList<>()); - for (String accountId : accountIds) { - Principal principal = new Principal(accountId); - stmt.getPrincipals().add(principal); - } - stmt.getResources().add(new Resource(getQueueARN())); - policy.getStatements().add(stmt); - return policy.toJson(); - } - - private List listQueues(String queueName) { - ListQueuesRequest listQueuesRequest = - new ListQueuesRequest().withQueueNamePrefix(queueName); - ListQueuesResult resultList = client.listQueues(listQueuesRequest); - return resultList.getQueueUrls().stream() - .filter(queueUrl -> queueUrl.contains(queueName)) - .collect(Collectors.toList()); - } - - private void publishMessages(List messages) { - LOGGER.debug("Sending {} messages to the SQS queue: {}", messages.size(), queueName); - SendMessageBatchRequest batch = new SendMessageBatchRequest(queueURL); - messages.forEach( - msg -> { - SendMessageBatchRequestEntry sendr = - new SendMessageBatchRequestEntry(msg.getId(), msg.getPayload()); - batch.getEntries().add(sendr); - }); - LOGGER.debug("sending {} messages in batch", batch.getEntries().size()); - SendMessageBatchResult result = client.sendMessageBatch(batch); - LOGGER.debug("send result: {} for SQS queue: {}", result.getFailed().toString(), queueName); - } - - List receiveMessages() { - try { - ReceiveMessageRequest receiveMessageRequest = - new ReceiveMessageRequest() - .withQueueUrl(queueURL) - .withVisibilityTimeout(visibilityTimeoutInSeconds) - .withMaxNumberOfMessages(batchSize); - - ReceiveMessageResult result = client.receiveMessage(receiveMessageRequest); - - List messages = - result.getMessages().stream() - .map( - msg -> - new Message( - msg.getMessageId(), - msg.getBody(), - msg.getReceiptHandle())) - .collect(Collectors.toList()); - 
Monitors.recordEventQueueMessagesProcessed(QUEUE_TYPE, this.queueName, messages.size()); - return messages; - } catch (Exception e) { - LOGGER.error("Exception while getting messages from SQS", e); - Monitors.recordObservableQMessageReceivedErrors(QUEUE_TYPE); - } - return new ArrayList<>(); - } - - OnSubscribe getOnSubscribe() { - return subscriber -> { - Observable interval = Observable.interval(pollTimeInMS, TimeUnit.MILLISECONDS); - interval.flatMap( - (Long x) -> { - if (!isRunning()) { - LOGGER.debug( - "Component stopped, skip listening for messages from SQS"); - return Observable.from(Collections.emptyList()); - } - List messages = receiveMessages(); - return Observable.from(messages); - }) - .subscribe(subscriber::onNext, subscriber::onError); - }; - } - - private List delete(List messages) { - if (messages == null || messages.isEmpty()) { - return null; - } - - DeleteMessageBatchRequest batch = new DeleteMessageBatchRequest().withQueueUrl(queueURL); - List entries = batch.getEntries(); - - messages.forEach( - m -> - entries.add( - new DeleteMessageBatchRequestEntry() - .withId(m.getId()) - .withReceiptHandle(m.getReceipt()))); - - DeleteMessageBatchResult result = client.deleteMessageBatch(batch); - List failures = - result.getFailed().stream() - .map(BatchResultErrorEntry::getId) - .collect(Collectors.toList()); - LOGGER.debug("Failed to delete messages from queue: {}: {}", queueName, failures); - return failures; - } -} diff --git a/awssqs-event-queue/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/awssqs-event-queue/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index 2cc76ff65..000000000 --- a/awssqs-event-queue/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.event-queues.sqs.enabled", - "type": "java.lang.Boolean", - "description": "Enable the use of AWS SQS 
implementation to provide queues for consuming events.", - "sourceType": "com.netflix.conductor.sqs.config.SQSEventQueueConfiguration" - }, - { - "name": "conductor.default-event-queue.type", - "type": "java.lang.String", - "description": "The default event queue type to listen on for the WAIT task.", - "sourceType": "com.netflix.conductor.sqs.config.SQSEventQueueConfiguration" - } - ], - "hints": [ - { - "name": "conductor.default-event-queue.type", - "values": [ - { - "value": "sqs", - "description": "Use AWS SQS as the event queue to listen on for the WAIT task." - } - ] - } - ] -} \ No newline at end of file diff --git a/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/DefaultEventQueueProcessorTest.java b/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/DefaultEventQueueProcessorTest.java deleted file mode 100644 index ab7be3118..000000000 --- a/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/DefaultEventQueueProcessorTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.eventqueue; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.stubbing.Answer; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.core.events.queue.DefaultEventQueueProcessor; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.TaskModel.Status; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@SuppressWarnings("unchecked") -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class 
DefaultEventQueueProcessorTest { - - private static SQSObservableQueue queue; - private static WorkflowExecutor workflowExecutor; - private DefaultEventQueueProcessor defaultEventQueueProcessor; - - @Autowired private ObjectMapper objectMapper; - - private static final List messages = new LinkedList<>(); - private static final List updatedTasks = new LinkedList<>(); - private static final List mappedTasks = new LinkedList<>(); - - @Before - public void init() { - Map queues = new HashMap<>(); - queues.put(Status.COMPLETED, queue); - defaultEventQueueProcessor = - new DefaultEventQueueProcessor(queues, workflowExecutor, objectMapper); - } - - @BeforeClass - public static void setup() { - - queue = mock(SQSObservableQueue.class); - when(queue.getOrCreateQueue()).thenReturn("junit_queue_url"); - when(queue.isRunning()).thenReturn(true); - Answer answer = - (Answer>) - invocation -> { - List copy = new LinkedList<>(messages); - messages.clear(); - return copy; - }; - - when(queue.receiveMessages()).thenAnswer(answer); - when(queue.getOnSubscribe()).thenCallRealMethod(); - when(queue.observe()).thenCallRealMethod(); - when(queue.getName()).thenReturn(Status.COMPLETED.name()); - - TaskModel task0 = new TaskModel(); - task0.setStatus(Status.IN_PROGRESS); - task0.setTaskId("t0"); - task0.setReferenceTaskName("t0"); - task0.setTaskType(TASK_TYPE_WAIT); - WorkflowModel workflow0 = new WorkflowModel(); - workflow0.setWorkflowId("v_0"); - workflow0.getTasks().add(task0); - - TaskModel task2 = new TaskModel(); - task2.setStatus(Status.IN_PROGRESS); - task2.setTaskId("t2"); - task2.setTaskType(TASK_TYPE_WAIT); - WorkflowModel workflow2 = new WorkflowModel(); - workflow2.setWorkflowId("v_2"); - workflow2.getTasks().add(task2); - - doAnswer( - (Answer) - invocation -> { - List msgs = invocation.getArgument(0, List.class); - messages.addAll(msgs); - return null; - }) - .when(queue) - .publish(any()); - - workflowExecutor = mock(WorkflowExecutor.class); - 
assertNotNull(workflowExecutor); - - doReturn(workflow0).when(workflowExecutor).getWorkflow(eq("v_0"), anyBoolean()); - - doReturn(workflow2).when(workflowExecutor).getWorkflow(eq("v_2"), anyBoolean()); - - doAnswer( - (Answer) - invocation -> { - updatedTasks.add(invocation.getArgument(0, TaskResult.class)); - return null; - }) - .when(workflowExecutor) - .updateTask(any(TaskResult.class)); - } - - @Test - public void test() throws Exception { - defaultEventQueueProcessor.updateByTaskRefName( - "v_0", "t0", new HashMap<>(), Status.COMPLETED); - Uninterruptibles.sleepUninterruptibly(1_000, TimeUnit.MILLISECONDS); - - assertTrue(updatedTasks.stream().anyMatch(task -> task.getTaskId().equals("t0"))); - } - - @Test(expected = IllegalArgumentException.class) - public void testFailure() throws Exception { - defaultEventQueueProcessor.updateByTaskRefName( - "v_1", "t1", new HashMap<>(), Status.CANCELED); - Uninterruptibles.sleepUninterruptibly(1_000, TimeUnit.MILLISECONDS); - } - - @Test - public void testWithTaskId() throws Exception { - defaultEventQueueProcessor.updateByTaskId("v_2", "t2", new HashMap<>(), Status.COMPLETED); - Uninterruptibles.sleepUninterruptibly(1_000, TimeUnit.MILLISECONDS); - assertTrue(updatedTasks.stream().anyMatch(task -> task.getTaskId().equals("t2"))); - } -} diff --git a/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueueTest.java b/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueueTest.java deleted file mode 100644 index be0e92fdc..000000000 --- a/awssqs-event-queue/src/test/java/com/netflix/conductor/sqs/eventqueue/SQSObservableQueueTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sqs.eventqueue; - -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.junit.Test; -import org.mockito.stubbing.Answer; - -import com.netflix.conductor.core.events.queue.Message; - -import com.amazonaws.services.sqs.AmazonSQSClient; -import com.amazonaws.services.sqs.model.ListQueuesRequest; -import com.amazonaws.services.sqs.model.ListQueuesResult; -import com.amazonaws.services.sqs.model.ReceiveMessageRequest; -import com.amazonaws.services.sqs.model.ReceiveMessageResult; -import com.google.common.util.concurrent.Uninterruptibles; -import rx.Observable; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SQSObservableQueueTest { - - @Test - public void test() { - - List messages = new LinkedList<>(); - Observable.range(0, 10) - .forEach((Integer x) -> messages.add(new Message("" + x, "payload: " + x, null))); - assertEquals(10, messages.size()); - - SQSObservableQueue queue = mock(SQSObservableQueue.class); - when(queue.getOrCreateQueue()).thenReturn("junit_queue_url"); - Answer answer = (Answer>) invocation -> Collections.emptyList(); - when(queue.receiveMessages()).thenReturn(messages).thenAnswer(answer); - when(queue.isRunning()).thenReturn(true); - when(queue.getOnSubscribe()).thenCallRealMethod(); - when(queue.observe()).thenCallRealMethod(); - - List found = new LinkedList<>(); - Observable observable = queue.observe(); - 
assertNotNull(observable); - observable.subscribe(found::add); - - Uninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS); - - assertEquals(messages.size(), found.size()); - assertEquals(messages, found); - } - - @Test - public void testException() { - com.amazonaws.services.sqs.model.Message message = - new com.amazonaws.services.sqs.model.Message() - .withMessageId("test") - .withBody("") - .withReceiptHandle("receiptHandle"); - Answer answer = (Answer) invocation -> new ReceiveMessageResult(); - - AmazonSQSClient client = mock(AmazonSQSClient.class); - when(client.listQueues(any(ListQueuesRequest.class))) - .thenReturn(new ListQueuesResult().withQueueUrls("junit_queue_url")); - when(client.receiveMessage(any(ReceiveMessageRequest.class))) - .thenThrow(new RuntimeException("Error in SQS communication")) - .thenReturn(new ReceiveMessageResult().withMessages(message)) - .thenAnswer(answer); - - SQSObservableQueue queue = - new SQSObservableQueue.Builder().withQueueName("junit").withClient(client).build(); - queue.start(); - - List found = new LinkedList<>(); - Observable observable = queue.observe(); - assertNotNull(observable); - observable.subscribe(found::add); - - Uninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS); - assertEquals(1, found.size()); - } -} diff --git a/build.gradle b/build.gradle deleted file mode 100644 index 46bf6d1d5..000000000 --- a/build.gradle +++ /dev/null @@ -1,197 +0,0 @@ -import org.springframework.boot.gradle.plugin.SpringBootPlugin - -buildscript { - repositories { - mavenCentral() - maven { - url "https://plugins.gradle.org/m2/" - } - } - dependencies { - classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0' - classpath 'org.springframework.boot:spring-boot-gradle-plugin:2.6.7' - classpath 'com.diffplug.spotless:spotless-plugin-gradle:6.+' - } -} - -plugins { - id 'io.spring.dependency-management' version '1.0.11.RELEASE' - id 'java' - id 'application' - id 'jacoco' - id 
'nebula.netflixoss' version '10.6.0' - id 'org.sonarqube' version '3.3' -} - -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -// Establish version and status -ext.githubProjectName = rootProject.name // Change if github project name is not the same as the root project's name - -subprojects { - tasks.withType(Javadoc).all { enabled = false } -} - -apply from: "$rootDir/dependencies.gradle" -apply from: "$rootDir/springboot-bom-overrides.gradle" - -allprojects { - apply plugin: 'nebula.netflixoss' - apply plugin: 'io.spring.dependency-management' - apply plugin: 'java-library' - apply plugin: 'project-report' - - sourceCompatibility = JavaVersion.VERSION_11 - targetCompatibility = JavaVersion.VERSION_11 - - group = 'com.netflix.conductor' - - configurations.all { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - exclude group: 'org.apache.logging.log4j', module: 'log4j-to-slf4j' - exclude group: 'org.slf4j', module: 'slf4j-log4j12' - } - - repositories { - mavenCentral() - - // oss-candidate for -rc.* verions: - maven { - url "https://artifactory-oss.prod.netflix.net/artifactory/maven-oss-candidates" - } - - /** - * This repository locates artifacts that don't exist in maven central but we had to backup from jcenter - * The exclusiveContent - */ - exclusiveContent { - forRepository { - maven { - url "https://artifactory-oss.prod.netflix.net/artifactory/required-jcenter-modules-backup" - } - } - filter { - includeGroupByRegex "com\\.github\\.vmg.*" - } - } - } - - dependencyManagement { - imports { - // dependency versions for the BOM can be found at https://docs.spring.io/spring-boot/docs/2.6.7/reference/htmlsingle/#appendix.dependency-versions - mavenBom(SpringBootPlugin.BOM_COORDINATES) - } - } - - 
dependencies { - implementation('org.apache.logging.log4j:log4j-core') { - version { - strictly '2.17.1' - } - } - implementation('org.apache.logging.log4j:log4j-api') { - version { - strictly '2.17.1' - } - } - implementation('org.apache.logging.log4j:log4j-slf4j-impl') { - version { - strictly '2.17.1' - } - } - implementation('org.apache.logging.log4j:log4j-jul') { - version { - strictly '2.17.1' - } - } - implementation('org.apache.logging.log4j:log4j-web') { - version { - strictly '2.17.1' - } - } - annotationProcessor 'org.springframework.boot:spring-boot-configuration-processor' - - testImplementation('org.springframework.boot:spring-boot-starter-test') - testImplementation('org.springframework.boot:spring-boot-starter-log4j2') - testImplementation 'junit:junit' - } - - // processes additional configuration metadata json file as described here - // https://docs.spring.io/spring-boot/docs/2.3.1.RELEASE/reference/html/appendix-configuration-metadata.html#configuration-metadata-additional-metadata - compileJava.inputs.files(processResources) - - test { - useJUnitPlatform() - testLogging { - events = ["SKIPPED", "FAILED"] - exceptionFormat = "full" - displayGranularity = 1 - showStandardStreams = false - } - } -} - -// all client and their related modules are published with Java 8 compatibility -["annotations", "common", "client", "client-spring", "grpc", "grpc-client"].each { - project(":conductor-$it") { - compileJava { - options.release = 8 - } - } -} - -jacocoTestReport { - reports { - html.required = true - xml.required = true - csv.required = false - } -} - -task server { - dependsOn ':conductor-server:bootRun' -} - -sonarqube { - properties { - property "sonar.projectKey", "com.netflix.conductor:conductor" - property "sonar.organization", "netflix" - property "sonar.host.url", "https://sonarcloud.io" - } -} - -configure(allprojects - project(':conductor-grpc')) { - apply plugin: 'com.diffplug.spotless' - - spotless { - java { - googleJavaFormat().aosp() - 
removeUnusedImports() - importOrder('java', 'javax', 'org', 'com.netflix', '', '\\#com.netflix', '\\#') - licenseHeaderFile("$rootDir/licenseheader.txt") - } - } -} - -['cassandra-persistence', 'core', 'redis-concurrency-limit', 'test-harness', 'client'].each { - configure(project(":conductor-$it")) { - spotless { - groovy { - importOrder('java', 'javax', 'org', 'com.netflix', '', '\\#com.netflix', '\\#') - licenseHeaderFile("$rootDir/licenseheader.txt") - } - } - } -} diff --git a/cassandra-persistence/build.gradle b/cassandra-persistence/build.gradle deleted file mode 100644 index 6fee6f8ff..000000000 --- a/cassandra-persistence/build.gradle +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -apply plugin: 'groovy' - -dependencies { - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation project(':conductor-common') - implementation project(':conductor-core') - implementation "com.datastax.cassandra:cassandra-driver-core:${revCassandra}" - implementation "org.apache.commons:commons-lang3" - - testImplementation project(':conductor-core').sourceSets.test.output - testImplementation project(':conductor-common').sourceSets.test.output - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" - testImplementation "org.testcontainers:spock:${revTestContainer}" - testImplementation "org.testcontainers:cassandra:${revTestContainer}" - testImplementation "com.google.protobuf:protobuf-java:${revProtoBuf}" -} diff --git a/cassandra-persistence/dependencies.lock b/cassandra-persistence/dependencies.lock deleted file mode 100644 index 984da1f3f..000000000 --- a/cassandra-persistence/dependencies.lock +++ /dev/null @@ -1,417 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.datastax.cassandra:cassandra-driver-core": { - "locked": "3.10.2" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" 
- }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.datastax.cassandra:cassandra-driver-core": { - "locked": "3.10.2" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.datastax.cassandra:cassandra-driver-core": { - "locked": "3.10.2" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.testcontainers:cassandra": { - "locked": "1.15.3" - }, - "org.testcontainers:spock": { - "locked": "1.15.3" - } - }, - "testRuntimeClasspath": { - "com.datastax.cassandra:cassandra-driver-core": { - "locked": "3.10.2" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - 
"org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.testcontainers:cassandra": { - "locked": "1.15.3" - }, - "org.testcontainers:spock": { - "locked": "1.15.3" - } - } -} \ No newline at end of file diff --git 
a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraConfiguration.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraConfiguration.java deleted file mode 100644 index 2d2725f3e..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraConfiguration.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.config; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.cassandra.dao.CassandraEventHandlerDAO; -import com.netflix.conductor.cassandra.dao.CassandraExecutionDAO; -import com.netflix.conductor.cassandra.dao.CassandraMetadataDAO; -import com.netflix.conductor.cassandra.dao.CassandraPollDataDAO; -import com.netflix.conductor.cassandra.util.Statements; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.MetadataDAO; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Metadata; -import com.datastax.driver.core.Session; -import com.fasterxml.jackson.databind.ObjectMapper; - -@Configuration(proxyBeanMethods = false) -@EnableConfigurationProperties(CassandraProperties.class) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "cassandra") -public class CassandraConfiguration { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraConfiguration.class); - - @Bean - public Cluster cluster(CassandraProperties properties) { - String host = properties.getHostAddress(); - int port = properties.getPort(); - - LOGGER.info("Connecting to cassandra cluster with host:{}, port:{}", host, port); - - Cluster cluster = 
Cluster.builder().addContactPoint(host).withPort(port).build(); - - Metadata metadata = cluster.getMetadata(); - LOGGER.info("Connected to cluster: {}", metadata.getClusterName()); - metadata.getAllHosts() - .forEach( - h -> - LOGGER.info( - "Datacenter:{}, host:{}, rack: {}", - h.getDatacenter(), - h.getEndPoint().resolve().getHostName(), - h.getRack())); - return cluster; - } - - @Bean - public Session session(Cluster cluster) { - LOGGER.info("Initializing cassandra session"); - return cluster.connect(); - } - - @Bean - public MetadataDAO cassandraMetadataDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - return new CassandraMetadataDAO(session, objectMapper, properties, statements); - } - - @Bean - public ExecutionDAO cassandraExecutionDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - return new CassandraExecutionDAO(session, objectMapper, properties, statements); - } - - @Bean - public EventHandlerDAO cassandraEventHandlerDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - return new CassandraEventHandlerDAO(session, objectMapper, properties, statements); - } - - @Bean - public CassandraPollDataDAO cassandraPollDataDAO() { - return new CassandraPollDataDAO(); - } - - @Bean - public Statements statements(CassandraProperties cassandraProperties) { - return new Statements(cassandraProperties.getKeyspace()); - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java deleted file mode 100644 index 28d3eee97..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.config; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; - -import com.datastax.driver.core.ConsistencyLevel; - -@ConfigurationProperties("conductor.cassandra") -public class CassandraProperties { - - /** The address for the cassandra database host */ - private String hostAddress = "127.0.0.1"; - - /** The port to be used to connect to the cassandra database instance */ - private int port = 9142; - - /** The name of the cassandra cluster */ - private String cluster = ""; - - /** The keyspace to be used in the cassandra datastore */ - private String keyspace = "conductor"; - - /** - * The number of tasks to be stored in a single partition which will be used for sharding - * workflows in the datastore - */ - private int shardSize = 100; - - /** The replication strategy with which to configure the keyspace */ - private String replicationStrategy = "SimpleStrategy"; - - /** The key to be used while configuring the replication factor */ - private String replicationFactorKey = "replication_factor"; - - /** The replication factor value with which the keyspace is configured */ - private int replicationFactorValue = 3; - - /** The consistency level to be used for read operations */ - private ConsistencyLevel readConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM; - - /** The consistency level to be used for write operations */ - private ConsistencyLevel writeConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM; - - /** The time in seconds after which the in-memory task 
definitions cache will be refreshed */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration taskDefCacheRefreshInterval = Duration.ofSeconds(60); - - /** The time in seconds after which the in-memory event handler cache will be refreshed */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration eventHandlerCacheRefreshInterval = Duration.ofSeconds(60); - - /** The time to live in seconds for which the event execution will be persisted */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration eventExecutionPersistenceTtl = Duration.ZERO; - - public String getHostAddress() { - return hostAddress; - } - - public void setHostAddress(String hostAddress) { - this.hostAddress = hostAddress; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public String getCluster() { - return cluster; - } - - public void setCluster(String cluster) { - this.cluster = cluster; - } - - public String getKeyspace() { - return keyspace; - } - - public void setKeyspace(String keyspace) { - this.keyspace = keyspace; - } - - public int getShardSize() { - return shardSize; - } - - public void setShardSize(int shardSize) { - this.shardSize = shardSize; - } - - public String getReplicationStrategy() { - return replicationStrategy; - } - - public void setReplicationStrategy(String replicationStrategy) { - this.replicationStrategy = replicationStrategy; - } - - public String getReplicationFactorKey() { - return replicationFactorKey; - } - - public void setReplicationFactorKey(String replicationFactorKey) { - this.replicationFactorKey = replicationFactorKey; - } - - public int getReplicationFactorValue() { - return replicationFactorValue; - } - - public void setReplicationFactorValue(int replicationFactorValue) { - this.replicationFactorValue = replicationFactorValue; - } - - public ConsistencyLevel getReadConsistencyLevel() { - return readConsistencyLevel; - } - - public void setReadConsistencyLevel(ConsistencyLevel 
readConsistencyLevel) { - this.readConsistencyLevel = readConsistencyLevel; - } - - public ConsistencyLevel getWriteConsistencyLevel() { - return writeConsistencyLevel; - } - - public void setWriteConsistencyLevel(ConsistencyLevel writeConsistencyLevel) { - this.writeConsistencyLevel = writeConsistencyLevel; - } - - public Duration getTaskDefCacheRefreshInterval() { - return taskDefCacheRefreshInterval; - } - - public void setTaskDefCacheRefreshInterval(Duration taskDefCacheRefreshInterval) { - this.taskDefCacheRefreshInterval = taskDefCacheRefreshInterval; - } - - public Duration getEventHandlerCacheRefreshInterval() { - return eventHandlerCacheRefreshInterval; - } - - public void setEventHandlerCacheRefreshInterval(Duration eventHandlerCacheRefreshInterval) { - this.eventHandlerCacheRefreshInterval = eventHandlerCacheRefreshInterval; - } - - public Duration getEventExecutionPersistenceTtl() { - return eventExecutionPersistenceTtl; - } - - public void setEventExecutionPersistenceTtl(Duration eventExecutionPersistenceTtl) { - this.eventExecutionPersistenceTtl = eventExecutionPersistenceTtl; - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java deleted file mode 100644 index c576be8e9..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.cassandra.config.CassandraProperties; -import com.netflix.conductor.metrics.Monitors; - -import com.datastax.driver.core.DataType; -import com.datastax.driver.core.Session; -import com.datastax.driver.core.schemabuilder.SchemaBuilder; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import static com.netflix.conductor.cassandra.util.Constants.DAO_NAME; -import static com.netflix.conductor.cassandra.util.Constants.ENTITY_KEY; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_EXECUTION_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.MESSAGE_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.PAYLOAD_KEY; -import static com.netflix.conductor.cassandra.util.Constants.SHARD_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_EXECUTIONS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_HANDLERS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEFS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEF_LIMIT; -import static 
com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_LOOKUP; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOWS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS_INDEX; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEF_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TOTAL_PARTITIONS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TOTAL_TASKS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_VALUE; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_VERSION_KEY; - -/** - * Creates the keyspace and tables. - * - *

CREATE KEYSPACE IF NOT EXISTS conductor WITH replication = { 'class' : - * 'NetworkTopologyStrategy', 'us-east': '3'}; - * - *

CREATE TABLE IF NOT EXISTS conductor.workflows ( workflow_id uuid, shard_id int, task_id text, - * entity text, payload text, total_tasks int STATIC, total_partitions int STATIC, PRIMARY - * KEY((workflow_id, shard_id), entity, task_id) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.task_lookup( task_id uuid, workflow_id uuid, PRIMARY KEY - * (task_id) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.task_def_limit( task_def_name text, task_id uuid, - * workflow_id uuid, PRIMARY KEY ((task_def_name), task_id_key) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.workflow_definitions( workflow_def_name text, version - * int, workflow_definition text, PRIMARY KEY ((workflow_def_name), version) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.workflow_defs_index( workflow_def_version_index text, - * workflow_def_name_version text, workflow_def_index_value text,PRIMARY KEY - * ((workflow_def_version_index), workflow_def_name_version) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.task_definitions( task_defs text, task_def_name text, - * task_definition text, PRIMARY KEY ((task_defs), task_def_name) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.event_handlers( handlers text, event_handler_name text, - * event_handler text, PRIMARY KEY ((handlers), event_handler_name) ); - * - *

CREATE TABLE IF NOT EXISTS conductor.event_executions( message_id text, event_handler_name - * text, event_execution_id text, payload text, PRIMARY KEY ((message_id, event_handler_name), - * event_execution_id) ); - */ -public abstract class CassandraBaseDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraBaseDAO.class); - - private final ObjectMapper objectMapper; - protected final Session session; - protected final CassandraProperties properties; - - private boolean initialized = false; - - public CassandraBaseDAO( - Session session, ObjectMapper objectMapper, CassandraProperties properties) { - this.session = session; - this.objectMapper = objectMapper; - this.properties = properties; - - init(); - } - - private void init() { - try { - if (!initialized) { - session.execute(getCreateKeyspaceStatement()); - session.execute(getCreateWorkflowsTableStatement()); - session.execute(getCreateTaskLookupTableStatement()); - session.execute(getCreateTaskDefLimitTableStatement()); - session.execute(getCreateWorkflowDefsTableStatement()); - session.execute(getCreateWorkflowDefsIndexTableStatement()); - session.execute(getCreateTaskDefsTableStatement()); - session.execute(getCreateEventHandlersTableStatement()); - session.execute(getCreateEventExecutionsTableStatement()); - LOGGER.info( - "{} initialization complete! 
Tables created!", getClass().getSimpleName()); - initialized = true; - } - } catch (Exception e) { - LOGGER.error("Error initializing and setting up keyspace and table in cassandra", e); - throw e; - } - } - - private String getCreateKeyspaceStatement() { - return SchemaBuilder.createKeyspace(properties.getKeyspace()) - .ifNotExists() - .with() - .replication( - ImmutableMap.of( - "class", - properties.getReplicationStrategy(), - properties.getReplicationFactorKey(), - properties.getReplicationFactorValue())) - .durableWrites(true) - .getQueryString(); - } - - private String getCreateWorkflowsTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOWS) - .ifNotExists() - .addPartitionKey(WORKFLOW_ID_KEY, DataType.uuid()) - .addPartitionKey(SHARD_ID_KEY, DataType.cint()) - .addClusteringColumn(ENTITY_KEY, DataType.text()) - .addClusteringColumn(TASK_ID_KEY, DataType.text()) - .addColumn(PAYLOAD_KEY, DataType.text()) - .addStaticColumn(TOTAL_TASKS_KEY, DataType.cint()) - .addStaticColumn(TOTAL_PARTITIONS_KEY, DataType.cint()) - .getQueryString(); - } - - private String getCreateTaskLookupTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_LOOKUP) - .ifNotExists() - .addPartitionKey(TASK_ID_KEY, DataType.uuid()) - .addColumn(WORKFLOW_ID_KEY, DataType.uuid()) - .getQueryString(); - } - - private String getCreateTaskDefLimitTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_DEF_LIMIT) - .ifNotExists() - .addPartitionKey(TASK_DEF_NAME_KEY, DataType.text()) - .addClusteringColumn(TASK_ID_KEY, DataType.uuid()) - .addColumn(WORKFLOW_ID_KEY, DataType.uuid()) - .getQueryString(); - } - - private String getCreateWorkflowDefsTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOW_DEFS) - .ifNotExists() - .addPartitionKey(WORKFLOW_DEF_NAME_KEY, DataType.text()) - .addClusteringColumn(WORKFLOW_VERSION_KEY, DataType.cint()) - 
.addColumn(WORKFLOW_DEFINITION_KEY, DataType.text()) - .getQueryString(); - } - - private String getCreateWorkflowDefsIndexTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOW_DEFS_INDEX) - .ifNotExists() - .addPartitionKey(WORKFLOW_DEF_INDEX_KEY, DataType.text()) - .addClusteringColumn(WORKFLOW_DEF_NAME_VERSION_KEY, DataType.text()) - .addColumn(WORKFLOW_DEF_INDEX_VALUE, DataType.text()) - .getQueryString(); - } - - private String getCreateTaskDefsTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_DEFS) - .ifNotExists() - .addPartitionKey(TASK_DEFS_KEY, DataType.text()) - .addClusteringColumn(TASK_DEF_NAME_KEY, DataType.text()) - .addColumn(TASK_DEFINITION_KEY, DataType.text()) - .getQueryString(); - } - - private String getCreateEventHandlersTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_EVENT_HANDLERS) - .ifNotExists() - .addPartitionKey(HANDLERS_KEY, DataType.text()) - .addClusteringColumn(EVENT_HANDLER_NAME_KEY, DataType.text()) - .addColumn(EVENT_HANDLER_KEY, DataType.text()) - .getQueryString(); - } - - private String getCreateEventExecutionsTableStatement() { - return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_EVENT_EXECUTIONS) - .ifNotExists() - .addPartitionKey(MESSAGE_ID_KEY, DataType.text()) - .addPartitionKey(EVENT_HANDLER_NAME_KEY, DataType.text()) - .addClusteringColumn(EVENT_EXECUTION_ID_KEY, DataType.text()) - .addColumn(PAYLOAD_KEY, DataType.text()) - .getQueryString(); - } - - String toJson(Object value) { - try { - return objectMapper.writeValueAsString(value); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - T readValue(String json, Class clazz) { - try { - return objectMapper.readValue(json, clazz); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - void recordCassandraDaoRequests(String action) { - recordCassandraDaoRequests(action, "n/a", "n/a"); - } 
- - void recordCassandraDaoRequests(String action, String taskType, String workflowType) { - Monitors.recordDaoRequests(DAO_NAME, action, taskType, workflowType); - } - - void recordCassandraDaoEventRequests(String action, String event) { - Monitors.recordDaoEventRequests(DAO_NAME, action, event); - } - - void recordCassandraDaoPayloadSize( - String action, int size, String taskType, String workflowType) { - Monitors.recordDaoPayloadSize(DAO_NAME, action, taskType, workflowType, size); - } - - static class WorkflowMetadata { - - private int totalTasks; - private int totalPartitions; - - public int getTotalTasks() { - return totalTasks; - } - - public void setTotalTasks(int totalTasks) { - this.totalTasks = totalTasks; - } - - public int getTotalPartitions() { - return totalPartitions; - } - - public void setTotalPartitions(int totalPartitions) { - this.totalPartitions = totalPartitions; - } - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAO.java deleted file mode 100644 index ce797cea9..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAO.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.cassandra.config.CassandraProperties; -import com.netflix.conductor.cassandra.util.Statements; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.metrics.Monitors; - -import com.datastax.driver.core.PreparedStatement; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY; -import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY; - -@Trace -public class CassandraEventHandlerDAO extends CassandraBaseDAO implements EventHandlerDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraEventHandlerDAO.class); - private static final String CLASS_NAME = CassandraEventHandlerDAO.class.getSimpleName(); - - private volatile Map eventHandlerCache = new HashMap<>(); - - private final PreparedStatement insertEventHandlerStatement; - private final 
PreparedStatement selectAllEventHandlersStatement; - private final PreparedStatement deleteEventHandlerStatement; - - public CassandraEventHandlerDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - super(session, objectMapper, properties); - - insertEventHandlerStatement = - session.prepare(statements.getInsertEventHandlerStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - selectAllEventHandlersStatement = - session.prepare(statements.getSelectAllEventHandlersStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - deleteEventHandlerStatement = - session.prepare(statements.getDeleteEventHandlerStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - - long cacheRefreshTime = properties.getEventHandlerCacheRefreshInterval().getSeconds(); - Executors.newSingleThreadScheduledExecutor() - .scheduleWithFixedDelay( - this::refreshEventHandlersCache, 0, cacheRefreshTime, TimeUnit.SECONDS); - } - - @Override - public void addEventHandler(EventHandler eventHandler) { - insertOrUpdateEventHandler(eventHandler); - } - - @Override - public void updateEventHandler(EventHandler eventHandler) { - insertOrUpdateEventHandler(eventHandler); - } - - @Override - public void removeEventHandler(String name) { - try { - recordCassandraDaoRequests("removeEventHandler"); - session.execute(deleteEventHandlerStatement.bind(name)); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeEventHandler"); - String errorMsg = String.format("Failed to remove event handler: %s", name); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - refreshEventHandlersCache(); - } - - @Override - public List getAllEventHandlers() { - if (eventHandlerCache.size() == 0) { - refreshEventHandlersCache(); - } - return new ArrayList<>(eventHandlerCache.values()); - } - - @Override - public List getEventHandlersForEvent(String event, 
boolean activeOnly) { - if (activeOnly) { - return getAllEventHandlers().stream() - .filter(eventHandler -> eventHandler.getEvent().equals(event)) - .filter(EventHandler::isActive) - .collect(Collectors.toList()); - } else { - return getAllEventHandlers().stream() - .filter(eventHandler -> eventHandler.getEvent().equals(event)) - .collect(Collectors.toList()); - } - } - - private void refreshEventHandlersCache() { - if (session.isClosed()) { - LOGGER.warn("session is closed"); - return; - } - try { - Map map = new HashMap<>(); - getAllEventHandlersFromDB() - .forEach(eventHandler -> map.put(eventHandler.getName(), eventHandler)); - this.eventHandlerCache = map; - LOGGER.debug("Refreshed event handlers, total num: " + this.eventHandlerCache.size()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "refreshEventHandlersCache"); - LOGGER.error("refresh EventHandlers failed", e); - } - } - - @SuppressWarnings("unchecked") - private List getAllEventHandlersFromDB() { - try { - ResultSet resultSet = - session.execute(selectAllEventHandlersStatement.bind(HANDLERS_KEY)); - List rows = resultSet.all(); - if (rows.size() == 0) { - LOGGER.info("No event handlers were found."); - return Collections.EMPTY_LIST; - } - return rows.stream() - .map(row -> readValue(row.getString(EVENT_HANDLER_KEY), EventHandler.class)) - .collect(Collectors.toList()); - - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getAllEventHandlersFromDB"); - String errorMsg = "Failed to get all event handlers"; - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private void insertOrUpdateEventHandler(EventHandler eventHandler) { - try { - String handler = toJson(eventHandler); - session.execute(insertEventHandlerStatement.bind(eventHandler.getName(), handler)); - recordCassandraDaoRequests("storeEventHandler"); - recordCassandraDaoPayloadSize("storeEventHandler", handler.length(), "n/a", "n/a"); - } catch (Exception e) { - 
Monitors.error(CLASS_NAME, "insertOrUpdateEventHandler"); - String errorMsg = - String.format( - "Error creating/updating event handler: %s/%s", - eventHandler.getName(), eventHandler.getEvent()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - refreshEventHandlersCache(); - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraExecutionDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraExecutionDAO.java deleted file mode 100644 index f8184f4b5..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraExecutionDAO.java +++ /dev/null @@ -1,889 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao; - -import java.util.*; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.cassandra.config.CassandraProperties; -import com.netflix.conductor.cassandra.util.Statements; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.ConcurrentExecutionLimitDAO; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.datastax.driver.core.*; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; - -import static com.netflix.conductor.cassandra.util.Constants.*; - -@Trace -public class CassandraExecutionDAO extends CassandraBaseDAO - implements ExecutionDAO, ConcurrentExecutionLimitDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraExecutionDAO.class); - private static final String CLASS_NAME = CassandraExecutionDAO.class.getSimpleName(); - - private final PreparedStatement insertWorkflowStatement; - private final PreparedStatement insertTaskStatement; - private final PreparedStatement insertEventExecutionStatement; - - private final PreparedStatement 
selectTotalStatement; - private final PreparedStatement selectTaskStatement; - private final PreparedStatement selectWorkflowStatement; - private final PreparedStatement selectWorkflowWithTasksStatement; - private final PreparedStatement selectTaskLookupStatement; - private final PreparedStatement selectTasksFromTaskDefLimitStatement; - private final PreparedStatement selectEventExecutionsStatement; - - private final PreparedStatement updateWorkflowStatement; - private final PreparedStatement updateTotalTasksStatement; - private final PreparedStatement updateTotalPartitionsStatement; - private final PreparedStatement updateTaskLookupStatement; - private final PreparedStatement updateTaskDefLimitStatement; - private final PreparedStatement updateEventExecutionStatement; - - private final PreparedStatement deleteWorkflowStatement; - private final PreparedStatement deleteTaskStatement; - private final PreparedStatement deleteTaskLookupStatement; - private final PreparedStatement deleteTaskDefLimitStatement; - private final PreparedStatement deleteEventExecutionStatement; - - private final int eventExecutionsTTL; - - public CassandraExecutionDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - super(session, objectMapper, properties); - - eventExecutionsTTL = (int) properties.getEventExecutionPersistenceTtl().getSeconds(); - - this.insertWorkflowStatement = - session.prepare(statements.getInsertWorkflowStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.insertTaskStatement = - session.prepare(statements.getInsertTaskStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.insertEventExecutionStatement = - session.prepare(statements.getInsertEventExecutionStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - - this.selectTotalStatement = - session.prepare(statements.getSelectTotalStatement()) - 
.setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectTaskStatement = - session.prepare(statements.getSelectTaskStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectWorkflowStatement = - session.prepare(statements.getSelectWorkflowStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectWorkflowWithTasksStatement = - session.prepare(statements.getSelectWorkflowWithTasksStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectTaskLookupStatement = - session.prepare(statements.getSelectTaskFromLookupTableStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectTasksFromTaskDefLimitStatement = - session.prepare(statements.getSelectTasksFromTaskDefLimitStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectEventExecutionsStatement = - session.prepare( - statements - .getSelectAllEventExecutionsForMessageFromEventExecutionsStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - - this.updateWorkflowStatement = - session.prepare(statements.getUpdateWorkflowStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.updateTotalTasksStatement = - session.prepare(statements.getUpdateTotalTasksStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.updateTotalPartitionsStatement = - session.prepare(statements.getUpdateTotalPartitionsStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.updateTaskLookupStatement = - session.prepare(statements.getUpdateTaskLookupStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.updateTaskDefLimitStatement = - session.prepare(statements.getUpdateTaskDefLimitStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.updateEventExecutionStatement = - session.prepare(statements.getUpdateEventExecutionStatement()) - 
.setConsistencyLevel(properties.getWriteConsistencyLevel()); - - this.deleteWorkflowStatement = - session.prepare(statements.getDeleteWorkflowStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteTaskStatement = - session.prepare(statements.getDeleteTaskStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteTaskLookupStatement = - session.prepare(statements.getDeleteTaskLookupStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteTaskDefLimitStatement = - session.prepare(statements.getDeleteTaskDefLimitStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteEventExecutionStatement = - session.prepare(statements.getDeleteEventExecutionsStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - } - - @Override - public List getPendingTasksByWorkflow(String taskName, String workflowId) { - List tasks = getTasksForWorkflow(workflowId); - return tasks.stream() - .filter(task -> taskName.equals(task.getTaskType())) - .filter(task -> TaskModel.Status.IN_PROGRESS.equals(task.getStatus())) - .collect(Collectors.toList()); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getTasks(String taskType, String startKey, int count) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - /** - * Inserts tasks into the Cassandra datastore. Note: Creates the task_id to workflow_id - * mapping in the task_lookup table first. Once this succeeds, inserts the tasks into the - * workflows table. Tasks belonging to the same shard are created using batch statements. 
- * - * @param tasks tasks to be created - */ - @Override - public List createTasks(List tasks) { - validateTasks(tasks); - String workflowId = tasks.get(0).getWorkflowInstanceId(); - try { - WorkflowMetadata workflowMetadata = getWorkflowMetadata(workflowId); - int totalTasks = workflowMetadata.getTotalTasks() + tasks.size(); - // TODO: write into multiple shards based on number of tasks - - // update the task_lookup table - tasks.forEach( - task -> { - task.setScheduledTime(System.currentTimeMillis()); - session.execute( - updateTaskLookupStatement.bind( - UUID.fromString(workflowId), - UUID.fromString(task.getTaskId()))); - }); - - // update all the tasks in the workflow using batch - BatchStatement batchStatement = new BatchStatement(); - tasks.forEach( - task -> { - String taskPayload = toJson(task); - batchStatement.add( - insertTaskStatement.bind( - UUID.fromString(workflowId), - DEFAULT_SHARD_ID, - task.getTaskId(), - taskPayload)); - recordCassandraDaoRequests( - "createTask", task.getTaskType(), task.getWorkflowType()); - recordCassandraDaoPayloadSize( - "createTask", - taskPayload.length(), - task.getTaskType(), - task.getWorkflowType()); - }); - batchStatement.add( - updateTotalTasksStatement.bind( - totalTasks, UUID.fromString(workflowId), DEFAULT_SHARD_ID)); - session.execute(batchStatement); - - // update the total tasks and partitions for the workflow - session.execute( - updateTotalPartitionsStatement.bind( - DEFAULT_TOTAL_PARTITIONS, totalTasks, UUID.fromString(workflowId))); - - return tasks; - } catch (ApplicationException e) { - throw e; - } catch (Exception e) { - Monitors.error(CLASS_NAME, "createTasks"); - String errorMsg = - String.format( - "Error creating %d tasks for workflow: %s", tasks.size(), workflowId); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public void updateTask(TaskModel task) { - try { - // TODO: calculate the shard number the task belongs to - 
String taskPayload = toJson(task); - recordCassandraDaoRequests("updateTask", task.getTaskType(), task.getWorkflowType()); - recordCassandraDaoPayloadSize( - "updateTask", taskPayload.length(), task.getTaskType(), task.getWorkflowType()); - session.execute( - insertTaskStatement.bind( - UUID.fromString(task.getWorkflowInstanceId()), - DEFAULT_SHARD_ID, - task.getTaskId(), - taskPayload)); - if (task.getTaskDefinition().isPresent() - && task.getTaskDefinition().get().concurrencyLimit() > 0) { - if (task.getStatus().isTerminal()) { - removeTaskFromLimit(task); - } else if (task.getStatus() == TaskModel.Status.IN_PROGRESS) { - addTaskToLimit(task); - } - } - } catch (Exception e) { - Monitors.error(CLASS_NAME, "updateTask"); - String errorMsg = - String.format( - "Error updating task: %s in workflow: %s", - task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public boolean exceedsLimit(TaskModel task) { - Optional taskDefinition = task.getTaskDefinition(); - if (taskDefinition.isEmpty()) { - return false; - } - int limit = taskDefinition.get().concurrencyLimit(); - if (limit <= 0) { - return false; - } - - try { - recordCassandraDaoRequests( - "selectTaskDefLimit", task.getTaskType(), task.getWorkflowType()); - ResultSet resultSet = - session.execute( - selectTasksFromTaskDefLimitStatement.bind(task.getTaskDefName())); - List taskIds = - resultSet.all().stream() - .map(row -> row.getUUID(TASK_ID_KEY).toString()) - .collect(Collectors.toList()); - long current = taskIds.size(); - - if (!taskIds.contains(task.getTaskId()) && current >= limit) { - LOGGER.info( - "Task execution count limited. 
task - {}:{}, limit: {}, current: {}", - task.getTaskId(), - task.getTaskDefName(), - limit, - current); - Monitors.recordTaskConcurrentExecutionLimited(task.getTaskDefName(), limit); - return true; - } - } catch (Exception e) { - Monitors.error(CLASS_NAME, "exceedsLimit"); - String errorMsg = - String.format( - "Failed to get in progress limit - %s:%s in workflow :%s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - return false; - } - - @Override - public boolean removeTask(String taskId) { - TaskModel task = getTask(taskId); - if (task == null) { - LOGGER.warn("No such task found by id {}", taskId); - return false; - } - return removeTask(task); - } - - @Override - public TaskModel getTask(String taskId) { - try { - String workflowId = lookupWorkflowIdFromTaskId(taskId); - if (workflowId == null) { - return null; - } - // TODO: implement for query against multiple shards - - ResultSet resultSet = - session.execute( - selectTaskStatement.bind( - UUID.fromString(workflowId), DEFAULT_SHARD_ID, taskId)); - return Optional.ofNullable(resultSet.one()) - .map( - row -> { - TaskModel task = - readValue(row.getString(PAYLOAD_KEY), TaskModel.class); - recordCassandraDaoRequests( - "getTask", task.getTaskType(), task.getWorkflowType()); - recordCassandraDaoPayloadSize( - "getTask", - toJson(task).length(), - task.getTaskType(), - task.getWorkflowType()); - return task; - }) - .orElse(null); - } catch (ApplicationException ae) { - throw ae; - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getTask"); - String errorMsg = String.format("Error getting task by id: %s", taskId); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @Override - public List getTasks(List taskIds) { - Preconditions.checkNotNull(taskIds); - Preconditions.checkArgument(taskIds.size() > 0, "Task ids list cannot be empty"); 
- String workflowId = lookupWorkflowIdFromTaskId(taskIds.get(0)); - if (workflowId == null) { - return null; - } - return getWorkflow(workflowId, true).getTasks().stream() - .filter(task -> taskIds.contains(task.getTaskId())) - .collect(Collectors.toList()); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getPendingTasksForTaskType(String taskType) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - @Override - public List getTasksForWorkflow(String workflowId) { - return getWorkflow(workflowId, true).getTasks(); - } - - @Override - public String createWorkflow(WorkflowModel workflow) { - try { - List tasks = workflow.getTasks(); - workflow.setTasks(new LinkedList<>()); - String payload = toJson(workflow); - - recordCassandraDaoRequests("createWorkflow", "n/a", workflow.getWorkflowName()); - recordCassandraDaoPayloadSize( - "createWorkflow", payload.length(), "n/a", workflow.getWorkflowName()); - session.execute( - insertWorkflowStatement.bind( - UUID.fromString(workflow.getWorkflowId()), 1, "", payload, 0, 1)); - - workflow.setTasks(tasks); - return workflow.getWorkflowId(); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "createWorkflow"); - String errorMsg = - String.format("Error creating workflow: %s", workflow.getWorkflowId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public String updateWorkflow(WorkflowModel workflow) { - try { - List tasks = workflow.getTasks(); - workflow.setTasks(new LinkedList<>()); - String payload = toJson(workflow); - recordCassandraDaoRequests("updateWorkflow", "n/a", workflow.getWorkflowName()); - recordCassandraDaoPayloadSize( - "updateWorkflow", payload.length(), "n/a", workflow.getWorkflowName()); - session.execute( - 
updateWorkflowStatement.bind( - payload, UUID.fromString(workflow.getWorkflowId()))); - workflow.setTasks(tasks); - return workflow.getWorkflowId(); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "updateWorkflow"); - String errorMsg = - String.format("Failed to update workflow: %s", workflow.getWorkflowId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @Override - public boolean removeWorkflow(String workflowId) { - WorkflowModel workflow = getWorkflow(workflowId, true); - boolean removed = false; - // TODO: calculate number of shards and iterate - if (workflow != null) { - try { - recordCassandraDaoRequests("removeWorkflow", "n/a", workflow.getWorkflowName()); - ResultSet resultSet = - session.execute( - deleteWorkflowStatement.bind( - UUID.fromString(workflowId), DEFAULT_SHARD_ID)); - removed = resultSet.wasApplied(); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeWorkflow"); - String errorMsg = String.format("Failed to remove workflow: %s", workflowId); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - workflow.getTasks().forEach(this::removeTaskLookup); - } - return removed; - } - - /** - * This is a dummy implementation and this feature is not yet implemented for Cassandra backed - * Conductor - */ - @Override - public boolean removeWorkflowWithExpiry(String workflowId, int ttlSeconds) { - throw new UnsupportedOperationException( - "This method is not currently implemented in CassandraExecutionDAO. Please use RedisDAO mode instead now for using TTLs."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public void removeFromPendingWorkflow(String workflowType, String workflowId) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. 
Please use ExecutionDAOFacade instead."); - } - - @Override - public WorkflowModel getWorkflow(String workflowId) { - return getWorkflow(workflowId, true); - } - - @Override - public WorkflowModel getWorkflow(String workflowId, boolean includeTasks) { - WorkflowModel workflow = null; - try { - ResultSet resultSet; - if (includeTasks) { - resultSet = - session.execute( - selectWorkflowWithTasksStatement.bind( - UUID.fromString(workflowId), DEFAULT_SHARD_ID)); - List tasks = new ArrayList<>(); - - List rows = resultSet.all(); - if (rows.size() == 0) { - LOGGER.info("Workflow {} not found in datastore", workflowId); - return null; - } - for (Row row : rows) { - String entityKey = row.getString(ENTITY_KEY); - if (ENTITY_TYPE_WORKFLOW.equals(entityKey)) { - workflow = readValue(row.getString(PAYLOAD_KEY), WorkflowModel.class); - } else if (ENTITY_TYPE_TASK.equals(entityKey)) { - TaskModel task = readValue(row.getString(PAYLOAD_KEY), TaskModel.class); - tasks.add(task); - } else { - throw new ApplicationException( - ApplicationException.Code.INTERNAL_ERROR, - String.format( - "Invalid row with entityKey: %s found in datastore for workflow: %s", - entityKey, workflowId)); - } - } - - if (workflow != null) { - recordCassandraDaoRequests("getWorkflow", "n/a", workflow.getWorkflowName()); - tasks.sort(Comparator.comparingInt(TaskModel::getSeq)); - workflow.setTasks(tasks); - } - } else { - resultSet = - session.execute(selectWorkflowStatement.bind(UUID.fromString(workflowId))); - workflow = - Optional.ofNullable(resultSet.one()) - .map( - row -> { - WorkflowModel wf = - readValue( - row.getString(PAYLOAD_KEY), - WorkflowModel.class); - recordCassandraDaoRequests( - "getWorkflow", "n/a", wf.getWorkflowName()); - return wf; - }) - .orElse(null); - } - return workflow; - } catch (ApplicationException e) { - throw e; - } catch (IllegalArgumentException e) { - Monitors.error(CLASS_NAME, "getWorkflow"); - String errorMsg = String.format("Invalid workflow id: %s", workflowId); - 
LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.INVALID_INPUT, errorMsg, e); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getWorkflow"); - String errorMsg = String.format("Failed to get workflow: %s", workflowId); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getRunningWorkflowIds(String workflowName, int version) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getPendingWorkflowsByType(String workflowName, int version) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public long getPendingWorkflowCount(String workflowName) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public long getInProgressTaskCount(String taskDefName) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. 
Please use ExecutionDAOFacade instead."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getWorkflowsByType( - String workflowName, Long startTime, Long endTime) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - /** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor - */ - @Override - public List getWorkflowsByCorrelationId( - String workflowName, String correlationId, boolean includeTasks) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraExecutionDAO. Please use ExecutionDAOFacade instead."); - } - - @Override - public boolean canSearchAcrossWorkflows() { - return false; - } - - @Override - public boolean addEventExecution(EventExecution eventExecution) { - try { - String jsonPayload = toJson(eventExecution); - recordCassandraDaoEventRequests("addEventExecution", eventExecution.getEvent()); - recordCassandraDaoPayloadSize( - "addEventExecution", jsonPayload.length(), eventExecution.getEvent(), "n/a"); - return session.execute( - insertEventExecutionStatement.bind( - eventExecution.getMessageId(), - eventExecution.getName(), - eventExecution.getId(), - jsonPayload)) - .wasApplied(); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "addEventExecution"); - String errorMsg = - String.format( - "Failed to add event execution for event: %s, handler: %s", - eventExecution.getEvent(), eventExecution.getName()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @Override - public void updateEventExecution(EventExecution eventExecution) { - try { - String jsonPayload = toJson(eventExecution); - recordCassandraDaoEventRequests("updateEventExecution", eventExecution.getEvent()); - recordCassandraDaoPayloadSize( - 
"updateEventExecution", jsonPayload.length(), eventExecution.getEvent(), "n/a"); - session.execute( - updateEventExecutionStatement.bind( - eventExecutionsTTL, - jsonPayload, - eventExecution.getMessageId(), - eventExecution.getName(), - eventExecution.getId())); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "updateEventExecution"); - String errorMsg = - String.format( - "Failed to update event execution for event: %s, handler: %s", - eventExecution.getEvent(), eventExecution.getName()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @Override - public void removeEventExecution(EventExecution eventExecution) { - try { - recordCassandraDaoEventRequests("removeEventExecution", eventExecution.getEvent()); - session.execute( - deleteEventExecutionStatement.bind( - eventExecution.getMessageId(), - eventExecution.getName(), - eventExecution.getId())); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeEventExecution"); - String errorMsg = - String.format( - "Failed to remove event execution for event: %s, handler: %s", - eventExecution.getEvent(), eventExecution.getName()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @VisibleForTesting - List getEventExecutions( - String eventHandlerName, String eventName, String messageId) { - try { - return session - .execute(selectEventExecutionsStatement.bind(messageId, eventHandlerName)) - .all() - .stream() - .filter(row -> !row.isNull(PAYLOAD_KEY)) - .map(row -> readValue(row.getString(PAYLOAD_KEY), EventExecution.class)) - .collect(Collectors.toList()); - } catch (Exception e) { - String errorMsg = - String.format( - "Failed to fetch event executions for event: %s, handler: %s", - eventName, eventHandlerName); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @Override - public void addTaskToLimit(TaskModel task) { - try { - 
recordCassandraDaoRequests( - "addTaskToLimit", task.getTaskType(), task.getWorkflowType()); - session.execute( - updateTaskDefLimitStatement.bind( - UUID.fromString(task.getWorkflowInstanceId()), - task.getTaskDefName(), - UUID.fromString(task.getTaskId()))); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "addTaskToLimit"); - String errorMsg = - String.format( - "Error updating taskDefLimit for task - %s:%s in workflow: %s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public void removeTaskFromLimit(TaskModel task) { - try { - recordCassandraDaoRequests( - "removeTaskFromLimit", task.getTaskType(), task.getWorkflowType()); - session.execute( - deleteTaskDefLimitStatement.bind( - task.getTaskDefName(), UUID.fromString(task.getTaskId()))); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeTaskFromLimit"); - String errorMsg = - String.format( - "Error updating taskDefLimit for task - %s:%s in workflow: %s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private boolean removeTask(TaskModel task) { - // TODO: calculate shard number based on seq and maxTasksPerShard - try { - // get total tasks for this workflow - WorkflowMetadata workflowMetadata = getWorkflowMetadata(task.getWorkflowInstanceId()); - int totalTasks = workflowMetadata.getTotalTasks(); - - // remove from task_lookup table - removeTaskLookup(task); - - recordCassandraDaoRequests("removeTask", task.getTaskType(), task.getWorkflowType()); - // delete task from workflows table and decrement total tasks by 1 - BatchStatement batchStatement = new BatchStatement(); - batchStatement.add( - deleteTaskStatement.bind( - UUID.fromString(task.getWorkflowInstanceId()), - DEFAULT_SHARD_ID, - task.getTaskId())); - 
batchStatement.add( - updateTotalTasksStatement.bind( - totalTasks - 1, - UUID.fromString(task.getWorkflowInstanceId()), - DEFAULT_SHARD_ID)); - ResultSet resultSet = session.execute(batchStatement); - if (task.getTaskDefinition().isPresent() - && task.getTaskDefinition().get().concurrencyLimit() > 0) { - removeTaskFromLimit(task); - } - return resultSet.wasApplied(); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeTask"); - String errorMsg = String.format("Failed to remove task: %s", task.getTaskId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - private void removeTaskLookup(TaskModel task) { - try { - recordCassandraDaoRequests( - "removeTaskLookup", task.getTaskType(), task.getWorkflowType()); - if (task.getTaskDefinition().isPresent() - && task.getTaskDefinition().get().concurrencyLimit() > 0) { - removeTaskFromLimit(task); - } - session.execute(deleteTaskLookupStatement.bind(UUID.fromString(task.getTaskId()))); - } catch (ApplicationException ae) { - // no-op - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeTaskLookup"); - String errorMsg = String.format("Failed to remove task lookup: %s", task.getTaskId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg); - } - } - - @VisibleForTesting - void validateTasks(List tasks) { - Preconditions.checkNotNull(tasks, "Tasks object cannot be null"); - Preconditions.checkArgument(!tasks.isEmpty(), "Tasks object cannot be empty"); - tasks.forEach( - task -> { - Preconditions.checkNotNull(task, "task object cannot be null"); - Preconditions.checkNotNull(task.getTaskId(), "Task id cannot be null"); - Preconditions.checkNotNull( - task.getWorkflowInstanceId(), "Workflow instance id cannot be null"); - Preconditions.checkNotNull( - task.getReferenceTaskName(), "Task reference name cannot be null"); - }); - - String workflowId = tasks.get(0).getWorkflowInstanceId(); - Optional optionalTask = - 
tasks.stream() - .filter(task -> !workflowId.equals(task.getWorkflowInstanceId())) - .findAny(); - if (optionalTask.isPresent()) { - throw new ApplicationException( - Code.INTERNAL_ERROR, - "Tasks of multiple workflows cannot be created/updated simultaneously"); - } - } - - @VisibleForTesting - WorkflowMetadata getWorkflowMetadata(String workflowId) { - ResultSet resultSet = - session.execute(selectTotalStatement.bind(UUID.fromString(workflowId))); - recordCassandraDaoRequests("getWorkflowMetadata"); - return Optional.ofNullable(resultSet.one()) - .map( - row -> { - WorkflowMetadata workflowMetadata = new WorkflowMetadata(); - workflowMetadata.setTotalTasks(row.getInt(TOTAL_TASKS_KEY)); - workflowMetadata.setTotalPartitions(row.getInt(TOTAL_PARTITIONS_KEY)); - return workflowMetadata; - }) - .orElseThrow( - () -> - new ApplicationException( - Code.NOT_FOUND, - String.format( - "Workflow with id: %s not found in data store", - workflowId))); - } - - @VisibleForTesting - String lookupWorkflowIdFromTaskId(String taskId) { - try { - ResultSet resultSet = - session.execute(selectTaskLookupStatement.bind(UUID.fromString(taskId))); - return Optional.ofNullable(resultSet.one()) - .map(row -> row.getUUID(WORKFLOW_ID_KEY).toString()) - .orElse(null); - } catch (IllegalArgumentException iae) { - Monitors.error(CLASS_NAME, "lookupWorkflowIdFromTaskId"); - String errorMsg = String.format("Invalid task id: %s", taskId); - LOGGER.error(errorMsg, iae); - throw new ApplicationException(Code.INVALID_INPUT, errorMsg, iae); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "lookupWorkflowIdFromTaskId"); - String errorMsg = String.format("Failed to lookup workflowId from taskId: %s", taskId); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraMetadataDAO.java 
b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraMetadataDAO.java deleted file mode 100644 index 0187034b5..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraMetadataDAO.java +++ /dev/null @@ -1,431 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.cassandra.config.CassandraProperties; -import com.netflix.conductor.cassandra.util.Statements; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.metrics.Monitors; - -import com.datastax.driver.core.PreparedStatement; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY; -import 
static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY; - -@Trace -public class CassandraMetadataDAO extends CassandraBaseDAO implements MetadataDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraMetadataDAO.class); - private static final String CLASS_NAME = CassandraMetadataDAO.class.getSimpleName(); - private static final String INDEX_DELIMITER = "/"; - - private Map taskDefCache = new HashMap<>(); - - private final PreparedStatement insertWorkflowDefStatement; - private final PreparedStatement insertWorkflowDefVersionIndexStatement; - private final PreparedStatement insertTaskDefStatement; - - private final PreparedStatement selectWorkflowDefStatement; - private final PreparedStatement selectAllWorkflowDefVersionsByNameStatement; - private final PreparedStatement selectAllWorkflowDefsStatement; - private final PreparedStatement selectTaskDefStatement; - private final PreparedStatement selectAllTaskDefsStatement; - - private final PreparedStatement updateWorkflowDefStatement; - - private final PreparedStatement deleteWorkflowDefStatement; - private final PreparedStatement deleteWorkflowDefIndexStatement; - private final PreparedStatement deleteTaskDefStatement; - - public CassandraMetadataDAO( - Session session, - ObjectMapper objectMapper, - CassandraProperties properties, - Statements statements) { - super(session, objectMapper, properties); - - this.insertWorkflowDefStatement = - session.prepare(statements.getInsertWorkflowDefStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.insertWorkflowDefVersionIndexStatement = - session.prepare(statements.getInsertWorkflowDefVersionIndexStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.insertTaskDefStatement = - session.prepare(statements.getInsertTaskDefStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - - this.selectWorkflowDefStatement = - 
session.prepare(statements.getSelectWorkflowDefStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectAllWorkflowDefVersionsByNameStatement = - session.prepare(statements.getSelectAllWorkflowDefVersionsByNameStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectAllWorkflowDefsStatement = - session.prepare(statements.getSelectAllWorkflowDefsStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectTaskDefStatement = - session.prepare(statements.getSelectTaskDefStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - this.selectAllTaskDefsStatement = - session.prepare(statements.getSelectAllTaskDefsStatement()) - .setConsistencyLevel(properties.getReadConsistencyLevel()); - - this.updateWorkflowDefStatement = - session.prepare(statements.getUpdateWorkflowDefStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - - this.deleteWorkflowDefStatement = - session.prepare(statements.getDeleteWorkflowDefStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteWorkflowDefIndexStatement = - session.prepare(statements.getDeleteWorkflowDefIndexStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - this.deleteTaskDefStatement = - session.prepare(statements.getDeleteTaskDefStatement()) - .setConsistencyLevel(properties.getWriteConsistencyLevel()); - - long cacheRefreshTime = properties.getTaskDefCacheRefreshInterval().getSeconds(); - Executors.newSingleThreadScheduledExecutor() - .scheduleWithFixedDelay( - this::refreshTaskDefsCache, 0, cacheRefreshTime, TimeUnit.SECONDS); - } - - @Override - public void createTaskDef(TaskDef taskDef) { - insertOrUpdateTaskDef(taskDef); - } - - @Override - public String updateTaskDef(TaskDef taskDef) { - return insertOrUpdateTaskDef(taskDef); - } - - @Override - public TaskDef getTaskDef(String name) { - return 
Optional.ofNullable(taskDefCache.get(name)).orElseGet(() -> getTaskDefFromDB(name)); - } - - @Override - public List getAllTaskDefs() { - if (taskDefCache.size() == 0) { - refreshTaskDefsCache(); - } - return new ArrayList<>(taskDefCache.values()); - } - - @Override - public void removeTaskDef(String name) { - try { - recordCassandraDaoRequests("removeTaskDef"); - session.execute(deleteTaskDefStatement.bind(name)); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeTaskDef"); - String errorMsg = String.format("Failed to remove task definition: %s", name); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - refreshTaskDefsCache(); - } - - @Override - public void createWorkflowDef(WorkflowDef workflowDef) { - try { - String workflowDefinition = toJson(workflowDef); - if (!session.execute( - insertWorkflowDefStatement.bind( - workflowDef.getName(), - workflowDef.getVersion(), - workflowDefinition)) - .wasApplied()) { - throw new ApplicationException( - Code.CONFLICT, - String.format( - "Workflow: %s, version: %s already exists!", - workflowDef.getName(), workflowDef.getVersion())); - } - String workflowDefIndex = - getWorkflowDefIndexValue(workflowDef.getName(), workflowDef.getVersion()); - session.execute( - insertWorkflowDefVersionIndexStatement.bind( - workflowDefIndex, workflowDefIndex)); - recordCassandraDaoRequests("createWorkflowDef"); - recordCassandraDaoPayloadSize( - "createWorkflowDef", workflowDefinition.length(), "n/a", workflowDef.getName()); - } catch (ApplicationException ae) { - throw ae; - } catch (Exception e) { - Monitors.error(CLASS_NAME, "createWorkflowDef"); - String errorMsg = - String.format( - "Error creating workflow definition: %s/%d", - workflowDef.getName(), workflowDef.getVersion()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public void updateWorkflowDef(WorkflowDef workflowDef) { 
- try { - String workflowDefinition = toJson(workflowDef); - session.execute( - updateWorkflowDefStatement.bind( - workflowDefinition, workflowDef.getName(), workflowDef.getVersion())); - String workflowDefIndex = - getWorkflowDefIndexValue(workflowDef.getName(), workflowDef.getVersion()); - session.execute( - insertWorkflowDefVersionIndexStatement.bind( - workflowDefIndex, workflowDefIndex)); - recordCassandraDaoRequests("updateWorkflowDef"); - recordCassandraDaoPayloadSize( - "updateWorkflowDef", workflowDefinition.length(), "n/a", workflowDef.getName()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "updateWorkflowDef"); - String errorMsg = - String.format( - "Error updating workflow definition: %s/%d", - workflowDef.getName(), workflowDef.getVersion()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public Optional getLatestWorkflowDef(String name) { - List workflowDefList = getAllWorkflowDefVersions(name); - if (workflowDefList != null && workflowDefList.size() > 0) { - workflowDefList.sort(Comparator.comparingInt(WorkflowDef::getVersion)); - return Optional.of(workflowDefList.get(workflowDefList.size() - 1)); - } - return Optional.empty(); - } - - @Override - public Optional getWorkflowDef(String name, int version) { - try { - recordCassandraDaoRequests("getWorkflowDef"); - ResultSet resultSet = session.execute(selectWorkflowDefStatement.bind(name, version)); - WorkflowDef workflowDef = - Optional.ofNullable(resultSet.one()) - .map( - row -> - readValue( - row.getString(WORKFLOW_DEFINITION_KEY), - WorkflowDef.class)) - .orElse(null); - return Optional.ofNullable(workflowDef); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getTaskDef"); - String errorMsg = String.format("Error fetching workflow def: %s/%d", name, version); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @Override - public 
void removeWorkflowDef(String name, Integer version) { - try { - session.execute(deleteWorkflowDefStatement.bind(name, version)); - session.execute( - deleteWorkflowDefIndexStatement.bind( - WORKFLOW_DEF_INDEX_KEY, getWorkflowDefIndexValue(name, version))); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeWorkflowDef"); - String errorMsg = - String.format("Failed to remove workflow definition: %s/%d", name, version); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @SuppressWarnings("unchecked") - @Override - public List getAllWorkflowDefs() { - try { - ResultSet resultSet = - session.execute(selectAllWorkflowDefsStatement.bind(WORKFLOW_DEF_INDEX_KEY)); - List rows = resultSet.all(); - if (rows.size() == 0) { - LOGGER.info("No workflow definitions were found."); - return Collections.EMPTY_LIST; - } - return rows.stream() - .map( - row -> { - String defNameVersion = - row.getString(WORKFLOW_DEF_NAME_VERSION_KEY); - var nameVersion = getWorkflowNameAndVersion(defNameVersion); - return getWorkflowDef(nameVersion.getLeft(), nameVersion.getRight()) - .orElse(null); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getAllWorkflowDefs"); - String errorMsg = "Error retrieving all workflow defs"; - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private void refreshTaskDefsCache() { - if (session.isClosed()) { - LOGGER.warn("session is closed"); - return; - } - try { - Map map = new HashMap<>(); - getAllTaskDefsFromDB().forEach(taskDef -> map.put(taskDef.getName(), taskDef)); - this.taskDefCache = map; - LOGGER.debug("Refreshed task defs, total num: " + this.taskDefCache.size()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "refreshTaskDefs"); - LOGGER.error("refresh TaskDefs failed ", e); - } - } - - private TaskDef getTaskDefFromDB(String name) { - try { - 
ResultSet resultSet = session.execute(selectTaskDefStatement.bind(name)); - recordCassandraDaoRequests("getTaskDef"); - return Optional.ofNullable(resultSet.one()) - .map(row -> readValue(row.getString(TASK_DEFINITION_KEY), TaskDef.class)) - .orElse(null); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getTaskDef"); - String errorMsg = String.format("Failed to get task def: %s", name); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - @SuppressWarnings("unchecked") - private List getAllTaskDefsFromDB() { - try { - ResultSet resultSet = session.execute(selectAllTaskDefsStatement.bind(TASK_DEFS_KEY)); - List rows = resultSet.all(); - if (rows.size() == 0) { - LOGGER.info("No task definitions were found."); - return Collections.EMPTY_LIST; - } - return rows.stream() - .map(row -> readValue(row.getString(TASK_DEFINITION_KEY), TaskDef.class)) - .collect(Collectors.toList()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getAllTaskDefs"); - String errorMsg = "Failed to get all task defs"; - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private List getAllWorkflowDefVersions(String name) { - try { - ResultSet resultSet = - session.execute(selectAllWorkflowDefVersionsByNameStatement.bind(name)); - recordCassandraDaoRequests("getAllWorkflowDefVersions", "n/a", name); - List rows = resultSet.all(); - if (rows.size() == 0) { - LOGGER.info("Not workflow definitions were found for : {}", name); - return null; - } - return rows.stream() - .map( - row -> - readValue( - row.getString(WORKFLOW_DEFINITION_KEY), - WorkflowDef.class)) - .collect(Collectors.toList()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "getAllWorkflowDefVersions"); - String errorMsg = String.format("Failed to get workflows defs for : %s", name); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private 
String insertOrUpdateTaskDef(TaskDef taskDef) { - try { - String taskDefinition = toJson(taskDef); - session.execute(insertTaskDefStatement.bind(taskDef.getName(), taskDefinition)); - recordCassandraDaoRequests("storeTaskDef"); - recordCassandraDaoPayloadSize( - "storeTaskDef", taskDefinition.length(), taskDef.getName(), "n/a"); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "insertOrUpdateTaskDef"); - String errorMsg = - String.format("Error creating/updating task definition: %s", taskDef.getName()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - refreshTaskDefsCache(); - return taskDef.getName(); - } - - @VisibleForTesting - String getWorkflowDefIndexValue(String name, int version) { - return name + INDEX_DELIMITER + version; - } - - @VisibleForTesting - ImmutablePair getWorkflowNameAndVersion(String nameVersionStr) { - int lastIndexOfDelimiter = nameVersionStr.lastIndexOf(INDEX_DELIMITER); - - if (lastIndexOfDelimiter == -1) { - throw new IllegalStateException( - nameVersionStr - + " is not in the 'workflowName" - + INDEX_DELIMITER - + "version' pattern."); - } - - String workflowName = nameVersionStr.substring(0, lastIndexOfDelimiter); - String versionStr = nameVersionStr.substring(lastIndexOfDelimiter + 1); - - try { - return new ImmutablePair<>(workflowName, Integer.parseInt(versionStr)); - } catch (NumberFormatException e) { - throw new IllegalStateException( - versionStr + " in " + nameVersionStr + " is not a valid number."); - } - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraPollDataDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraPollDataDAO.java deleted file mode 100644 index 235dd44f4..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraPollDataDAO.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao; - -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.dao.PollDataDAO; - -/** - * This is a dummy implementation and this feature is not implemented for Cassandra backed - * Conductor. - */ -public class CassandraPollDataDAO implements PollDataDAO { - - @Override - public void updateLastPollData(String taskDefName, String domain, String workerId) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraPollDataDAO. Please use ExecutionDAOFacade instead."); - } - - @Override - public PollData getPollData(String taskDefName, String domain) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraPollDataDAO. Please use ExecutionDAOFacade instead."); - } - - @Override - public List getPollData(String taskDefName) { - throw new UnsupportedOperationException( - "This method is not implemented in CassandraPollDataDAO. Please use ExecutionDAOFacade instead."); - } -} diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Constants.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Constants.java deleted file mode 100644 index 473c23132..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Constants.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.util; - -public interface Constants { - - String DAO_NAME = "cassandra"; - - String TABLE_WORKFLOWS = "workflows"; - String TABLE_TASK_LOOKUP = "task_lookup"; - String TABLE_TASK_DEF_LIMIT = "task_def_limit"; - String TABLE_WORKFLOW_DEFS = "workflow_definitions"; - String TABLE_WORKFLOW_DEFS_INDEX = "workflow_defs_index"; - String TABLE_TASK_DEFS = "task_definitions"; - String TABLE_EVENT_HANDLERS = "event_handlers"; - String TABLE_EVENT_EXECUTIONS = "event_executions"; - - String WORKFLOW_ID_KEY = "workflow_id"; - String SHARD_ID_KEY = "shard_id"; - String TASK_ID_KEY = "task_id"; - String ENTITY_KEY = "entity"; - String PAYLOAD_KEY = "payload"; - String TOTAL_TASKS_KEY = "total_tasks"; - String TOTAL_PARTITIONS_KEY = "total_partitions"; - String TASK_DEF_NAME_KEY = "task_def_name"; - String WORKFLOW_DEF_NAME_KEY = "workflow_def_name"; - String WORKFLOW_VERSION_KEY = "version"; - String WORKFLOW_DEFINITION_KEY = "workflow_definition"; - String WORKFLOW_DEF_INDEX_KEY = "workflow_def_version_index"; - String WORKFLOW_DEF_INDEX_VALUE = "workflow_def_index_value"; - String WORKFLOW_DEF_NAME_VERSION_KEY = "workflow_def_name_version"; - String TASK_DEFS_KEY = "task_defs"; - String TASK_DEFINITION_KEY = "task_definition"; - String HANDLERS_KEY = "handlers"; - String EVENT_HANDLER_NAME_KEY = "event_handler_name"; - String EVENT_HANDLER_KEY = "event_handler"; - String MESSAGE_ID_KEY = "message_id"; - String EVENT_EXECUTION_ID_KEY = "event_execution_id"; - - String ENTITY_TYPE_WORKFLOW = "workflow"; - String ENTITY_TYPE_TASK = "task"; - - int DEFAULT_SHARD_ID = 1; - int DEFAULT_TOTAL_PARTITIONS = 1; -} diff --git 
a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java deleted file mode 100644 index 38eff7d84..000000000 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java +++ /dev/null @@ -1,596 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.util; - -import com.datastax.driver.core.querybuilder.QueryBuilder; - -import static com.netflix.conductor.cassandra.util.Constants.ENTITY_KEY; -import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_TASK; -import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_WORKFLOW; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_EXECUTION_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY; -import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.MESSAGE_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.PAYLOAD_KEY; -import static com.netflix.conductor.cassandra.util.Constants.SHARD_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_EXECUTIONS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_HANDLERS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEFS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEF_LIMIT; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_LOOKUP; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOWS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS; -import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS_INDEX; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY; -import static 
com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_DEF_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TOTAL_PARTITIONS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.TOTAL_TASKS_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_VALUE; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY; -import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_VERSION_KEY; - -import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker; -import static com.datastax.driver.core.querybuilder.QueryBuilder.eq; -import static com.datastax.driver.core.querybuilder.QueryBuilder.set; - -/** - * DML statements - * - *

MetadataDAO - * - *

- * - * ExecutionDAO - * - * - * - * EventHandlerDAO - * - * - */ -public class Statements { - - private final String keyspace; - - public Statements(String keyspace) { - this.keyspace = keyspace; - } - - // MetadataDAO - // Insert Statements - - /** - * @return cql query statement to insert a new workflow definition into the - * "workflow_definitions" table - */ - public String getInsertWorkflowDefStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOW_DEFS) - .value(WORKFLOW_DEF_NAME_KEY, bindMarker()) - .value(WORKFLOW_VERSION_KEY, bindMarker()) - .value(WORKFLOW_DEFINITION_KEY, bindMarker()) - .ifNotExists() - .getQueryString(); - } - - /** - * @return cql query statement to insert a workflow def name version index into the - * "workflow_defs_index" table - */ - public String getInsertWorkflowDefVersionIndexStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOW_DEFS_INDEX) - .value(WORKFLOW_DEF_INDEX_KEY, WORKFLOW_DEF_INDEX_KEY) - .value(WORKFLOW_DEF_NAME_VERSION_KEY, bindMarker()) - .value(WORKFLOW_DEF_INDEX_VALUE, bindMarker()) - .getQueryString(); - } - - /** - * @return cql query statement to insert a new task definition into the "task_definitions" table - */ - public String getInsertTaskDefStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_TASK_DEFS) - .value(TASK_DEFS_KEY, TASK_DEFS_KEY) - .value(TASK_DEF_NAME_KEY, bindMarker()) - .value(TASK_DEFINITION_KEY, bindMarker()) - .getQueryString(); - } - - // Select Statements - - /** - * @return cql query statement to fetch a workflow definition by name and version from the - * "workflow_definitions" table - */ - public String getSelectWorkflowDefStatement() { - return QueryBuilder.select(WORKFLOW_DEFINITION_KEY) - .from(keyspace, TABLE_WORKFLOW_DEFS) - .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker())) - .and(eq(WORKFLOW_VERSION_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve all versions of a workflow definition by 
name from - * the "workflow_definitions" table - */ - public String getSelectAllWorkflowDefVersionsByNameStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_WORKFLOW_DEFS) - .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to fetch all workflow def names and version from the - * "workflow_defs_index" table - */ - public String getSelectAllWorkflowDefsStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_WORKFLOW_DEFS_INDEX) - .where(eq(WORKFLOW_DEF_INDEX_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to fetch a task definition by name from the "task_definitions" - * table - */ - public String getSelectTaskDefStatement() { - return QueryBuilder.select(TASK_DEFINITION_KEY) - .from(keyspace, TABLE_TASK_DEFS) - .where(eq(TASK_DEFS_KEY, TASK_DEFS_KEY)) - .and(eq(TASK_DEF_NAME_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve all task definitions from the "task_definitions" - * table - */ - public String getSelectAllTaskDefsStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_TASK_DEFS) - .where(eq(TASK_DEFS_KEY, bindMarker())) - .getQueryString(); - } - - // Update Statement - - /** - * @return cql query statement to update a workflow definitinos in the "workflow_definitions" - * table - */ - public String getUpdateWorkflowDefStatement() { - return QueryBuilder.update(keyspace, TABLE_WORKFLOW_DEFS) - .with(set(WORKFLOW_DEFINITION_KEY, bindMarker())) - .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker())) - .and(eq(WORKFLOW_VERSION_KEY, bindMarker())) - .getQueryString(); - } - - // Delete Statements - - /** - * @return cql query statement to delete a workflow definition by name and version from the - * "workflow_definitions" table - */ - public String getDeleteWorkflowDefStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_WORKFLOW_DEFS) - 
.where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker())) - .and(eq(WORKFLOW_VERSION_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to delete a workflow def name/version from the - * "workflow_defs_index" table - */ - public String getDeleteWorkflowDefIndexStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_WORKFLOW_DEFS_INDEX) - .where(eq(WORKFLOW_DEF_INDEX_KEY, bindMarker())) - .and(eq(WORKFLOW_DEF_NAME_VERSION_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to delete a task definition by name from the "task_definitions" - * table - */ - public String getDeleteTaskDefStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_TASK_DEFS) - .where(eq(TASK_DEFS_KEY, TASK_DEFS_KEY)) - .and(eq(TASK_DEF_NAME_KEY, bindMarker())) - .getQueryString(); - } - - // ExecutionDAO - // Insert Statements - - /** - * @return cql query statement to insert a new workflow into the "workflows" table - */ - public String getInsertWorkflowStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS) - .value(WORKFLOW_ID_KEY, bindMarker()) - .value(SHARD_ID_KEY, bindMarker()) - .value(TASK_ID_KEY, bindMarker()) - .value(ENTITY_KEY, ENTITY_TYPE_WORKFLOW) - .value(PAYLOAD_KEY, bindMarker()) - .value(TOTAL_TASKS_KEY, bindMarker()) - .value(TOTAL_PARTITIONS_KEY, bindMarker()) - .getQueryString(); - } - - /** - * @return cql query statement to insert a new task into the "workflows" table - */ - public String getInsertTaskStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS) - .value(WORKFLOW_ID_KEY, bindMarker()) - .value(SHARD_ID_KEY, bindMarker()) - .value(TASK_ID_KEY, bindMarker()) - .value(ENTITY_KEY, ENTITY_TYPE_TASK) - .value(PAYLOAD_KEY, bindMarker()) - .getQueryString(); - } - - /** - * @return cql query statement to insert a new event execution into the "event_executions" table - */ - public String getInsertEventExecutionStatement() { - return 
QueryBuilder.insertInto(keyspace, TABLE_EVENT_EXECUTIONS) - .value(MESSAGE_ID_KEY, bindMarker()) - .value(EVENT_HANDLER_NAME_KEY, bindMarker()) - .value(EVENT_EXECUTION_ID_KEY, bindMarker()) - .value(PAYLOAD_KEY, bindMarker()) - .ifNotExists() - .getQueryString(); - } - - // Select Statements - - /** - * @return cql query statement to retrieve the total_tasks and total_partitions for a workflow - * from the "workflows" table - */ - public String getSelectTotalStatement() { - return QueryBuilder.select(TOTAL_TASKS_KEY, TOTAL_PARTITIONS_KEY) - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, 1)) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve a task from the "workflows" table - */ - public String getSelectTaskStatement() { - return QueryBuilder.select(PAYLOAD_KEY) - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, bindMarker())) - .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK)) - .and(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve a workflow (without its tasks) from the "workflows" - * table - */ - public String getSelectWorkflowStatement() { - return QueryBuilder.select(PAYLOAD_KEY) - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, 1)) - .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW)) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve a workflow with its tasks from the "workflows" table - */ - public String getSelectWorkflowWithTasksStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve the workflow_id for a particular task_id from the - * "task_lookup" table - */ - public String getSelectTaskFromLookupTableStatement() { 
- return QueryBuilder.select(WORKFLOW_ID_KEY) - .from(keyspace, TABLE_TASK_LOOKUP) - .where(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve all task ids for a given taskDefName with concurrent - * execution limit configured from the "task_def_limit" table - */ - public String getSelectTasksFromTaskDefLimitStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_TASK_DEF_LIMIT) - .where(eq(TASK_DEF_NAME_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to retrieve all event executions for a given message and event - * handler from the "event_executions" table - */ - public String getSelectAllEventExecutionsForMessageFromEventExecutionsStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_EVENT_EXECUTIONS) - .where(eq(MESSAGE_ID_KEY, bindMarker())) - .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker())) - .getQueryString(); - } - - // Update Statements - - /** - * @return cql query statement to update a workflow in the "workflows" table - */ - public String getUpdateWorkflowStatement() { - return QueryBuilder.update(keyspace, TABLE_WORKFLOWS) - .with(set(PAYLOAD_KEY, bindMarker())) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, 1)) - .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW)) - .and(eq(TASK_ID_KEY, "")) - .getQueryString(); - } - - /** - * @return cql query statement to update the total_tasks in a shard for a workflow in the - * "workflows" table - */ - public String getUpdateTotalTasksStatement() { - return QueryBuilder.update(keyspace, TABLE_WORKFLOWS) - .with(set(TOTAL_TASKS_KEY, bindMarker())) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to update the total_partitions for a workflow in the "workflows" - * table - */ - public String getUpdateTotalPartitionsStatement() { - return QueryBuilder.update(keyspace, 
TABLE_WORKFLOWS) - .with(set(TOTAL_PARTITIONS_KEY, bindMarker())) - .and(set(TOTAL_TASKS_KEY, bindMarker())) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, 1)) - .getQueryString(); - } - - /** - * @return cql query statement to add a new task_id to workflow_id mapping to the "task_lookup" - * table - */ - public String getUpdateTaskLookupStatement() { - return QueryBuilder.update(keyspace, TABLE_TASK_LOOKUP) - .with(set(WORKFLOW_ID_KEY, bindMarker())) - .where(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to add a new task_id to the "task_def_limit" table - */ - public String getUpdateTaskDefLimitStatement() { - return QueryBuilder.update(keyspace, TABLE_TASK_DEF_LIMIT) - .with(set(WORKFLOW_ID_KEY, bindMarker())) - .where(eq(TASK_DEF_NAME_KEY, bindMarker())) - .and(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to update an event execution in the "event_executions" table - */ - public String getUpdateEventExecutionStatement() { - return QueryBuilder.update(keyspace, TABLE_EVENT_EXECUTIONS) - .using(QueryBuilder.ttl(bindMarker())) - .with(set(PAYLOAD_KEY, bindMarker())) - .where(eq(MESSAGE_ID_KEY, bindMarker())) - .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker())) - .and(eq(EVENT_EXECUTION_ID_KEY, bindMarker())) - .getQueryString(); - } - - // Delete statements - - /** - * @return cql query statement to delete a workflow from the "workflows" table - */ - public String getDeleteWorkflowStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to delete a task_id to workflow_id mapping from the "task_lookup" - * table - */ - public String getDeleteTaskLookupStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_TASK_LOOKUP) - .where(eq(TASK_ID_KEY, bindMarker())) - 
.getQueryString(); - } - - /** - * @return cql query statement to delete a task from the "workflows" table - */ - public String getDeleteTaskStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_WORKFLOWS) - .where(eq(WORKFLOW_ID_KEY, bindMarker())) - .and(eq(SHARD_ID_KEY, bindMarker())) - .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK)) - .and(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to delete a task_id from the "task_def_limit" table - */ - public String getDeleteTaskDefLimitStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_TASK_DEF_LIMIT) - .where(eq(TASK_DEF_NAME_KEY, bindMarker())) - .and(eq(TASK_ID_KEY, bindMarker())) - .getQueryString(); - } - - /** - * @return cql query statement to delete an event execution from the "event_execution" table - */ - public String getDeleteEventExecutionsStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_EVENT_EXECUTIONS) - .where(eq(MESSAGE_ID_KEY, bindMarker())) - .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker())) - .and(eq(EVENT_EXECUTION_ID_KEY, bindMarker())) - .getQueryString(); - } - - // EventHandlerDAO - // Insert Statements - - /** - * @return cql query statement to insert an event handler into the "event_handlers" table - */ - public String getInsertEventHandlerStatement() { - return QueryBuilder.insertInto(keyspace, TABLE_EVENT_HANDLERS) - .value(HANDLERS_KEY, HANDLERS_KEY) - .value(EVENT_HANDLER_NAME_KEY, bindMarker()) - .value(EVENT_HANDLER_KEY, bindMarker()) - .getQueryString(); - } - - // Select Statements - - /** - * @return cql query statement to retrieve all event handlers from the "event_handlers" table - */ - public String getSelectAllEventHandlersStatement() { - return QueryBuilder.select() - .all() - .from(keyspace, TABLE_EVENT_HANDLERS) - .where(eq(HANDLERS_KEY, bindMarker())) - .getQueryString(); - } - - // Delete Statements - - /** - * @return cql query statement to delete an event handler by name from the 
"event_handlers" - * table - */ - public String getDeleteEventHandlerStatement() { - return QueryBuilder.delete() - .from(keyspace, TABLE_EVENT_HANDLERS) - .where(eq(HANDLERS_KEY, HANDLERS_KEY)) - .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker())) - .getQueryString(); - } -} diff --git a/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index 8c1d52fe4..000000000 --- a/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.cassandra.write-consistency-level", - "defaultValue": "LOCAL_QUORUM" - }, - { - "name": "conductor.cassandra.read-consistency-level", - "defaultValue": "LOCAL_QUORUM" - } - ], - "hints": [ - { - "name": "conductor.cassandra.write-consistency-level", - "providers": [ - { - "name": "handle-as", - "parameters": { - "target": "java.lang.Enum" - } - } - ] - }, - { - "name": "conductor.cassandra.read-consistency-level", - "providers": [ - { - "name": "handle-as", - "parameters": { - "target": "java.lang.Enum" - } - } - ] - } - ] -} diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy deleted file mode 100644 index 214f3722d..000000000 --- a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao - -import com.netflix.conductor.common.metadata.events.EventExecution -import com.netflix.conductor.common.metadata.events.EventHandler - -import spock.lang.Subject - -class CassandraEventHandlerDAOSpec extends CassandraSpec { - - @Subject - CassandraEventHandlerDAO eventHandlerDAO - - CassandraExecutionDAO executionDAO - - def setup() { - eventHandlerDAO = new CassandraEventHandlerDAO(session, objectMapper, cassandraProperties, statements) - executionDAO = new CassandraExecutionDAO(session, objectMapper, cassandraProperties, statements) - } - - def testEventHandlerCRUD() { - given: - String event = "event" - String eventHandlerName1 = "event_handler1" - String eventHandlerName2 = "event_handler2" - - EventHandler eventHandler = new EventHandler() - eventHandler.setName(eventHandlerName1) - eventHandler.setEvent(event) - - when: // create event handler - eventHandlerDAO.addEventHandler(eventHandler) - List handlers = eventHandlerDAO.getEventHandlersForEvent(event, false) - - then: // fetch all event handlers for event - handlers != null && handlers.size() == 1 - eventHandler.name == handlers[0].name - eventHandler.event == handlers[0].event - !handlers[0].active - - and: // add an active event handler for the same event - EventHandler eventHandler1 = new EventHandler() - eventHandler1.setName(eventHandlerName2) - eventHandler1.setEvent(event) - eventHandler1.setActive(true) - eventHandlerDAO.addEventHandler(eventHandler1) - - when: // fetch all event handlers - handlers = eventHandlerDAO.getAllEventHandlers() - - then: - handlers != null && handlers.size() == 2 - - when: // fetch all event handlers for 
event - handlers = eventHandlerDAO.getEventHandlersForEvent(event, false) - - then: - handlers != null && handlers.size() == 2 - - when: // fetch only active handlers for event - handlers = eventHandlerDAO.getEventHandlersForEvent(event, true) - - then: - handlers != null && handlers.size() == 1 - eventHandler1.name == handlers[0].name - eventHandler1.event == handlers[0].event - handlers[0].active - - when: // remove event handler - eventHandlerDAO.removeEventHandler(eventHandlerName1) - handlers = eventHandlerDAO.getAllEventHandlers() - - then: - handlers != null && handlers.size() == 1 - } - - - - private static EventExecution getEventExecution(String id, String msgId, String name, String event) { - EventExecution eventExecution = new EventExecution(id, msgId); - eventExecution.setName(name); - eventExecution.setEvent(event); - eventExecution.setStatus(EventExecution.Status.IN_PROGRESS); - return eventExecution; - } -} diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraExecutionDAOSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraExecutionDAOSpec.groovy deleted file mode 100644 index 8500d9552..000000000 --- a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraExecutionDAOSpec.groovy +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao - -import com.netflix.conductor.common.metadata.events.EventExecution -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.core.utils.IDGenerator -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.model.WorkflowModel - -import spock.lang.Subject - -import static com.netflix.conductor.common.metadata.events.EventExecution.Status.COMPLETED -import static com.netflix.conductor.common.metadata.events.EventExecution.Status.IN_PROGRESS -import static com.netflix.conductor.core.exception.ApplicationException.Code.INVALID_INPUT - -class CassandraExecutionDAOSpec extends CassandraSpec { - - @Subject - CassandraExecutionDAO executionDAO - - def setup() { - executionDAO = new CassandraExecutionDAO(session, objectMapper, cassandraProperties, statements) - } - - def "verify if tasks are validated"() { - given: - def tasks = [] - - // create tasks for a workflow and add to list - TaskModel task1 = new TaskModel(workflowInstanceId: 'uuid', taskId: 'task1id', referenceTaskName: 'task1') - TaskModel task2 = new TaskModel(workflowInstanceId: 'uuid', taskId: 'task2id', referenceTaskName: 'task2') - tasks << task1 << task2 - - when: - executionDAO.validateTasks(tasks) - - then: - noExceptionThrown() - - and: - // add a task from a different workflow to the list - TaskModel task3 = new TaskModel(workflowInstanceId: 'other-uuid', taskId: 'task3id', referenceTaskName: 
'task3') - tasks << task3 - - when: - executionDAO.validateTasks(tasks) - - then: - def ex = thrown(ApplicationException.class) - ex.message == "Tasks of multiple workflows cannot be created/updated simultaneously" - } - - def "workflow CRUD"() { - given: - String workflowId = new IDGenerator().generate() - WorkflowDef workflowDef = new WorkflowDef() - workflowDef.name = "def1" - workflowDef.setVersion(1) - WorkflowModel workflow = new WorkflowModel() - workflow.setWorkflowDefinition(workflowDef) - workflow.setWorkflowId(workflowId) - workflow.setInput(new HashMap<>()) - workflow.setStatus(WorkflowModel.Status.RUNNING) - workflow.setCreateTime(System.currentTimeMillis()) - - when: - // create a new workflow in the datastore - String id = executionDAO.createWorkflow(workflow) - - then: - workflowId == id - - when: - // read the workflow from the datastore - WorkflowModel found = executionDAO.getWorkflow(workflowId) - - then: - workflow == found - - and: - // update the workflow - workflow.setStatus(WorkflowModel.Status.COMPLETED) - executionDAO.updateWorkflow(workflow) - - when: - found = executionDAO.getWorkflow(workflowId) - - then: - workflow == found - - when: - // remove the workflow from datastore - boolean removed = executionDAO.removeWorkflow(workflowId) - - then: - removed - - when: - // read workflow again - workflow = executionDAO.getWorkflow(workflowId, true) - - then: - workflow == null - } - - def "create tasks and verify methods that read tasks and workflow"() { - given: 'we create a workflow' - String workflowId = new IDGenerator().generate() - WorkflowDef workflowDef = new WorkflowDef(name: 'def1', version: 1) - WorkflowModel workflow = new WorkflowModel(workflowDefinition: workflowDef, workflowId: workflowId, input: new HashMap(), status: WorkflowModel.Status.RUNNING, createTime: System.currentTimeMillis()) - executionDAO.createWorkflow(workflow) - - and: 'create tasks for this workflow' - TaskModel task1 = new TaskModel(workflowInstanceId: 
workflowId, taskType: 'task1', referenceTaskName: 'task1', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task2 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task2', referenceTaskName: 'task2', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task3 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task3', referenceTaskName: 'task3', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - - def taskList = [task1, task2, task3] - - when: 'add the tasks to the datastore' - List tasks = executionDAO.createTasks(taskList) - - then: - tasks != null - taskList == tasks - - when: 'read the tasks from the datastore' - def retTask1 = executionDAO.getTask(task1.taskId) - def retTask2 = executionDAO.getTask(task2.taskId) - def retTask3 = executionDAO.getTask(task3.taskId) - - then: - task1 == retTask1 - task2 == retTask2 - task3 == retTask3 - - when: 'lookup workflowId for the task' - def foundId1 = executionDAO.lookupWorkflowIdFromTaskId(task1.taskId) - def foundId2 = executionDAO.lookupWorkflowIdFromTaskId(task2.taskId) - def foundId3 = executionDAO.lookupWorkflowIdFromTaskId(task3.taskId) - - then: - foundId1 == workflowId - foundId2 == workflowId - foundId3 == workflowId - - when: 'check the metadata' - def workflowMetadata = executionDAO.getWorkflowMetadata(workflowId) - - then: - workflowMetadata.totalTasks == 3 - workflowMetadata.totalPartitions == 1 - - when: 'check the getTasks api' - def fetchedTasks = executionDAO.getTasks([task1.taskId, task2.taskId, task3.taskId]) - - then: - fetchedTasks != null && fetchedTasks.size() == 3 - - when: 'get the tasks for the workflow' - fetchedTasks = executionDAO.getTasksForWorkflow(workflowId) - - then: - fetchedTasks != null && fetchedTasks.size() == 3 - - when: 'read workflow with tasks' - WorkflowModel found = executionDAO.getWorkflow(workflowId, true) - - then: - found != null - workflow.workflowId == 
found.workflowId - found.tasks != null && found.tasks.size() == 3 - found.getTaskByRefName('task1') == task1 - found.getTaskByRefName('task2') == task2 - found.getTaskByRefName('task3') == task3 - } - - def "verify tasks are updated"() { - given: 'we create a workflow' - String workflowId = new IDGenerator().generate() - WorkflowDef workflowDef = new WorkflowDef(name: 'def1', version: 1) - WorkflowModel workflow = new WorkflowModel(workflowDefinition: workflowDef, workflowId: workflowId, input: new HashMap(), status: WorkflowModel.Status.RUNNING, createTime: System.currentTimeMillis()) - executionDAO.createWorkflow(workflow) - - and: 'create tasks for this workflow' - TaskModel task1 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task1', referenceTaskName: 'task1', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task2 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task2', referenceTaskName: 'task2', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task3 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task3', referenceTaskName: 'task3', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - - and: 'add the tasks to the datastore' - executionDAO.createTasks([task1, task2, task3]) - - and: 'change the status of those tasks' - task1.setStatus(TaskModel.Status.IN_PROGRESS) - task2.setStatus(TaskModel.Status.COMPLETED) - task3.setStatus(TaskModel.Status.FAILED) - - when: 'update the tasks' - executionDAO.updateTask(task1) - executionDAO.updateTask(task2) - executionDAO.updateTask(task3) - - then: - executionDAO.getTask(task1.taskId).status == TaskModel.Status.IN_PROGRESS - executionDAO.getTask(task2.taskId).status == TaskModel.Status.COMPLETED - executionDAO.getTask(task3.taskId).status == TaskModel.Status.FAILED - - when: 'get pending tasks for the workflow' - List pendingTasks = executionDAO.getPendingTasksByWorkflow(task1.getTaskType(), 
workflowId) - - then: - pendingTasks != null && pendingTasks.size() == 1 - pendingTasks[0] == task1 - } - - def "verify tasks are removed"() { - given: 'we create a workflow' - String workflowId = new IDGenerator().generate() - WorkflowDef workflowDef = new WorkflowDef(name: 'def1', version: 1) - WorkflowModel workflow = new WorkflowModel(workflowDefinition: workflowDef, workflowId: workflowId, input: new HashMap(), status: WorkflowModel.Status.RUNNING, createTime: System.currentTimeMillis()) - executionDAO.createWorkflow(workflow) - - and: 'create tasks for this workflow' - TaskModel task1 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task1', referenceTaskName: 'task1', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task2 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task2', referenceTaskName: 'task2', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - TaskModel task3 = new TaskModel(workflowInstanceId: workflowId, taskType: 'task3', referenceTaskName: 'task3', status: TaskModel.Status.SCHEDULED, taskId: new IDGenerator().generate()) - - and: 'add the tasks to the datastore' - executionDAO.createTasks([task1, task2, task3]) - - when: - boolean removed = executionDAO.removeTask(task3.getTaskId()) - - then: - removed - def workflowMetadata = executionDAO.getWorkflowMetadata(workflowId) - workflowMetadata.totalTasks == 2 - workflowMetadata.totalPartitions == 1 - - when: 'read workflow with tasks again' - def found = executionDAO.getWorkflow(workflowId) - - then: - found != null - found.workflowId == workflowId - found.tasks.size() == 2 - found.getTaskByRefName('task1') == task1 - found.getTaskByRefName('task2') == task2 - - and: 'read workflowId for the deleted task id' - executionDAO.lookupWorkflowIdFromTaskId(task3.taskId) == null - - and: 'try to read removed task' - executionDAO.getTask(task3.getTaskId()) == null - - when: 'remove the workflow' - removed = 
executionDAO.removeWorkflow(workflowId) - - then: 'check task_lookup table' - removed - executionDAO.lookupWorkflowIdFromTaskId(task1.taskId) == null - executionDAO.lookupWorkflowIdFromTaskId(task2.taskId) == null - } - - def "CRUD on task def limit"() { - given: - String taskDefName = "test_task_def" - String taskId = new IDGenerator().generate() - - TaskDef taskDef = new TaskDef(concurrentExecLimit: 1) - WorkflowTask workflowTask = new WorkflowTask(taskDefinition: taskDef) - workflowTask.setTaskDefinition(taskDef) - - TaskModel task = new TaskModel() - task.taskDefName = taskDefName - task.taskId = taskId - task.workflowInstanceId = new IDGenerator().generate() - task.setWorkflowTask(workflowTask) - task.setTaskType("test_task") - task.setWorkflowType("test_workflow") - task.setStatus(TaskModel.Status.SCHEDULED) - - TaskModel newTask = new TaskModel() - newTask.setTaskDefName(taskDefName) - newTask.setTaskId(new IDGenerator().generate()) - newTask.setWorkflowInstanceId(new IDGenerator().generate()) - newTask.setWorkflowTask(workflowTask) - newTask.setTaskType("test_task") - newTask.setWorkflowType("test_workflow") - newTask.setStatus(TaskModel.Status.SCHEDULED) - - when: // no tasks are IN_PROGRESS - executionDAO.addTaskToLimit(task) - - then: - !executionDAO.exceedsLimit(task) - - when: // set a task to IN_PROGRESS - task.setStatus(TaskModel.Status.IN_PROGRESS) - executionDAO.addTaskToLimit(task) - - then: // same task is checked - !executionDAO.exceedsLimit(task) - - and: // check if new task can be added - executionDAO.exceedsLimit(newTask) - - when: // set IN_PROGRESS task to COMPLETED - task.setStatus(TaskModel.Status.COMPLETED) - executionDAO.removeTaskFromLimit(task) - - then: // check new task again - !executionDAO.exceedsLimit(newTask) - - when: // set new task to IN_PROGRESS - newTask.setStatus(TaskModel.Status.IN_PROGRESS) - executionDAO.addTaskToLimit(newTask) - - then: // check new task again - !executionDAO.exceedsLimit(newTask) - } - - def "verify 
if invalid identifiers throw correct exceptions"() { - when: 'verify that a non-conforming uuid throws an exception' - executionDAO.getTask('invalid_id') - - then: - def ex = thrown(ApplicationException.class) - ex && ex.code == INVALID_INPUT - - when: 'verify that a non-conforming uuid throws an exception' - executionDAO.getWorkflow('invalid_id', true) - - then: - ex = thrown(ApplicationException.class) - ex && ex.code == INVALID_INPUT - - and: 'verify that a non-existing generated id returns null' - executionDAO.getTask(new IDGenerator().generate()) == null - executionDAO.getWorkflow(new IDGenerator().generate(), true) == null - } - - def "CRUD on event execution"() throws Exception { - given: - String event = "test-event" - String executionId1 = "id_1" - String messageId1 = "message1" - String eventHandler1 = "test_eh_1" - EventExecution eventExecution1 = getEventExecution(executionId1, messageId1, eventHandler1, event) - - when: // create event execution explicitly - executionDAO.addEventExecution(eventExecution1) - List eventExecutionList = executionDAO.getEventExecutions(eventHandler1, event, messageId1) - - then: // fetch executions - eventExecutionList != null && eventExecutionList.size() == 1 - eventExecutionList[0] == eventExecution1 - - when: // add a different execution for same message - String executionId2 = "id_2" - EventExecution eventExecution2 = getEventExecution(executionId2, messageId1, eventHandler1, event) - executionDAO.addEventExecution(eventExecution2) - eventExecutionList = executionDAO.getEventExecutions(eventHandler1, event, messageId1) - - then: // fetch executions - eventExecutionList != null && eventExecutionList.size() == 2 - eventExecutionList[0] == eventExecution1 - eventExecutionList[1] == eventExecution2 - - when: // update the second execution - eventExecution2.setStatus(COMPLETED) - executionDAO.updateEventExecution(eventExecution2) - eventExecutionList = executionDAO.getEventExecutions(eventHandler1, event, messageId1) - - 
then: // fetch executions - eventExecutionList != null && eventExecutionList.size() == 2 - eventExecutionList[0].status == IN_PROGRESS - eventExecutionList[1].status == COMPLETED - - when: // sleep for 5 seconds (TTL) - Thread.sleep(5000L) - eventExecutionList = executionDAO.getEventExecutions(eventHandler1, event, messageId1) - - then: - eventExecutionList != null && eventExecutionList.size() == 1 - - when: // delete event execution - executionDAO.removeEventExecution(eventExecution1) - eventExecutionList = executionDAO.getEventExecutions(eventHandler1, event, messageId1) - - then: - eventExecutionList != null && eventExecutionList.empty - } - - private static EventExecution getEventExecution(String id, String msgId, String name, String event) { - EventExecution eventExecution = new EventExecution(id, msgId); - eventExecution.setName(name); - eventExecution.setEvent(event); - eventExecution.setStatus(EventExecution.Status.IN_PROGRESS); - return eventExecution; - } -} diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraMetadataDAOSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraMetadataDAOSpec.groovy deleted file mode 100644 index afca61fa7..000000000 --- a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraMetadataDAOSpec.groovy +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao - -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.WorkflowDef - -import spock.lang.Subject - -class CassandraMetadataDAOSpec extends CassandraSpec { - - @Subject - CassandraMetadataDAO metadataDAO - - def setup() { - metadataDAO = new CassandraMetadataDAO(session, objectMapper, cassandraProperties, statements) - } - - def cleanup() { - - } - - def "CRUD on WorkflowDef"() throws Exception { - given: - String name = "workflow_def_1" - int version = 1 - - WorkflowDef workflowDef = new WorkflowDef() - workflowDef.setName(name) - workflowDef.setVersion(version) - workflowDef.setOwnerEmail("test@junit.com") - - when: 'create workflow definition' - metadataDAO.createWorkflowDef(workflowDef) - - then: // fetch the workflow definition - def defOptional = metadataDAO.getWorkflowDef(name, version) - defOptional.present - defOptional.get() == workflowDef - - and: // register a higher version - int higherVersion = 2 - workflowDef.setVersion(higherVersion) - workflowDef.setDescription("higher version") - - when: // register the higher version definition - metadataDAO.createWorkflowDef(workflowDef) - defOptional = metadataDAO.getWorkflowDef(name, higherVersion) - - then: // fetch the higher version - defOptional.present - defOptional.get() == workflowDef - - when: // fetch latest version - defOptional = metadataDAO.getLatestWorkflowDef(name) - - then: - defOptional && defOptional.present - defOptional.get() == workflowDef - - when: // modify the definition - workflowDef.setOwnerEmail("test@junit.com") - metadataDAO.updateWorkflowDef(workflowDef) - 
defOptional = metadataDAO.getWorkflowDef(name, higherVersion) - - then: // fetch the workflow definition - defOptional.present - defOptional.get() == workflowDef - - when: // delete workflow def - metadataDAO.removeWorkflowDef(name, higherVersion) - defOptional = metadataDAO.getWorkflowDef(name, higherVersion) - - then: - defOptional.empty - } - - def "CRUD on TaskDef"() { - given: - String task1Name = "task1" - String task2Name = "task2" - - when: // fetch all task defs - def taskDefList = metadataDAO.getAllTaskDefs() - - then: - taskDefList.empty - - when: // register a task definition - TaskDef taskDef = new TaskDef() - taskDef.setName(task1Name) - metadataDAO.createTaskDef(taskDef) - taskDefList = metadataDAO.getAllTaskDefs() - - then: // fetch all task defs - taskDefList && taskDefList.size() == 1 - - when: // fetch the task def - def returnTaskDef = metadataDAO.getTaskDef(task1Name) - - then: - returnTaskDef == taskDef - - when: // register another task definition - TaskDef taskDef1 = new TaskDef() - taskDef1.setName(task2Name) - metadataDAO.createTaskDef(taskDef1) - // fetch all task defs - taskDefList = metadataDAO.getAllTaskDefs() - - then: - taskDefList && taskDefList.size() == 2 - - when: // update task def - taskDef.setOwnerEmail("juni@test.com") - metadataDAO.updateTaskDef(taskDef) - returnTaskDef = metadataDAO.getTaskDef(task1Name) - - then: - returnTaskDef == taskDef - - when: // delete task def - metadataDAO.removeTaskDef(task2Name) - taskDefList = metadataDAO.getAllTaskDefs() - - then: - taskDefList && taskDefList.size() == 1 - // fetch deleted task def - metadataDAO.getTaskDef(task2Name) == null - } - - def "parse index string"() { - expect: - def pair = metadataDAO.getWorkflowNameAndVersion(nameVersionStr) - pair.left == workflowName - pair.right == version - - where: - nameVersionStr << ['name/1', 'namespace/name/3', '/namespace/name_with_lodash/2', 'name//4', 'name-with$%/895'] - workflowName << ['name', 'namespace/name', 
'/namespace/name_with_lodash', 'name/', 'name-with$%'] - version << [1, 3, 2, 4, 895] - } - - def "parse index string - incorrect values"() { - when: - metadataDAO.getWorkflowNameAndVersion("name_with_no_version") - - then: - def ex = thrown(IllegalStateException.class) - println(ex.message) - - when: - metadataDAO.getWorkflowNameAndVersion("name_with_no_version/") - - then: - ex = thrown(IllegalStateException.class) - println(ex.message) - - when: - metadataDAO.getWorkflowNameAndVersion("name/non_number_version") - - then: - ex = thrown(IllegalStateException.class) - println(ex.message) - } -} diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy deleted file mode 100644 index a5393210b..000000000 --- a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.dao - -import java.time.Duration - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.test.context.ContextConfiguration -import org.testcontainers.containers.CassandraContainer -import org.testcontainers.spock.Testcontainers - -import com.netflix.conductor.cassandra.config.CassandraProperties -import com.netflix.conductor.cassandra.util.Statements -import com.netflix.conductor.common.config.TestObjectMapperConfiguration - -import com.datastax.driver.core.ConsistencyLevel -import com.datastax.driver.core.Session -import com.fasterxml.jackson.databind.ObjectMapper -import groovy.transform.PackageScope -import spock.lang.Shared -import spock.lang.Specification - -@ContextConfiguration(classes = [TestObjectMapperConfiguration.class]) -@Testcontainers -@PackageScope -abstract class CassandraSpec extends Specification { - - @Shared - CassandraContainer cassandra = new CassandraContainer() - - @Shared - Session session - - @Autowired - ObjectMapper objectMapper - - CassandraProperties cassandraProperties - Statements statements - - def setupSpec() { - session = cassandra.cluster.newSession() - } - - def setup() { - String keyspaceName = "junit" - cassandraProperties = Mock(CassandraProperties.class) { - getKeyspace() >> keyspaceName - getReplicationStrategy() >> "SimpleStrategy" - getReplicationFactorKey() >> "replication_factor" - getReplicationFactorValue() >> 1 - getReadConsistencyLevel() >> ConsistencyLevel.LOCAL_ONE - getWriteConsistencyLevel() >> ConsistencyLevel.LOCAL_ONE - getTaskDefCacheRefreshInterval() >> Duration.ofSeconds(60) - 
getEventHandlerCacheRefreshInterval() >> Duration.ofSeconds(60) - getEventExecutionPersistenceTtl() >> Duration.ofSeconds(5) - } - - statements = new Statements(keyspaceName) - } -} diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy deleted file mode 100644 index f826a3620..000000000 --- a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.cassandra.util - -import spock.lang.Specification -import spock.lang.Subject - -class StatementsSpec extends Specification { - - @Subject - Statements subject - - def setup() { - subject = new Statements('test') - } - - def "verify statements"() { - when: - subject - - then: - with(subject) { - insertWorkflowDefStatement == "INSERT INTO test.workflow_definitions (workflow_def_name,version,workflow_definition) VALUES (?,?,?) IF NOT EXISTS;" - insertTaskDefStatement == "INSERT INTO test.task_definitions (task_defs,task_def_name,task_definition) VALUES ('task_defs',?,?);" - selectWorkflowDefStatement == "SELECT workflow_definition FROM test.workflow_definitions WHERE workflow_def_name=? AND version=?;" - selectAllWorkflowDefVersionsByNameStatement == "SELECT * FROM test.workflow_definitions WHERE workflow_def_name=?;" - selectAllWorkflowDefsStatement == "SELECT * FROM test.workflow_defs_index WHERE workflow_def_version_index=?;" - selectTaskDefStatement == "SELECT task_definition FROM test.task_definitions WHERE task_defs='task_defs' AND task_def_name=?;" - selectAllTaskDefsStatement == "SELECT * FROM test.task_definitions WHERE task_defs=?;" - updateWorkflowDefStatement == "UPDATE test.workflow_definitions SET workflow_definition=? WHERE workflow_def_name=? AND version=?;" - deleteWorkflowDefStatement == "DELETE FROM test.workflow_definitions WHERE workflow_def_name=? AND version=?;" - deleteWorkflowDefIndexStatement == "DELETE FROM test.workflow_defs_index WHERE workflow_def_version_index=? 
AND workflow_def_name_version=?;" - deleteTaskDefStatement == "DELETE FROM test.task_definitions WHERE task_defs='task_defs' AND task_def_name=?;" - insertWorkflowStatement == "INSERT INTO test.workflows (workflow_id,shard_id,task_id,entity,payload,total_tasks,total_partitions) VALUES (?,?,?,'workflow',?,?,?);" - insertTaskStatement == "INSERT INTO test.workflows (workflow_id,shard_id,task_id,entity,payload) VALUES (?,?,?,'task',?);" - insertEventExecutionStatement == "INSERT INTO test.event_executions (message_id,event_handler_name,event_execution_id,payload) VALUES (?,?,?,?) IF NOT EXISTS;" - selectTotalStatement == "SELECT total_tasks,total_partitions FROM test.workflows WHERE workflow_id=? AND shard_id=1;" - selectTaskStatement == "SELECT payload FROM test.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;" - selectWorkflowStatement == "SELECT payload FROM test.workflows WHERE workflow_id=? AND shard_id=1 AND entity='workflow';" - selectWorkflowWithTasksStatement == "SELECT * FROM test.workflows WHERE workflow_id=? AND shard_id=?;" - selectTaskFromLookupTableStatement == "SELECT workflow_id FROM test.task_lookup WHERE task_id=?;" - selectTasksFromTaskDefLimitStatement == "SELECT * FROM test.task_def_limit WHERE task_def_name=?;" - selectAllEventExecutionsForMessageFromEventExecutionsStatement == "SELECT * FROM test.event_executions WHERE message_id=? AND event_handler_name=?;" - updateWorkflowStatement == "UPDATE test.workflows SET payload=? WHERE workflow_id=? AND shard_id=1 AND entity='workflow' AND task_id='';" - updateTotalTasksStatement == "UPDATE test.workflows SET total_tasks=? WHERE workflow_id=? AND shard_id=?;" - updateTotalPartitionsStatement == "UPDATE test.workflows SET total_partitions=?,total_tasks=? WHERE workflow_id=? AND shard_id=1;" - updateTaskLookupStatement == "UPDATE test.task_lookup SET workflow_id=? WHERE task_id=?;" - updateTaskDefLimitStatement == "UPDATE test.task_def_limit SET workflow_id=? 
WHERE task_def_name=? AND task_id=?;" - updateEventExecutionStatement == "UPDATE test.event_executions USING TTL ? SET payload=? WHERE message_id=? AND event_handler_name=? AND event_execution_id=?;" - deleteWorkflowStatement == "DELETE FROM test.workflows WHERE workflow_id=? AND shard_id=?;" - deleteTaskLookupStatement == "DELETE FROM test.task_lookup WHERE task_id=?;" - deleteTaskStatement == "DELETE FROM test.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;" - deleteTaskDefLimitStatement == "DELETE FROM test.task_def_limit WHERE task_def_name=? AND task_id=?;" - deleteEventExecutionsStatement == "DELETE FROM test.event_executions WHERE message_id=? AND event_handler_name=? AND event_execution_id=?;" - insertEventHandlerStatement == "INSERT INTO test.event_handlers (handlers,event_handler_name,event_handler) VALUES ('handlers',?,?);" - selectAllEventHandlersStatement == "SELECT * FROM test.event_handlers WHERE handlers=?;" - deleteEventHandlerStatement == "DELETE FROM test.event_handlers WHERE handlers='handlers' AND event_handler_name=?;" - } - } -} diff --git a/client-spring/build.gradle b/client-spring/build.gradle deleted file mode 100644 index f975f701c..000000000 --- a/client-spring/build.gradle +++ /dev/null @@ -1,9 +0,0 @@ - -dependencies { - - implementation project(':conductor-common') - api project(':conductor-client') - - implementation "com.netflix.eureka:eureka-client:${revEurekaClient}" - implementation 'org.springframework.boot:spring-boot-starter' -} diff --git a/client-spring/dependencies.lock b/client-spring/dependencies.lock deleted file mode 100644 index b04d70e85..000000000 --- a/client-spring/dependencies.lock +++ /dev/null @@ -1,361 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - 
"project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "project": true - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.10.10" - }, - 
"com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.7" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-client" - ], - "locked": "1.7.36" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "testCompileClasspath": { - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - 
"com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "project": true - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.7" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.7.36" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/client-spring/src/main/java/com/netflix/conductor/client/spring/ClientProperties.java b/client-spring/src/main/java/com/netflix/conductor/client/spring/ClientProperties.java deleted file mode 100644 index 55c82e576..000000000 --- a/client-spring/src/main/java/com/netflix/conductor/client/spring/ClientProperties.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.spring; - -import java.time.Duration; -import java.util.HashMap; -import java.util.Map; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -@ConfigurationProperties("conductor.client") -public class ClientProperties { - - private String rootUri; - - private String workerNamePrefix = "workflow-worker-%d"; - - private int threadCount = 1; - - private Duration sleepWhenRetryDuration = Duration.ofMillis(500); - - private int updateRetryCount = 3; - - private Map taskToDomain = new HashMap<>(); - - private Map taskThreadCount = new HashMap<>(); - - private int shutdownGracePeriodSeconds = 10; - - public String getRootUri() { - return rootUri; - } - - public void setRootUri(String rootUri) { - this.rootUri = rootUri; - } - - public String getWorkerNamePrefix() { - return workerNamePrefix; - } - - public void setWorkerNamePrefix(String workerNamePrefix) { - this.workerNamePrefix = workerNamePrefix; - } - - public int getThreadCount() { - return threadCount; - } - - public void setThreadCount(int threadCount) { - this.threadCount = threadCount; - } - - public Duration getSleepWhenRetryDuration() { - return sleepWhenRetryDuration; - } - - public void setSleepWhenRetryDuration(Duration sleepWhenRetryDuration) { - this.sleepWhenRetryDuration = sleepWhenRetryDuration; - } - - public int getUpdateRetryCount() { - return updateRetryCount; - } - - public void setUpdateRetryCount(int updateRetryCount) { - this.updateRetryCount = updateRetryCount; - } - - public Map getTaskToDomain() { - return taskToDomain; - } - - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; 
- } - - public int getShutdownGracePeriodSeconds() { - return shutdownGracePeriodSeconds; - } - - public void setShutdownGracePeriodSeconds(int shutdownGracePeriodSeconds) { - this.shutdownGracePeriodSeconds = shutdownGracePeriodSeconds; - } - - public Map getTaskThreadCount() { - return taskThreadCount; - } - - public void setTaskThreadCount(Map taskThreadCount) { - this.taskThreadCount = taskThreadCount; - } -} diff --git a/client-spring/src/main/java/com/netflix/conductor/client/spring/ConductorClientAutoConfiguration.java b/client-spring/src/main/java/com/netflix/conductor/client/spring/ConductorClientAutoConfiguration.java deleted file mode 100644 index c70aeb4c1..000000000 --- a/client-spring/src/main/java/com/netflix/conductor/client/spring/ConductorClientAutoConfiguration.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.spring; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.client.automator.TaskRunnerConfigurer; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.discovery.EurekaClient; - -@Configuration(proxyBeanMethods = false) -@EnableConfigurationProperties(ClientProperties.class) -public class ConductorClientAutoConfiguration { - - @Autowired(required = false) - private EurekaClient eurekaClient; - - @Autowired(required = false) - private List workers = new ArrayList<>(); - - @ConditionalOnMissingBean - @Bean - public TaskClient taskClient(ClientProperties clientProperties) { - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI(clientProperties.getRootUri()); - return taskClient; - } - - @ConditionalOnMissingBean - @Bean(initMethod = "init", destroyMethod = "shutdown") - public TaskRunnerConfigurer taskRunnerConfigurer( - TaskClient taskClient, ClientProperties clientProperties) { - return new TaskRunnerConfigurer.Builder(taskClient, workers) - .withTaskThreadCount(clientProperties.getTaskThreadCount()) - .withThreadCount(clientProperties.getThreadCount()) - .withSleepWhenRetry((int) clientProperties.getSleepWhenRetryDuration().toMillis()) - 
.withUpdateRetryCount(clientProperties.getUpdateRetryCount()) - .withTaskToDomain(clientProperties.getTaskToDomain()) - .withShutdownGracePeriodSeconds(clientProperties.getShutdownGracePeriodSeconds()) - .withEurekaClient(eurekaClient) - .build(); - } -} diff --git a/client-spring/src/main/resources/META-INF/spring.factories b/client-spring/src/main/resources/META-INF/spring.factories deleted file mode 100644 index 329c69abd..000000000 --- a/client-spring/src/main/resources/META-INF/spring.factories +++ /dev/null @@ -1,2 +0,0 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ - com.netflix.conductor.client.spring.ConductorClientAutoConfiguration diff --git a/client-spring/src/test/java/com/netflix/conductor/client/spring/ExampleClient.java b/client-spring/src/test/java/com/netflix/conductor/client/spring/ExampleClient.java deleted file mode 100644 index 772c04dc0..000000000 --- a/client-spring/src/test/java/com/netflix/conductor/client/spring/ExampleClient.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.spring; - -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.context.annotation.Bean; - -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; - -@SpringBootApplication -public class ExampleClient { - - public static void main(String[] args) { - - SpringApplication.run(ExampleClient.class, args); - } - - @Bean - public Worker worker() { - return new Worker() { - @Override - public String getTaskDefName() { - return "taskDef"; - } - - @Override - public TaskResult execute(Task task) { - return new TaskResult(task); - } - }; - } -} diff --git a/client/build.gradle b/client/build.gradle deleted file mode 100644 index 477fea1f3..000000000 --- a/client/build.gradle +++ /dev/null @@ -1,43 +0,0 @@ -buildscript { - repositories { - maven { - url "https://plugins.gradle.org/m2/" - } - } - dependencies { - classpath "gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:4.7.5" - } -} - -apply plugin: 'groovy' - -configurations.all { - exclude group: 'amazon', module: 'aws-java-sdk' -} - -dependencies { - implementation project(':conductor-common') - - implementation "com.sun.jersey:jersey-client:${revJersey}" - - implementation "com.netflix.spectator:spectator-api:${revSpectator}" - implementation ("com.netflix.eureka:eureka-client:${revEurekaClient}") { - exclude group: 'com.google.guava', module: 'guava' - } - implementation "com.amazonaws:aws-java-sdk-core:${revAwsSdk}" - - implementation 
"com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider" - implementation "com.fasterxml.jackson.datatype:jackson-datatype-jsr310" - - implementation "org.apache.commons:commons-lang3" - implementation "commons-io:commons-io:${revCommonsIo}" - - implementation "org.slf4j:slf4j-api" - - testImplementation "org.powermock:powermock-module-junit4:${revPowerMock}" - testImplementation "org.powermock:powermock-api-mockito2:${revPowerMock}" - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" -} diff --git a/client/dependencies.lock b/client/dependencies.lock deleted file mode 100644 index 52da1b208..000000000 --- a/client/dependencies.lock +++ /dev/null @@ -1,349 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "locked": "2.13.2" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" 
- } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "locked": "2.13.2" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - 
"com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - } - }, - "testCompileClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "locked": "2.13.2" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.powermock:powermock-api-mockito2": { - "locked": "2.0.9" - }, - "org.powermock:powermock-module-junit4": { - "locked": "2.0.9" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - 
"org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "locked": "2.13.2" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.eureka:eureka-client": { - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - 
"com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.powermock:powermock-api-mockito2": { - "locked": "2.0.9" - }, - "org.powermock:powermock-module-junit4": { - "locked": "2.0.9" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/client/spotbugsExclude.xml b/client/spotbugsExclude.xml deleted file mode 100644 index 29b642ca7..000000000 --- a/client/spotbugsExclude.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - diff --git a/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java b/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java deleted file mode 100644 index c57e3ec5a..000000000 --- a/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.util.concurrent.Semaphore; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A class wrapping a semaphore which holds the number of permits available for polling and - * executing tasks. - */ -class PollingSemaphore { - - private static final Logger LOGGER = LoggerFactory.getLogger(PollingSemaphore.class); - private final Semaphore semaphore; - - PollingSemaphore(int numSlots) { - LOGGER.debug("Polling semaphore initialized with {} permits", numSlots); - semaphore = new Semaphore(numSlots); - } - - /** - * Signals if polling is allowed based on whether a permit can be acquired. - * - * @return {@code true} - if permit is acquired {@code false} - if permit could not be acquired - */ - boolean canPoll() { - boolean acquired = semaphore.tryAcquire(); - LOGGER.debug("Trying to acquire permit: {}", acquired); - return acquired; - } - - /** Signals that processing is complete and the permit can be released. */ - void complete() { - LOGGER.debug("Completed execution; releasing permit"); - semaphore.release(); - } - - /** - * Gets the number of threads available for processing. 
- * - * @return number of available permits - */ - int availableThreads() { - int available = semaphore.availablePermits(); - LOGGER.debug("Number of available permits: {}", available); - return available; - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java b/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java deleted file mode 100644 index d2299826f..000000000 --- a/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.*; -import java.util.function.Function; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.apache.commons.lang3.time.StopWatch; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.appinfo.InstanceInfo.InstanceStatus; -import com.netflix.conductor.client.config.PropertyFactory; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.telemetry.MetricsContainer; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.discovery.EurekaClient; -import com.netflix.spectator.api.Registry; -import com.netflix.spectator.api.Spectator; -import com.netflix.spectator.api.patterns.ThreadPoolMonitor; - -/** - * Manages the threadpool used by the workers for execution and server communication (polling and - * task update). 
- */ -class TaskPollExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskPollExecutor.class); - - private static final Registry REGISTRY = Spectator.globalRegistry(); - - private final EurekaClient eurekaClient; - private final TaskClient taskClient; - private final int updateRetryCount; - private final ExecutorService executorService; - private final Map pollingSemaphoreMap; - private final Map taskToDomain; - - private static final String DOMAIN = "domain"; - private static final String OVERRIDE_DISCOVERY = "pollOutOfDiscovery"; - private static final String ALL_WORKERS = "all"; - - TaskPollExecutor( - EurekaClient eurekaClient, - TaskClient taskClient, - int threadCount, - int updateRetryCount, - Map taskToDomain, - String workerNamePrefix, - Map taskThreadCount) { - this.eurekaClient = eurekaClient; - this.taskClient = taskClient; - this.updateRetryCount = updateRetryCount; - this.taskToDomain = taskToDomain; - - this.pollingSemaphoreMap = new HashMap<>(); - int totalThreadCount = 0; - if (!taskThreadCount.isEmpty()) { - for (Map.Entry entry : taskThreadCount.entrySet()) { - String taskType = entry.getKey(); - int count = entry.getValue(); - totalThreadCount += count; - pollingSemaphoreMap.put(taskType, new PollingSemaphore(count)); - } - } else { - totalThreadCount = threadCount; - // shared poll for all workers - pollingSemaphoreMap.put(ALL_WORKERS, new PollingSemaphore(threadCount)); - } - - LOGGER.info("Initialized the TaskPollExecutor with {} threads", totalThreadCount); - this.executorService = - Executors.newFixedThreadPool( - totalThreadCount, - new BasicThreadFactory.Builder() - .namingPattern(workerNamePrefix) - .uncaughtExceptionHandler(uncaughtExceptionHandler) - .build()); - ThreadPoolMonitor.attach(REGISTRY, (ThreadPoolExecutor) executorService, workerNamePrefix); - } - - void pollAndExecute(Worker worker) { - Boolean discoveryOverride = - Optional.ofNullable( - PropertyFactory.getBoolean( - worker.getTaskDefName(), 
OVERRIDE_DISCOVERY, null)) - .orElseGet( - () -> - PropertyFactory.getBoolean( - ALL_WORKERS, OVERRIDE_DISCOVERY, false)); - - if (eurekaClient != null - && !eurekaClient.getInstanceRemoteStatus().equals(InstanceStatus.UP) - && !discoveryOverride) { - LOGGER.debug("Instance is NOT UP in discovery - will not poll"); - return; - } - - if (worker.paused()) { - MetricsContainer.incrementTaskPausedCount(worker.getTaskDefName()); - LOGGER.debug("Worker {} has been paused. Not polling anymore!", worker.getClass()); - return; - } - - String taskType = worker.getTaskDefName(); - PollingSemaphore pollingSemaphore = getPollingSemaphore(taskType); - - Task task; - try { - if (!pollingSemaphore.canPoll()) { - return; - } - - String domain = - Optional.ofNullable(PropertyFactory.getString(taskType, DOMAIN, null)) - .orElseGet( - () -> - Optional.ofNullable( - PropertyFactory.getString( - ALL_WORKERS, DOMAIN, null)) - .orElse(taskToDomain.get(taskType))); - - LOGGER.debug("Polling task of type: {} in domain: '{}'", taskType, domain); - task = - MetricsContainer.getPollTimer(taskType) - .record( - () -> - taskClient.pollTask( - taskType, worker.getIdentity(), domain)); - - if (Objects.nonNull(task) && StringUtils.isNotBlank(task.getTaskId())) { - MetricsContainer.incrementTaskPollCount(taskType, 1); - LOGGER.debug( - "Polled task: {} of type: {} in domain: '{}', from worker: {}", - task.getTaskId(), - taskType, - domain, - worker.getIdentity()); - - CompletableFuture taskCompletableFuture = - CompletableFuture.supplyAsync( - () -> processTask(task, worker, pollingSemaphore), executorService); - - taskCompletableFuture.whenComplete(this::finalizeTask); - } else { - // no task was returned in the poll, release the permit - pollingSemaphore.complete(); - } - } catch (Exception e) { - // release the permit if exception is thrown during polling, because the thread would - // not be busy - pollingSemaphore.complete(); - 
MetricsContainer.incrementTaskPollErrorCount(worker.getTaskDefName(), e); - LOGGER.error("Error when polling for tasks", e); - } - } - - void shutdownExecutorService(ExecutorService executorService, int timeout) { - try { - executorService.shutdown(); - if (executorService.awaitTermination(timeout, TimeUnit.SECONDS)) { - LOGGER.debug("tasks completed, shutting down"); - } else { - LOGGER.warn(String.format("forcing shutdown after waiting for %s second", timeout)); - executorService.shutdownNow(); - } - } catch (InterruptedException ie) { - LOGGER.warn("shutdown interrupted, invoking shutdownNow"); - executorService.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - @SuppressWarnings("FieldCanBeLocal") - private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler = - (thread, error) -> { - // JVM may be in unstable state, try to send metrics then exit - MetricsContainer.incrementUncaughtExceptionCount(); - LOGGER.error("Uncaught exception. Thread {} will exit now", thread, error); - }; - - private Task processTask(Task task, Worker worker, PollingSemaphore pollingSemaphore) { - LOGGER.debug( - "Executing task: {} of type: {} in worker: {} at {}", - task.getTaskId(), - task.getTaskDefName(), - worker.getClass().getSimpleName(), - worker.getIdentity()); - try { - executeTask(worker, task); - } catch (Throwable t) { - task.setStatus(Task.Status.FAILED); - TaskResult result = new TaskResult(task); - handleException(t, result, worker, task); - } finally { - pollingSemaphore.complete(); - } - return task; - } - - private void executeTask(Worker worker, Task task) { - StopWatch stopwatch = new StopWatch(); - stopwatch.start(); - TaskResult result = null; - try { - LOGGER.debug( - "Executing task: {} in worker: {} at {}", - task.getTaskId(), - worker.getClass().getSimpleName(), - worker.getIdentity()); - result = worker.execute(task); - result.setWorkflowInstanceId(task.getWorkflowInstanceId()); - result.setTaskId(task.getTaskId()); - 
result.setWorkerId(worker.getIdentity()); - } catch (Exception e) { - LOGGER.error( - "Unable to execute task: {} of type: {}", - task.getTaskId(), - task.getTaskDefName(), - e); - if (result == null) { - task.setStatus(Task.Status.FAILED); - result = new TaskResult(task); - } - handleException(e, result, worker, task); - } finally { - stopwatch.stop(); - MetricsContainer.getExecutionTimer(worker.getTaskDefName()) - .record(stopwatch.getTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); - } - - LOGGER.debug( - "Task: {} executed by worker: {} at {} with status: {}", - task.getTaskId(), - worker.getClass().getSimpleName(), - worker.getIdentity(), - result.getStatus()); - updateTaskResult(updateRetryCount, task, result, worker); - } - - private void finalizeTask(Task task, Throwable throwable) { - if (throwable != null) { - LOGGER.error( - "Error processing task: {} of type: {}", - task.getTaskId(), - task.getTaskType(), - throwable); - MetricsContainer.incrementTaskExecutionErrorCount(task.getTaskType(), throwable); - } else { - LOGGER.debug( - "Task:{} of type:{} finished processing with status:{}", - task.getTaskId(), - task.getTaskDefName(), - task.getStatus()); - } - } - - private void updateTaskResult(int count, Task task, TaskResult result, Worker worker) { - try { - // upload if necessary - Optional optionalExternalStorageLocation = - retryOperation( - (TaskResult taskResult) -> upload(taskResult, task.getTaskType()), - count, - result, - "evaluateAndUploadLargePayload"); - - if (optionalExternalStorageLocation.isPresent()) { - result.setExternalOutputPayloadStoragePath(optionalExternalStorageLocation.get()); - result.setOutputData(null); - } - - retryOperation( - (TaskResult taskResult) -> { - taskClient.updateTask(taskResult); - return null; - }, - count, - result, - "updateTask"); - } catch (Exception e) { - worker.onErrorUpdate(task); - MetricsContainer.incrementTaskUpdateErrorCount(worker.getTaskDefName(), e); - LOGGER.error( - String.format( - "Failed 
to update result: %s for task: %s in worker: %s", - result.toString(), task.getTaskDefName(), worker.getIdentity()), - e); - } - } - - private Optional upload(TaskResult result, String taskType) { - try { - return taskClient.evaluateAndUploadLargePayload(result.getOutputData(), taskType); - } catch (IllegalArgumentException iae) { - result.setReasonForIncompletion(iae.getMessage()); - result.setOutputData(null); - result.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR); - return Optional.empty(); - } - } - - private R retryOperation(Function operation, int count, T input, String opName) { - int index = 0; - while (index < count) { - try { - return operation.apply(input); - } catch (Exception e) { - index++; - try { - Thread.sleep(500L); - } catch (InterruptedException ie) { - LOGGER.error("Retry interrupted", ie); - } - } - } - throw new RuntimeException("Exhausted retries performing " + opName); - } - - private void handleException(Throwable t, TaskResult result, Worker worker, Task task) { - LOGGER.error(String.format("Error while executing task %s", task.toString()), t); - MetricsContainer.incrementTaskExecutionErrorCount(worker.getTaskDefName(), t); - result.setStatus(TaskResult.Status.FAILED); - result.setReasonForIncompletion("Error while executing the task: " + t); - - StringWriter stringWriter = new StringWriter(); - t.printStackTrace(new PrintWriter(stringWriter)); - result.log(stringWriter.toString()); - - updateTaskResult(updateRetryCount, task, result, worker); - } - - private PollingSemaphore getPollingSemaphore(String taskType) { - if (pollingSemaphoreMap.containsKey(taskType)) { - return pollingSemaphoreMap.get(taskType); - } else { - return pollingSemaphoreMap.get(ALL_WORKERS); - } - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/automator/TaskRunnerConfigurer.java b/client/src/main/java/com/netflix/conductor/client/automator/TaskRunnerConfigurer.java deleted file mode 100644 index aa98ec635..000000000 --- 
a/client/src/main/java/com/netflix/conductor/client/automator/TaskRunnerConfigurer.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.Validate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.discovery.EurekaClient; - -/** Configures automated polling of tasks and execution via the registered {@link Worker}s. 
*/ -public class TaskRunnerConfigurer { - private static final Logger LOGGER = LoggerFactory.getLogger(TaskRunnerConfigurer.class); - private static final String INVALID_THREAD_COUNT = - "Invalid worker thread count specified, use either shared thread pool or config thread count per task"; - private static final String MISSING_TASK_THREAD_COUNT = - "Missing task thread count config for %s"; - - private ScheduledExecutorService scheduledExecutorService; - - private final EurekaClient eurekaClient; - private final TaskClient taskClient; - private final List workers = new LinkedList<>(); - private final int sleepWhenRetry; - private final int updateRetryCount; - private final int threadCount; - private final int shutdownGracePeriodSeconds; - private final String workerNamePrefix; - private final Map taskToDomain; - private final Map taskThreadCount; - - private TaskPollExecutor taskPollExecutor; - - /** - * @see TaskRunnerConfigurer.Builder - * @see TaskRunnerConfigurer#init() - */ - private TaskRunnerConfigurer(Builder builder) { - // only allow either shared thread pool or per task thread pool - if (builder.threadCount != -1 && !builder.taskThreadCount.isEmpty()) { - LOGGER.error(INVALID_THREAD_COUNT); - throw new ConductorClientException(INVALID_THREAD_COUNT); - } else if (!builder.taskThreadCount.isEmpty()) { - for (Worker worker : builder.workers) { - if (!builder.taskThreadCount.containsKey(worker.getTaskDefName())) { - String message = - String.format(MISSING_TASK_THREAD_COUNT, worker.getTaskDefName()); - LOGGER.error(message); - throw new ConductorClientException(message); - } - workers.add(worker); - } - this.taskThreadCount = builder.taskThreadCount; - this.threadCount = -1; - } else { - builder.workers.forEach(workers::add); - this.taskThreadCount = builder.taskThreadCount; - this.threadCount = (builder.threadCount == -1) ? 
workers.size() : builder.threadCount; - } - - this.eurekaClient = builder.eurekaClient; - this.taskClient = builder.taskClient; - this.sleepWhenRetry = builder.sleepWhenRetry; - this.updateRetryCount = builder.updateRetryCount; - this.workerNamePrefix = builder.workerNamePrefix; - this.taskToDomain = builder.taskToDomain; - this.shutdownGracePeriodSeconds = builder.shutdownGracePeriodSeconds; - } - - /** Builder used to create the instances of TaskRunnerConfigurer */ - public static class Builder { - - private String workerNamePrefix = "workflow-worker-%d"; - private int sleepWhenRetry = 500; - private int updateRetryCount = 3; - private int threadCount = -1; - private int shutdownGracePeriodSeconds = 10; - private final Iterable workers; - private EurekaClient eurekaClient; - private final TaskClient taskClient; - private Map taskToDomain = new HashMap<>(); - private Map taskThreadCount = new HashMap<>(); - - public Builder(TaskClient taskClient, Iterable workers) { - Validate.notNull(taskClient, "TaskClient cannot be null"); - Validate.notNull(workers, "Workers cannot be null"); - this.taskClient = taskClient; - this.workers = workers; - } - - /** - * @param workerNamePrefix prefix to be used for worker names, defaults to workflow-worker- - * if not supplied. - * @return Returns the current instance. - */ - public Builder withWorkerNamePrefix(String workerNamePrefix) { - this.workerNamePrefix = workerNamePrefix; - return this; - } - - /** - * @param sleepWhenRetry time in milliseconds, for which the thread should sleep when task - * update call fails, before retrying the operation. - * @return Returns the current instance. 
- */ - public Builder withSleepWhenRetry(int sleepWhenRetry) { - this.sleepWhenRetry = sleepWhenRetry; - return this; - } - - /** - * @param updateRetryCount number of times to retry the failed updateTask operation - * @return Builder instance - * @see #withSleepWhenRetry(int) - */ - public Builder withUpdateRetryCount(int updateRetryCount) { - this.updateRetryCount = updateRetryCount; - return this; - } - - /** - * @param threadCount # of threads assigned to the workers. Should be at-least the size of - * taskWorkers to avoid starvation in a busy system. - * @return Builder instance - */ - public Builder withThreadCount(int threadCount) { - if (threadCount < 1) { - throw new IllegalArgumentException("No. of threads cannot be less than 1"); - } - this.threadCount = threadCount; - return this; - } - - /** - * @param shutdownGracePeriodSeconds waiting seconds before forcing shutdown of your worker - * @return Builder instance - */ - public Builder withShutdownGracePeriodSeconds(int shutdownGracePeriodSeconds) { - if (shutdownGracePeriodSeconds < 1) { - throw new IllegalArgumentException( - "Seconds of shutdownGracePeriod cannot be less than 1"); - } - this.shutdownGracePeriodSeconds = shutdownGracePeriodSeconds; - return this; - } - - /** - * @param eurekaClient Eureka client - used to identify if the server is in discovery or - * not. When the server goes out of discovery, the polling is terminated. If passed - * null, discovery check is not done. - * @return Builder instance - */ - public Builder withEurekaClient(EurekaClient eurekaClient) { - this.eurekaClient = eurekaClient; - return this; - } - - public Builder withTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - return this; - } - - public Builder withTaskThreadCount(Map taskThreadCount) { - this.taskThreadCount = taskThreadCount; - return this; - } - - /** - * Builds an instance of the TaskRunnerConfigurer. - * - *

Please see {@link TaskRunnerConfigurer#init()} method. The method must be called after - * this constructor for the polling to start. - */ - public TaskRunnerConfigurer build() { - return new TaskRunnerConfigurer(this); - } - } - - /** - * @return Thread Count for the shared executor pool - */ - public int getThreadCount() { - return threadCount; - } - - /** - * @return Thread Count for individual task type - */ - public Map getTaskThreadCount() { - return taskThreadCount; - } - - /** - * @return seconds before forcing shutdown of worker - */ - public int getShutdownGracePeriodSeconds() { - return shutdownGracePeriodSeconds; - } - - /** - * @return sleep time in millisecond before task update retry is done when receiving error from - * the Conductor server - */ - public int getSleepWhenRetry() { - return sleepWhenRetry; - } - - /** - * @return Number of times updateTask should be retried when receiving error from Conductor - * server - */ - public int getUpdateRetryCount() { - return updateRetryCount; - } - - /** - * @return prefix used for worker names - */ - public String getWorkerNamePrefix() { - return workerNamePrefix; - } - - /** - * Starts the polling. Must be called after {@link TaskRunnerConfigurer.Builder#build()} method. - */ - public synchronized void init() { - this.taskPollExecutor = - new TaskPollExecutor( - eurekaClient, - taskClient, - threadCount, - updateRetryCount, - taskToDomain, - workerNamePrefix, - taskThreadCount); - - this.scheduledExecutorService = Executors.newScheduledThreadPool(workers.size()); - workers.forEach( - worker -> - scheduledExecutorService.scheduleWithFixedDelay( - () -> taskPollExecutor.pollAndExecute(worker), - worker.getPollingInterval(), - worker.getPollingInterval(), - TimeUnit.MILLISECONDS)); - } - - /** - * Invoke this method within a PreDestroy block within your application to facilitate a graceful - * shutdown of your worker, during process termination. 
- */ - public void shutdown() { - taskPollExecutor.shutdownExecutorService( - scheduledExecutorService, shutdownGracePeriodSeconds); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/config/ConductorClientConfiguration.java b/client/src/main/java/com/netflix/conductor/client/config/ConductorClientConfiguration.java deleted file mode 100644 index 6c3029fa1..000000000 --- a/client/src/main/java/com/netflix/conductor/client/config/ConductorClientConfiguration.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.config; - -public interface ConductorClientConfiguration { - - /** - * @return the workflow input payload size threshold in KB, beyond which the payload will be - * processed based on {@link - * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}. - */ - int getWorkflowInputPayloadThresholdKB(); - - /** - * @return the max value of workflow input payload size threshold in KB, beyond which the - * payload will be rejected regardless external payload storage is enabled. - */ - int getWorkflowInputMaxPayloadThresholdKB(); - - /** - * @return the task output payload size threshold in KB, beyond which the payload will be - * processed based on {@link - * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}. - */ - int getTaskOutputPayloadThresholdKB(); - - /** - * @return the max value of task output payload size threshold in KB, beyond which the payload - * will be rejected regardless external payload storage is enabled. - */ - int getTaskOutputMaxPayloadThresholdKB(); - - /** - * @return the flag which controls the use of external storage for storing workflow/task input - * and output JSON payloads with size greater than threshold. If it is set to true, the - * payload is stored in external location. If it is set to false, the payload is rejected - * and the task/workflow execution fails. 
- */ - boolean isExternalPayloadStorageEnabled(); -} diff --git a/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java b/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java deleted file mode 100644 index f15cf3bab..000000000 --- a/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.config; - -/** - * A default implementation of {@link ConductorClientConfiguration} where external payload storage - * is disabled. - */ -public class DefaultConductorClientConfiguration implements ConductorClientConfiguration { - - @Override - public int getWorkflowInputPayloadThresholdKB() { - return 5120; - } - - @Override - public int getWorkflowInputMaxPayloadThresholdKB() { - return 10240; - } - - @Override - public int getTaskOutputPayloadThresholdKB() { - return 3072; - } - - @Override - public int getTaskOutputMaxPayloadThresholdKB() { - return 10240; - } - - @Override - public boolean isExternalPayloadStorageEnabled() { - return false; - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java b/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java deleted file mode 100644 index 443b85481..000000000 --- a/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.config; - -import java.util.concurrent.ConcurrentHashMap; - -import com.netflix.config.DynamicProperty; - -/** Used to configure the Conductor workers using properties. */ -public class PropertyFactory { - - private final DynamicProperty global; - private final DynamicProperty local; - - private static final String PROPERTY_PREFIX = "conductor.worker"; - - private static final ConcurrentHashMap PROPERTY_FACTORY_MAP = - new ConcurrentHashMap<>(); - - private PropertyFactory(String prefix, String propName, String workerName) { - this.global = DynamicProperty.getInstance(prefix + "." + propName); - this.local = DynamicProperty.getInstance(prefix + "." + workerName + "." + propName); - } - - /** - * @param defaultValue Default Value - * @return Returns the value as integer. If not value is set (either global or worker specific), - * then returns the default value. - */ - public Integer getInteger(int defaultValue) { - Integer value = local.getInteger(); - if (value == null) { - value = global.getInteger(defaultValue); - } - return value; - } - - /** - * @param defaultValue Default Value - * @return Returns the value as String. If not value is set (either global or worker specific), - * then returns the default value. - */ - public String getString(String defaultValue) { - String value = local.getString(); - if (value == null) { - value = global.getString(defaultValue); - } - return value; - } - - /** - * @param defaultValue Default Value - * @return Returns the value as Boolean. If not value is set (either global or worker specific), - * then returns the default value. 
- */ - public Boolean getBoolean(Boolean defaultValue) { - Boolean value = local.getBoolean(); - if (value == null) { - value = global.getBoolean(defaultValue); - } - return value; - } - - public static Integer getInteger(String workerName, String property, Integer defaultValue) { - return getPropertyFactory(workerName, property).getInteger(defaultValue); - } - - public static Boolean getBoolean(String workerName, String property, Boolean defaultValue) { - return getPropertyFactory(workerName, property).getBoolean(defaultValue); - } - - public static String getString(String workerName, String property, String defaultValue) { - return getPropertyFactory(workerName, property).getString(defaultValue); - } - - private static PropertyFactory getPropertyFactory(String workerName, String property) { - String key = property + "." + workerName; - return PROPERTY_FACTORY_MAP.computeIfAbsent( - key, t -> new PropertyFactory(PROPERTY_PREFIX, property, workerName)); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/exception/ConductorClientException.java b/client/src/main/java/com/netflix/conductor/client/exception/ConductorClientException.java deleted file mode 100644 index 5f3c79c00..000000000 --- a/client/src/main/java/com/netflix/conductor/client/exception/ConductorClientException.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.exception; - -import java.util.List; - -import com.netflix.conductor.common.validation.ErrorResponse; -import com.netflix.conductor.common.validation.ValidationError; - -/** Client exception thrown from Conductor api clients. */ -public class ConductorClientException extends RuntimeException { - - private int status; - private String message; - private String instance; - private String code; - private boolean retryable; - - public List getValidationErrors() { - return validationErrors; - } - - public void setValidationErrors(List validationErrors) { - this.validationErrors = validationErrors; - } - - private List validationErrors; - - public ConductorClientException() { - super(); - } - - public ConductorClientException(String message) { - super(message); - this.message = message; - } - - public ConductorClientException(String message, Throwable cause) { - super(message, cause); - this.message = message; - } - - public ConductorClientException(int status, String message) { - super(message); - this.status = status; - this.message = message; - } - - public ConductorClientException(int status, ErrorResponse errorResponse) { - super(errorResponse.getMessage()); - this.status = status; - this.retryable = errorResponse.isRetryable(); - this.message = errorResponse.getMessage(); - this.code = errorResponse.getCode(); - this.instance = errorResponse.getInstance(); - this.validationErrors = errorResponse.getValidationErrors(); - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - - builder.append(getClass().getName()).append(": "); - - if (this.message != null) { - 
builder.append(message); - } - - if (status > 0) { - builder.append(" {status=").append(status); - if (this.code != null) { - builder.append(", code='").append(code).append("'"); - } - - builder.append(", retryable: ").append(retryable); - } - - if (this.instance != null) { - builder.append(", instance: ").append(instance); - } - - if (this.validationErrors != null) { - builder.append(", validationErrors: ").append(validationErrors.toString()); - } - - builder.append("}"); - return builder.toString(); - } - - public String getCode() { - return code; - } - - public void setCode(String code) { - this.code = code; - } - - public void setStatus(int status) { - this.status = status; - } - - public void setMessage(String message) { - this.message = message; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - public boolean isRetryable() { - return retryable; - } - - public void setRetryable(boolean retryable) { - this.retryable = retryable; - } - - @Override - public String getMessage() { - return this.message; - } - - public int getStatus() { - return this.status; - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/exception/RequestHandlerException.java b/client/src/main/java/com/netflix/conductor/client/exception/RequestHandlerException.java deleted file mode 100644 index 7553d499c..000000000 --- a/client/src/main/java/com/netflix/conductor/client/exception/RequestHandlerException.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.exception; - -import java.io.InputStream; - -public class RequestHandlerException extends RuntimeException { - - private InputStream response; - private int status; - - public RequestHandlerException(InputStream response, int status) { - this.response = response; - this.status = status; - } - - public RequestHandlerException(String message, Throwable cause) { - super(message, cause); - } - - public RequestHandlerException(String message) { - super(message); - } - - public InputStream getResponse() { - return response; - } - - public int getStatus() { - return status; - } - - public boolean hasResponse() { - return response != null; - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java b/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java deleted file mode 100644 index e04f96feb..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java +++ /dev/null @@ -1,447 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Map; - -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriBuilder; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.config.ConductorClientConfiguration; -import com.netflix.conductor.client.config.DefaultConductorClientConfiguration; -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.exception.RequestHandlerException; -import com.netflix.conductor.client.http.jersey.JerseyRequestHandler; -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.validation.ErrorResponse; - -import com.fasterxml.jackson.core.Version; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import com.sun.jersey.api.client.config.DefaultClientConfig; - -/** Abstract 
client for the REST server */ -public abstract class ClientBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(ClientBase.class); - - private final RequestHandler requestHandler; - - private String root = ""; - - protected final ObjectMapper objectMapper; - - private final PayloadStorage payloadStorage; - - protected final ConductorClientConfiguration conductorClientConfiguration; - - protected ClientBase( - RequestHandler requestHandler, ConductorClientConfiguration clientConfiguration) { - this.objectMapper = new ObjectMapperProvider().getObjectMapper(); - - if (requestHandler == null) { - // https://github.com/FasterXML/jackson-databind/issues/2683 - if (isNewerJacksonVersion()) { - objectMapper.registerModule(new JavaTimeModule()); - } - this.requestHandler = - new JerseyRequestHandler(new DefaultClientConfig(), null, objectMapper); - } else { - this.requestHandler = requestHandler; - } - - this.conductorClientConfiguration = - ObjectUtils.defaultIfNull( - clientConfiguration, new DefaultConductorClientConfiguration()); - this.payloadStorage = new PayloadStorage(); - } - - public void setRootURI(String root) { - this.root = root; - } - - protected void delete(String url, Object... uriVariables) { - deleteWithUriVariables(url, null, uriVariables); - } - - protected void deleteWithUriVariables( - String url, Object[] queryParams, Object... uriVariables) { - URI uri = getURIBuilder(getFullUrl(url), queryParams).build(uriVariables); - try { - requestHandler.delete(uri); - } catch (RequestHandlerException rhe) { - throw createClientException(rhe); - } - } - - protected void put(String url, Object[] queryParams, Object request, Object... 
uriVariables) { - URI uri = getURIBuilder(getFullUrl(url), queryParams).build(uriVariables); - try { - requestHandler.put(uri, request); - } catch (RequestHandlerException rhe) { - throw createClientException(rhe); - } - } - - protected void post(String url, Object request) { - postForEntity(url, request, null, null); - } - - protected void postWithUriVariables(String url, Object... uriVariables) { - postForEntity(url, null, null, null, uriVariables); - } - - protected T postForEntity( - String url, - Object request, - Object[] queryParams, - Class responseType, - Object... uriVariables) { - URI uri = getURIBuilder(getFullUrl(url), queryParams).build(uriVariables); - - try { - InputStream response = requestHandler.post(uri, request); - if (responseType == null) { - return null; - } - return convertToType(response, responseType); - } catch (RequestHandlerException rhe) { - throw createClientException(rhe); - } - } - - protected String postForString( - String url, Object request, Object[] queryParams, Object... uriVariables) { - URI uri = getURIBuilder(getFullUrl(url), queryParams).build(uriVariables); - try { - InputStream response = requestHandler.post(uri, request); - return convertToString(response); - } catch (RequestHandlerException rhe) { - throw createClientException(rhe); - } - } - - protected T getForEntity( - String url, Object[] queryParams, Class responseType, Object... uriVariables) { - InputStream response = getForEntity(url, queryParams, uriVariables); - return convertToType(response, responseType); - } - - protected T getForEntity( - String url, - Object[] queryParams, - TypeReference responseType, - Object... uriVariables) { - InputStream response = getForEntity(url, queryParams, uriVariables); - return convertToType(response, responseType); - } - - /** - * Uses the {@link PayloadStorage} for storing large payloads. Gets the uri for storing the - * payload from the server and then uploads to this location. 
- * - * @param payloadType the {@link - * com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType} to be uploaded. - * @param payloadBytes the byte array containing the payload. - * @param payloadSize the size of the payload. - * @return the path where the payload is stored in external storage. - */ - protected String uploadToExternalPayloadStorage( - ExternalPayloadStorage.PayloadType payloadType, byte[] payloadBytes, long payloadSize) { - Validate.isTrue( - payloadType.equals(ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT) - || payloadType.equals(ExternalPayloadStorage.PayloadType.TASK_OUTPUT), - "Payload type must be workflow input or task output"); - ExternalStorageLocation externalStorageLocation = - payloadStorage.getLocation(ExternalPayloadStorage.Operation.WRITE, payloadType, ""); - payloadStorage.upload( - externalStorageLocation.getUri(), - new ByteArrayInputStream(payloadBytes), - payloadSize); - return externalStorageLocation.getPath(); - } - - /** - * Uses the {@link PayloadStorage} for downloading large payloads to be used by the client. Gets - * the uri of the payload fom the server and then downloads from this location. 
- * - * @param payloadType the {@link - * com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType} to be downloaded - * @param path the relative of the payload in external storage - * @return the payload object that is stored in external storage - */ - @SuppressWarnings("unchecked") - protected Map downloadFromExternalStorage( - ExternalPayloadStorage.PayloadType payloadType, String path) { - Validate.notBlank(path, "uri cannot be blank"); - ExternalStorageLocation externalStorageLocation = - payloadStorage.getLocation( - ExternalPayloadStorage.Operation.READ, payloadType, path); - try (InputStream inputStream = payloadStorage.download(externalStorageLocation.getUri())) { - return objectMapper.readValue(inputStream, Map.class); - } catch (IOException e) { - String errorMsg = - String.format( - "Unable to download payload from external storage location: %s", path); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } - } - - private String getFullUrl(String url) { - return root + url; - } - - private UriBuilder getURIBuilder(String path, Object[] queryParams) { - path = StringUtils.trimToEmpty(path); - - UriBuilder builder = UriBuilder.fromPath(path); - if (queryParams != null) { - for (int i = 0; i < queryParams.length; i += 2) { - String param = queryParams[i].toString(); - Object value = queryParams[i + 1]; - if (value != null) { - if (value instanceof Collection) { - Object[] values = ((Collection) value).toArray(); - builder.queryParam(param, values); - } else { - builder.queryParam(param, value); - } - } - } - } - return builder; - } - - private boolean isNewerJacksonVersion() { - Version version = com.fasterxml.jackson.databind.cfg.PackageVersion.VERSION; - return version.getMajorVersion() == 2 && version.getMinorVersion() >= 12; - } - - private InputStream getForEntity(String url, Object[] queryParams, Object... 
uriVariables) { - URI uri = getURIBuilder(getFullUrl(url), queryParams).build(uriVariables); - try { - return requestHandler.get(uri); - } catch (RequestHandlerException rhe) { - throw createClientException(rhe); - } - } - - private ConductorClientException createClientException(RequestHandlerException rhe) { - if (rhe.hasResponse()) { - ErrorResponse errorResponse = convertToType(rhe.getResponse(), ErrorResponse.class); - if (errorResponse != null) { - return new ConductorClientException(rhe.getStatus(), errorResponse); - } - } - - return new ConductorClientException(rhe.getMessage(), rhe.getCause()); - } - - private String convertToString(InputStream inputStream) { - try { - return IOUtils.toString(inputStream, StandardCharsets.UTF_8); - } catch (IOException e) { - throw new ConductorClientException("Error converting response to String", e); - } finally { - IOUtils.closeQuietly(inputStream, (e) -> LOGGER.error("Error closing input stream", e)); - } - } - - private T convertToType(InputStream inputStream, Class responseType) { - try { - String value = convertToString(inputStream); - return StringUtils.isNotBlank(value) - ? objectMapper.readValue(value, responseType) - : null; - } catch (IOException e) { - throw new ConductorClientException("Error converting response to " + responseType, e); - } - } - - private T convertToType(InputStream inputStream, TypeReference responseType) { - try { - String value = convertToString(inputStream); - return StringUtils.isNotBlank(value) - ? objectMapper.readValue(value, responseType) - : null; - } catch (IOException e) { - throw new ConductorClientException("Error converting response to " + responseType, e); - } - } - - /** An implementation of {@link ExternalPayloadStorage} for storing large JSON payload data. */ - class PayloadStorage implements ExternalPayloadStorage { - - /** - * This method is not intended to be used in the client. 
The client makes a request to the - * server to get the {@link ExternalStorageLocation} - */ - @Override - public ExternalStorageLocation getLocation( - Operation operation, PayloadType payloadType, String path) { - String url; - switch (payloadType) { - case WORKFLOW_INPUT: - case WORKFLOW_OUTPUT: - url = "workflow"; - break; - case TASK_INPUT: - case TASK_OUTPUT: - url = "tasks"; - break; - default: - throw new ConductorClientException( - String.format( - "Invalid payload type: %s for operation: %s", - payloadType, operation.toString())); - } - return getForEntity( - url + "/externalstoragelocation", - new Object[] { - "path", - path, - "operation", - operation.toString(), - "payloadType", - payloadType.toString() - }, - ExternalStorageLocation.class); - } - - /** - * Uploads the payload to the uri specified. - * - * @param uri the location to which the object is to be uploaded - * @param payload an {@link InputStream} containing the json payload which is to be uploaded - * @param payloadSize the size of the json payload in bytes - * @throws ConductorClientException if the upload fails due to an invalid path or an error - * from external storage - */ - @Override - public void upload(String uri, InputStream payload, long payloadSize) { - HttpURLConnection connection = null; - try { - URL url = new URI(uri).toURL(); - - connection = (HttpURLConnection) url.openConnection(); - connection.setDoOutput(true); - connection.setRequestMethod("PUT"); - - try (BufferedOutputStream bufferedOutputStream = - new BufferedOutputStream(connection.getOutputStream())) { - long count = IOUtils.copy(payload, bufferedOutputStream); - bufferedOutputStream.flush(); - // Check the HTTP response code - int responseCode = connection.getResponseCode(); - if (Response.Status.fromStatusCode(responseCode).getFamily() - != Response.Status.Family.SUCCESSFUL) { - String errorMsg = - String.format("Unable to upload. 
Response code: %d", responseCode); - LOGGER.error(errorMsg); - throw new ConductorClientException(errorMsg); - } - LOGGER.debug( - "Uploaded {} bytes to uri: {}, with HTTP response code: {}", - count, - uri, - responseCode); - } - } catch (URISyntaxException | MalformedURLException e) { - String errorMsg = String.format("Invalid path specified: %s", uri); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } catch (IOException e) { - String errorMsg = String.format("Error uploading to path: %s", uri); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } finally { - if (connection != null) { - connection.disconnect(); - } - try { - if (payload != null) { - payload.close(); - } - } catch (IOException e) { - LOGGER.warn("Unable to close inputstream when uploading to uri: {}", uri); - } - } - } - - /** - * Downloads the payload from the given uri. - * - * @param uri the location from where the object is to be downloaded - * @return an inputstream of the payload in the external storage - * @throws ConductorClientException if the download fails due to an invalid path or an error - * from external storage - */ - @Override - public InputStream download(String uri) { - HttpURLConnection connection = null; - String errorMsg; - try { - URL url = new URI(uri).toURL(); - connection = (HttpURLConnection) url.openConnection(); - connection.setDoOutput(false); - - // Check the HTTP response code - int responseCode = connection.getResponseCode(); - if (responseCode == HttpURLConnection.HTTP_OK) { - LOGGER.debug( - "Download completed with HTTP response code: {}", - connection.getResponseCode()); - return org.apache.commons.io.IOUtils.toBufferedInputStream( - connection.getInputStream()); - } - errorMsg = String.format("Unable to download. 
Response code: %d", responseCode); - LOGGER.error(errorMsg); - throw new ConductorClientException(errorMsg); - } catch (URISyntaxException | MalformedURLException e) { - errorMsg = String.format("Invalid uri specified: %s", uri); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } catch (IOException e) { - errorMsg = String.format("Error downloading from uri: %s", uri); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } finally { - if (connection != null) { - connection.disconnect(); - } - } - } - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/EventClient.java b/client/src/main/java/com/netflix/conductor/client/http/EventClient.java deleted file mode 100644 index eed33bace..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/EventClient.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.util.List; - -import org.apache.commons.lang3.Validate; - -import com.netflix.conductor.client.config.ConductorClientConfiguration; -import com.netflix.conductor.common.metadata.events.EventHandler; - -import com.fasterxml.jackson.core.type.TypeReference; - -// Client class for all Event Handler operations -public class EventClient extends ClientBase { - - private static final TypeReference> eventHandlerList = - new TypeReference>() {}; - - /** Creates a default metadata client */ - public EventClient() { - this(null); - } - - public EventClient(RequestHandler requestHandler) { - this(requestHandler, null); - } - - public EventClient( - RequestHandler requestHandler, ConductorClientConfiguration clientConfiguration) { - super(requestHandler, clientConfiguration); - } - - /** - * Register an event handler with the server. - * - * @param eventHandler the eventHandler definition. - */ - public void registerEventHandler(EventHandler eventHandler) { - Validate.notNull(eventHandler, "Event Handler definition cannot be null"); - post("event", eventHandler); - } - - /** - * Updates an event handler with the server. - * - * @param eventHandler the eventHandler definition. - */ - public void updateEventHandler(EventHandler eventHandler) { - Validate.notNull(eventHandler, "Event Handler definition cannot be null"); - put("event", null, eventHandler); - } - - /** - * @param event name of the event. - * @param activeOnly if true, returns only the active handlers. - * @return Returns the list of all the event handlers for a given event. 
- */ - public List getEventHandlers(String event, boolean activeOnly) { - Validate.notBlank(event, "Event cannot be blank"); - - return getForEntity( - "event/{event}", new Object[] {"activeOnly", activeOnly}, eventHandlerList, event); - } - - /** - * Removes the event handler definition from the conductor server - * - * @param name the name of the event handler to be unregistered - */ - public void unregisterEventHandler(String name) { - Validate.notBlank(name, "Event handler name cannot be blank"); - delete("event/{name}", name); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/MetadataClient.java b/client/src/main/java/com/netflix/conductor/client/http/MetadataClient.java deleted file mode 100644 index b67a9f22a..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/MetadataClient.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.util.List; - -import org.apache.commons.lang3.Validate; - -import com.netflix.conductor.client.config.ConductorClientConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; - -public class MetadataClient extends ClientBase { - - /** Creates a default metadata client */ - public MetadataClient() { - this(null); - } - - public MetadataClient(RequestHandler requestHandler) { - this(requestHandler, null); - } - - public MetadataClient( - RequestHandler requestHandler, ConductorClientConfiguration clientConfiguration) { - super(requestHandler, clientConfiguration); - } - - // Workflow Metadata Operations - - /** - * Register a workflow definition with the server - * - * @param workflowDef the workflow definition - */ - public void registerWorkflowDef(WorkflowDef workflowDef) { - Validate.notNull(workflowDef, "Worfklow definition cannot be null"); - post("metadata/workflow", workflowDef); - } - - /** - * Updates a list of existing workflow definitions - * - * @param workflowDefs List of workflow definitions to be updated - */ - public void updateWorkflowDefs(List workflowDefs) { - Validate.notNull(workflowDefs, "Worfklow defs list cannot be null"); - put("metadata/workflow", null, workflowDefs); - } - - /** - * Retrieve the workflow definition - * - * @param name the name of the workflow - * @param version the version of the workflow def - * @return Workflow definition for the given workflow and version - */ - public WorkflowDef getWorkflowDef(String name, Integer version) { - Validate.notBlank(name, "name 
cannot be blank"); - return getForEntity( - "metadata/workflow/{name}", - new Object[] {"version", version}, - WorkflowDef.class, - name); - } - - /** - * Removes the workflow definition of a workflow from the conductor server. It does not remove - * associated workflows. Use with caution. - * - * @param name Name of the workflow to be unregistered. - * @param version Version of the workflow definition to be unregistered. - */ - public void unregisterWorkflowDef(String name, Integer version) { - Validate.notBlank(name, "Workflow name cannot be blank"); - Validate.notNull(version, "Version cannot be null"); - delete("metadata/workflow/{name}/{version}", name, version); - } - - // Task Metadata Operations - - /** - * Registers a list of task types with the conductor server - * - * @param taskDefs List of task types to be registered. - */ - public void registerTaskDefs(List taskDefs) { - Validate.notNull(taskDefs, "Task defs list cannot be null"); - post("metadata/taskdefs", taskDefs); - } - - /** - * Updates an existing task definition - * - * @param taskDef the task definition to be updated - */ - public void updateTaskDef(TaskDef taskDef) { - Validate.notNull(taskDef, "Task definition cannot be null"); - put("metadata/taskdefs", null, taskDef); - } - - /** - * Retrieve the task definition of a given task type - * - * @param taskType type of task for which to retrieve the definition - * @return Task Definition for the given task type - */ - public TaskDef getTaskDef(String taskType) { - Validate.notBlank(taskType, "Task type cannot be blank"); - return getForEntity("metadata/taskdefs/{tasktype}", null, TaskDef.class, taskType); - } - - /** - * Removes the task definition of a task type from the conductor server. Use with caution. - * - * @param taskType Task type to be unregistered. 
- */ - public void unregisterTaskDef(String taskType) { - Validate.notBlank(taskType, "Task type cannot be blank"); - delete("metadata/taskdefs/{tasktype}", taskType); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/RequestHandler.java b/client/src/main/java/com/netflix/conductor/client/http/RequestHandler.java deleted file mode 100644 index 76692e17b..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/RequestHandler.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.io.InputStream; -import java.net.URI; - -import com.netflix.conductor.client.exception.RequestHandlerException; - -public interface RequestHandler { - void delete(URI uri) throws RequestHandlerException; - - InputStream put(URI uri, Object body) throws RequestHandlerException; - - InputStream post(URI uri, Object body) throws RequestHandlerException; - - InputStream get(URI uri) throws RequestHandlerException; -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java b/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java deleted file mode 100644 index 40c7528e6..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java +++ /dev/null @@ -1,440 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.config.ConductorClientConfiguration; -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.telemetry.MetricsContainer; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType; - -import com.fasterxml.jackson.core.type.TypeReference; - -/** Client for conductor task management including polling for task, updating task status etc. 
*/ -public class TaskClient extends ClientBase { - - private static final TypeReference> taskList = new TypeReference>() {}; - - private static final TypeReference> taskExecLogList = - new TypeReference>() {}; - - private static final TypeReference> pollDataList = - new TypeReference>() {}; - - private static final TypeReference> searchResultTaskSummary = - new TypeReference>() {}; - - private static final TypeReference> searchResultTask = - new TypeReference>() {}; - - private static final TypeReference> queueSizeMap = - new TypeReference>() {}; - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskClient.class); - - /** Creates a default task client */ - public TaskClient() { - this(null); - } - - public TaskClient(RequestHandler requestHandler) { - this(requestHandler, null); - } - - public TaskClient( - RequestHandler requestHandler, ConductorClientConfiguration clientConfiguration) { - super(requestHandler, clientConfiguration); - } - - /** - * Perform a poll for a task of a specific task type. - * - * @param taskType The taskType to poll for - * @param domain The domain of the task type - * @param workerId Name of the client worker. Used for logging. - * @return Task waiting to be executed. - */ - public Task pollTask(String taskType, String workerId, String domain) { - Validate.notBlank(taskType, "Task type cannot be blank"); - Validate.notBlank(workerId, "Worker id cannot be blank"); - - Object[] params = new Object[] {"workerid", workerId, "domain", domain}; - Task task = - ObjectUtils.defaultIfNull( - getForEntity("tasks/poll/{taskType}", params, Task.class, taskType), - new Task()); - populateTaskPayloads(task); - return task; - } - - /** - * Perform a batch poll for tasks by task type. Batch size is configurable by count. - * - * @param taskType Type of task to poll for - * @param workerId Name of the client worker. Used for logging. - * @param count Maximum number of tasks to be returned. 
Actual number of tasks returned can be - * less than this number. - * @param timeoutInMillisecond Long poll wait timeout. - * @return List of tasks awaiting to be executed. - */ - public List batchPollTasksByTaskType( - String taskType, String workerId, int count, int timeoutInMillisecond) { - Validate.notBlank(taskType, "Task type cannot be blank"); - Validate.notBlank(workerId, "Worker id cannot be blank"); - Validate.isTrue(count > 0, "Count must be greater than 0"); - - Object[] params = - new Object[] { - "workerid", workerId, "count", count, "timeout", timeoutInMillisecond - }; - List tasks = getForEntity("tasks/poll/batch/{taskType}", params, taskList, taskType); - tasks.forEach(this::populateTaskPayloads); - return tasks; - } - - /** - * Batch poll for tasks in a domain. Batch size is configurable by count. - * - * @param taskType Type of task to poll for - * @param domain The domain of the task type - * @param workerId Name of the client worker. Used for logging. - * @param count Maximum number of tasks to be returned. Actual number of tasks returned can be - * less than this number. - * @param timeoutInMillisecond Long poll wait timeout. - * @return List of tasks awaiting to be executed. - */ - public List batchPollTasksInDomain( - String taskType, String domain, String workerId, int count, int timeoutInMillisecond) { - Validate.notBlank(taskType, "Task type cannot be blank"); - Validate.notBlank(workerId, "Worker id cannot be blank"); - Validate.isTrue(count > 0, "Count must be greater than 0"); - - Object[] params = - new Object[] { - "workerid", - workerId, - "count", - count, - "timeout", - timeoutInMillisecond, - "domain", - domain - }; - List tasks = getForEntity("tasks/poll/batch/{taskType}", params, taskList, taskType); - tasks.forEach(this::populateTaskPayloads); - return tasks; - } - - /** - * Populates the task input/output from external payload storage if the external storage path is - * specified. 
- * - * @param task the task for which the input is to be populated. - */ - private void populateTaskPayloads(Task task) { - if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) { - MetricsContainer.incrementExternalPayloadUsedCount( - task.getTaskDefName(), - ExternalPayloadStorage.Operation.READ.name(), - ExternalPayloadStorage.PayloadType.TASK_INPUT.name()); - task.setInputData( - downloadFromExternalStorage( - ExternalPayloadStorage.PayloadType.TASK_INPUT, - task.getExternalInputPayloadStoragePath())); - task.setExternalInputPayloadStoragePath(null); - } - if (StringUtils.isNotBlank(task.getExternalOutputPayloadStoragePath())) { - MetricsContainer.incrementExternalPayloadUsedCount( - task.getTaskDefName(), - ExternalPayloadStorage.Operation.READ.name(), - PayloadType.TASK_OUTPUT.name()); - task.setOutputData( - downloadFromExternalStorage( - ExternalPayloadStorage.PayloadType.TASK_OUTPUT, - task.getExternalOutputPayloadStoragePath())); - task.setExternalOutputPayloadStoragePath(null); - } - } - - /** - * Updates the result of a task execution. If the size of the task output payload is bigger than - * {@link ConductorClientConfiguration#getTaskOutputPayloadThresholdKB()}, it is uploaded to - * {@link ExternalPayloadStorage}, if enabled, else the task is marked as - * FAILED_WITH_TERMINAL_ERROR. - * - * @param taskResult the {@link TaskResult} of the executed task to be updated. 
- */ - public void updateTask(TaskResult taskResult) { - Validate.notNull(taskResult, "Task result cannot be null"); - post("tasks", taskResult); - } - - public Optional evaluateAndUploadLargePayload( - Map taskOutputData, String taskType) { - try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { - objectMapper.writeValue(byteArrayOutputStream, taskOutputData); - byte[] taskOutputBytes = byteArrayOutputStream.toByteArray(); - long taskResultSize = taskOutputBytes.length; - MetricsContainer.recordTaskResultPayloadSize(taskType, taskResultSize); - - long payloadSizeThreshold = - conductorClientConfiguration.getTaskOutputPayloadThresholdKB() * 1024L; - if (taskResultSize > payloadSizeThreshold) { - if (!conductorClientConfiguration.isExternalPayloadStorageEnabled() - || taskResultSize - > conductorClientConfiguration.getTaskOutputMaxPayloadThresholdKB() - * 1024L) { - throw new IllegalArgumentException( - String.format( - "The TaskResult payload size: %d is greater than the permissible %d bytes", - taskResultSize, payloadSizeThreshold)); - } - MetricsContainer.incrementExternalPayloadUsedCount( - taskType, - ExternalPayloadStorage.Operation.WRITE.name(), - ExternalPayloadStorage.PayloadType.TASK_OUTPUT.name()); - return Optional.of( - uploadToExternalPayloadStorage( - PayloadType.TASK_OUTPUT, taskOutputBytes, taskResultSize)); - } - return Optional.empty(); - } catch (IOException e) { - String errorMsg = String.format("Unable to update task: %s with task result", taskType); - LOGGER.error(errorMsg, e); - throw new ConductorClientException(errorMsg, e); - } - } - - /** - * Ack for the task poll. - * - * @param taskId Id of the task to be polled - * @param workerId user identified worker. - * @return true if the task was found with the given ID and acknowledged. False otherwise. If - * the server returns false, the client should NOT attempt to ack again. 
- */ - public Boolean ack(String taskId, String workerId) { - Validate.notBlank(taskId, "Task id cannot be blank"); - - String response = - postForString( - "tasks/{taskId}/ack", null, new Object[] {"workerid", workerId}, taskId); - return Boolean.valueOf(response); - } - - /** - * Log execution messages for a task. - * - * @param taskId id of the task - * @param logMessage the message to be logged - */ - public void logMessageForTask(String taskId, String logMessage) { - Validate.notBlank(taskId, "Task id cannot be blank"); - post("tasks/" + taskId + "/log", logMessage); - } - - /** - * Fetch execution logs for a task. - * - * @param taskId id of the task. - */ - public List getTaskLogs(String taskId) { - Validate.notBlank(taskId, "Task id cannot be blank"); - return getForEntity("tasks/{taskId}/log", null, taskExecLogList, taskId); - } - - /** - * Retrieve information about the task - * - * @param taskId ID of the task - * @return Task details - */ - public Task getTaskDetails(String taskId) { - Validate.notBlank(taskId, "Task id cannot be blank"); - return getForEntity("tasks/{taskId}", null, Task.class, taskId); - } - - /** - * Removes a task from a taskType queue - * - * @param taskType the taskType to identify the queue - * @param taskId the id of the task to be removed - */ - public void removeTaskFromQueue(String taskType, String taskId) { - Validate.notBlank(taskType, "Task type cannot be blank"); - Validate.notBlank(taskId, "Task id cannot be blank"); - - delete("tasks/queue/{taskType}/{taskId}", taskType, taskId); - } - - public int getQueueSizeForTask(String taskType) { - Validate.notBlank(taskType, "Task type cannot be blank"); - - Integer queueSize = - getForEntity( - "tasks/queue/size", - new Object[] {"taskType", taskType}, - new TypeReference() {}); - return queueSize != null ? 
queueSize : 0; - } - - public int getQueueSizeForTask( - String taskType, String domain, String isolationGroupId, String executionNamespace) { - Validate.notBlank(taskType, "Task type cannot be blank"); - - List params = new LinkedList<>(); - params.add("taskType"); - params.add(taskType); - - if (StringUtils.isNotBlank(domain)) { - params.add("domain"); - params.add(domain); - } - - if (StringUtils.isNotBlank(isolationGroupId)) { - params.add("isolationGroupId"); - params.add(isolationGroupId); - } - - if (StringUtils.isNotBlank(executionNamespace)) { - params.add("executionNamespace"); - params.add(executionNamespace); - } - - Integer queueSize = - getForEntity( - "tasks/queue/size", - params.toArray(new Object[0]), - new TypeReference() {}); - return queueSize != null ? queueSize : 0; - } - - /** - * Get last poll data for a given task type - * - * @param taskType the task type for which poll data is to be fetched - * @return returns the list of poll data for the task type - */ - public List getPollData(String taskType) { - Validate.notBlank(taskType, "Task type cannot be blank"); - - Object[] params = new Object[] {"taskType", taskType}; - return getForEntity("tasks/queue/polldata", params, pollDataList); - } - - /** - * Get the last poll data for all task types - * - * @return returns a list of poll data for all task types - */ - public List getAllPollData() { - return getForEntity("tasks/queue/polldata/all", null, pollDataList); - } - - /** - * Requeue pending tasks for all running workflows - * - * @return returns the number of tasks that have been requeued - */ - public String requeueAllPendingTasks() { - return postForString("tasks/queue/requeue", null, null); - } - - /** - * Requeue pending tasks of a specific task type - * - * @return returns the number of tasks that have been requeued - */ - public String requeuePendingTasksByTaskType(String taskType) { - Validate.notBlank(taskType, "Task type cannot be blank"); - return 
postForString("tasks/queue/requeue/{taskType}", null, null, taskType); - } - - /** - * Search for tasks based on payload - * - * @param query the search string - * @return returns the {@link SearchResult} containing the {@link TaskSummary} matching the - * query - */ - public SearchResult search(String query) { - return getForEntity("tasks/search", new Object[] {"query", query}, searchResultTaskSummary); - } - - /** - * Search for tasks based on payload - * - * @param query the search string - * @return returns the {@link SearchResult} containing the {@link Task} matching the query - */ - public SearchResult searchV2(String query) { - return getForEntity("tasks/search-v2", new Object[] {"query", query}, searchResultTask); - } - - /** - * Paginated search for tasks based on payload - * - * @param start start value of page - * @param size number of tasks to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - * @return the {@link SearchResult} containing the {@link TaskSummary} that match the query - */ - public SearchResult search( - Integer start, Integer size, String sort, String freeText, String query) { - Object[] params = - new Object[] { - "start", start, "size", size, "sort", sort, "freeText", freeText, "query", query - }; - return getForEntity("tasks/search", params, searchResultTaskSummary); - } - - /** - * Paginated search for tasks based on payload - * - * @param start start value of page - * @param size number of tasks to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - * @return the {@link SearchResult} containing the {@link Task} that match the query - */ - public SearchResult searchV2( - Integer start, Integer size, String sort, String freeText, String query) { - Object[] params = - new Object[] { - "start", start, "size", size, "sort", sort, "freeText", freeText, "query", query - }; - return 
getForEntity("tasks/search-v2", params, searchResultTask); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java b/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java deleted file mode 100644 index 60b16470a..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java +++ /dev/null @@ -1,440 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.List; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.config.ConductorClientConfiguration; -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.telemetry.MetricsContainer; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.model.BulkResponse; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; - -import com.fasterxml.jackson.core.type.TypeReference; - -public class WorkflowClient extends ClientBase { - - private static final TypeReference> searchResultWorkflowSummary = - new TypeReference>() {}; - - private static final TypeReference> searchResultWorkflow = - new TypeReference>() {}; - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowClient.class); - - /** Creates a default workflow client */ - public WorkflowClient() { - this(null); - } - - public WorkflowClient(RequestHandler requestHandler) { - this(requestHandler, null); - } - - public WorkflowClient( - RequestHandler requestHandler, ConductorClientConfiguration clientConfiguration) { - super(requestHandler, clientConfiguration); - } - 
- /** - * Starts a workflow. If the size of the workflow input payload is bigger than {@link - * ConductorClientConfiguration#getWorkflowInputPayloadThresholdKB()}, it is uploaded to {@link - * ExternalPayloadStorage}, if enabled, else the workflow is rejected. - * - * @param startWorkflowRequest the {@link StartWorkflowRequest} object to start the workflow - * @return the id of the workflow instance that can be used for tracking - * @throws ConductorClientException if {@link ExternalPayloadStorage} is disabled or if the - * payload size is greater than {@link - * ConductorClientConfiguration#getWorkflowInputMaxPayloadThresholdKB()} - */ - public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { - Validate.notNull(startWorkflowRequest, "StartWorkflowRequest cannot be null"); - Validate.notBlank(startWorkflowRequest.getName(), "Workflow name cannot be null or empty"); - Validate.isTrue( - StringUtils.isBlank(startWorkflowRequest.getExternalInputPayloadStoragePath()), - "External Storage Path must not be set"); - - String version = - startWorkflowRequest.getVersion() != null - ? 
startWorkflowRequest.getVersion().toString() - : "latest"; - try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { - objectMapper.writeValue(byteArrayOutputStream, startWorkflowRequest.getInput()); - byte[] workflowInputBytes = byteArrayOutputStream.toByteArray(); - long workflowInputSize = workflowInputBytes.length; - MetricsContainer.recordWorkflowInputPayloadSize( - startWorkflowRequest.getName(), version, workflowInputSize); - if (workflowInputSize - > conductorClientConfiguration.getWorkflowInputPayloadThresholdKB() * 1024L) { - if (!conductorClientConfiguration.isExternalPayloadStorageEnabled() - || (workflowInputSize - > conductorClientConfiguration - .getWorkflowInputMaxPayloadThresholdKB() - * 1024L)) { - String errorMsg = - String.format( - "Input payload larger than the allowed threshold of: %d KB", - conductorClientConfiguration - .getWorkflowInputPayloadThresholdKB()); - throw new ConductorClientException(errorMsg); - } else { - MetricsContainer.incrementExternalPayloadUsedCount( - startWorkflowRequest.getName(), - ExternalPayloadStorage.Operation.WRITE.name(), - ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.name()); - String externalStoragePath = - uploadToExternalPayloadStorage( - ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT, - workflowInputBytes, - workflowInputSize); - startWorkflowRequest.setExternalInputPayloadStoragePath(externalStoragePath); - startWorkflowRequest.setInput(null); - } - } - } catch (IOException e) { - String errorMsg = - String.format( - "Unable to start workflow:%s, version:%s", - startWorkflowRequest.getName(), version); - LOGGER.error(errorMsg, e); - MetricsContainer.incrementWorkflowStartErrorCount(startWorkflowRequest.getName(), e); - throw new ConductorClientException(errorMsg, e); - } - try { - return postForString( - "workflow", startWorkflowRequest, null, startWorkflowRequest.getName()); - } catch (ConductorClientException e) { - String errorMsg = - String.format( - "Unable to send 
start workflow request:%s, version:%s", - startWorkflowRequest.getName(), version); - LOGGER.error(errorMsg, e); - MetricsContainer.incrementWorkflowStartErrorCount(startWorkflowRequest.getName(), e); - throw e; - } - } - - /** - * Retrieve a workflow by workflow id - * - * @param workflowId the id of the workflow - * @param includeTasks specify if the tasks in the workflow need to be returned - * @return the requested workflow - */ - public Workflow getWorkflow(String workflowId, boolean includeTasks) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - Workflow workflow = - getForEntity( - "workflow/{workflowId}", - new Object[] {"includeTasks", includeTasks}, - Workflow.class, - workflowId); - populateWorkflowOutput(workflow); - return workflow; - } - - /** - * Retrieve all workflows for a given correlation id and name - * - * @param name the name of the workflow - * @param correlationId the correlation id - * @param includeClosed specify if all workflows are to be returned or only running workflows - * @param includeTasks specify if the tasks in the workflow need to be returned - * @return list of workflows for the given correlation id and name - */ - public List getWorkflows( - String name, String correlationId, boolean includeClosed, boolean includeTasks) { - Validate.notBlank(name, "name cannot be blank"); - Validate.notBlank(correlationId, "correlationId cannot be blank"); - - Object[] params = - new Object[] {"includeClosed", includeClosed, "includeTasks", includeTasks}; - List workflows = - getForEntity( - "workflow/{name}/correlated/{correlationId}", - params, - new TypeReference>() {}, - name, - correlationId); - workflows.forEach(this::populateWorkflowOutput); - return workflows; - } - - /** - * Populates the workflow output from external payload storage if the external storage path is - * specified. - * - * @param workflow the workflow for which the output is to be populated. 
- */ - private void populateWorkflowOutput(Workflow workflow) { - if (StringUtils.isNotBlank(workflow.getExternalOutputPayloadStoragePath())) { - MetricsContainer.incrementExternalPayloadUsedCount( - workflow.getWorkflowName(), - ExternalPayloadStorage.Operation.READ.name(), - ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.name()); - workflow.setOutput( - downloadFromExternalStorage( - ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT, - workflow.getExternalOutputPayloadStoragePath())); - } - } - - /** - * Removes a workflow from the system - * - * @param workflowId the id of the workflow to be deleted - * @param archiveWorkflow flag to indicate if the workflow should be archived before deletion - */ - public void deleteWorkflow(String workflowId, boolean archiveWorkflow) { - Validate.notBlank(workflowId, "Workflow id cannot be blank"); - - Object[] params = new Object[] {"archiveWorkflow", archiveWorkflow}; - deleteWithUriVariables("workflow/{workflowId}/remove", params, workflowId); - } - - /** - * Terminates the execution of all given workflows instances - * - * @param workflowIds the ids of the workflows to be terminated - * @param reason the reason to be logged and displayed - * @return the {@link BulkResponse} contains bulkErrorResults and bulkSuccessfulResults - */ - public BulkResponse terminateWorkflows(List workflowIds, String reason) { - Validate.isTrue(!workflowIds.isEmpty(), "workflow id cannot be blank"); - return postForEntity( - "workflow/bulk/terminate", - workflowIds, - new Object[] {"reason", reason}, - BulkResponse.class); - } - - /** - * Retrieve all running workflow instances for a given name and version - * - * @param workflowName the name of the workflow - * @param version the version of the wokflow definition. Defaults to 1. 
- * @return the list of running workflow instances - */ - public List getRunningWorkflow(String workflowName, Integer version) { - Validate.notBlank(workflowName, "Workflow name cannot be blank"); - return getForEntity( - "workflow/running/{name}", - new Object[] {"version", version}, - new TypeReference>() {}, - workflowName); - } - - /** - * Retrieve all workflow instances for a given workflow name between a specific time period - * - * @param workflowName the name of the workflow - * @param version the version of the workflow definition. Defaults to 1. - * @param startTime the start time of the period - * @param endTime the end time of the period - * @return returns a list of workflows created during the specified during the time period - */ - public List getWorkflowsByTimePeriod( - String workflowName, int version, Long startTime, Long endTime) { - Validate.notBlank(workflowName, "Workflow name cannot be blank"); - Validate.notNull(startTime, "Start time cannot be null"); - Validate.notNull(endTime, "End time cannot be null"); - - Object[] params = - new Object[] {"version", version, "startTime", startTime, "endTime", endTime}; - return getForEntity( - "workflow/running/{name}", - params, - new TypeReference>() {}, - workflowName); - } - - /** - * Starts the decision task for the given workflow instance - * - * @param workflowId the id of the workflow instance - */ - public void runDecider(String workflowId) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - put("workflow/decide/{workflowId}", null, null, workflowId); - } - - /** - * Pause a workflow by workflow id - * - * @param workflowId the workflow id of the workflow to be paused - */ - public void pauseWorkflow(String workflowId) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - put("workflow/{workflowId}/pause", null, null, workflowId); - } - - /** - * Resume a paused workflow by workflow id - * - * @param workflowId the workflow id of the paused workflow - */ - public 
void resumeWorkflow(String workflowId) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - put("workflow/{workflowId}/resume", null, null, workflowId); - } - - /** - * Skips a given task from a current RUNNING workflow - * - * @param workflowId the id of the workflow instance - * @param taskReferenceName the reference name of the task to be skipped - */ - public void skipTaskFromWorkflow(String workflowId, String taskReferenceName) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - Validate.notBlank(taskReferenceName, "Task reference name cannot be blank"); - - put( - "workflow/{workflowId}/skiptask/{taskReferenceName}", - null, - null, - workflowId, - taskReferenceName); - } - - /** - * Reruns the workflow from a specific task - * - * @param workflowId the id of the workflow - * @param rerunWorkflowRequest the request containing the task to rerun from - * @return the id of the workflow - */ - public String rerunWorkflow(String workflowId, RerunWorkflowRequest rerunWorkflowRequest) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - Validate.notNull(rerunWorkflowRequest, "RerunWorkflowRequest cannot be null"); - - return postForString("workflow/{workflowId}/rerun", rerunWorkflowRequest, null, workflowId); - } - - /** - * Restart a completed workflow - * - * @param workflowId the workflow id of the workflow to be restarted - * @param useLatestDefinitions if true, use the latest workflow and task definitions when - * restarting the workflow if false, use the workflow and task definitions embedded in the - * workflow execution when restarting the workflow - */ - public void restart(String workflowId, boolean useLatestDefinitions) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - Object[] params = new Object[] {"useLatestDefinitions", useLatestDefinitions}; - postForEntity("workflow/{workflowId}/restart", null, params, Void.TYPE, workflowId); - } - - /** - * Retries the last failed task in a workflow - 
* - * @param workflowId the workflow id of the workflow with the failed task - */ - public void retryLastFailedTask(String workflowId) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - postWithUriVariables("workflow/{workflowId}/retry", workflowId); - } - - /** - * Resets the callback times of all IN PROGRESS tasks to 0 for the given workflow - * - * @param workflowId the id of the workflow - */ - public void resetCallbacksForInProgressTasks(String workflowId) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - postWithUriVariables("workflow/{workflowId}/resetcallbacks", workflowId); - } - - /** - * Terminates the execution of the given workflow instance - * - * @param workflowId the id of the workflow to be terminated - * @param reason the reason to be logged and displayed - */ - public void terminateWorkflow(String workflowId, String reason) { - Validate.notBlank(workflowId, "workflow id cannot be blank"); - deleteWithUriVariables( - "workflow/{workflowId}", new Object[] {"reason", reason}, workflowId); - } - - /** - * Search for workflows based on payload - * - * @param query the search query - * @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query - */ - public SearchResult search(String query) { - return getForEntity( - "workflow/search", new Object[] {"query", query}, searchResultWorkflowSummary); - } - - /** - * Search for workflows based on payload - * - * @param query the search query - * @return the {@link SearchResult} containing the {@link Workflow} that match the query - */ - public SearchResult searchV2(String query) { - return getForEntity( - "workflow/search-v2", new Object[] {"query", query}, searchResultWorkflow); - } - - /** - * Paginated search for workflows based on payload - * - * @param start start value of page - * @param size number of workflows to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - 
* @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query - */ - public SearchResult search( - Integer start, Integer size, String sort, String freeText, String query) { - Object[] params = - new Object[] { - "start", start, "size", size, "sort", sort, "freeText", freeText, "query", query - }; - return getForEntity("workflow/search", params, searchResultWorkflowSummary); - } - - /** - * Paginated search for workflows based on payload - * - * @param start start value of page - * @param size number of workflows to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - * @return the {@link SearchResult} containing the {@link Workflow} that match the query - */ - public SearchResult searchV2( - Integer start, Integer size, String sort, String freeText, String query) { - Object[] params = - new Object[] { - "start", start, "size", size, "sort", sort, "freeText", freeText, "query", query - }; - return getForEntity("workflow/search-v2", params, searchResultWorkflow); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/http/jersey/JerseyRequestHandler.java b/client/src/main/java/com/netflix/conductor/client/http/jersey/JerseyRequestHandler.java deleted file mode 100644 index 05a34127d..000000000 --- a/client/src/main/java/com/netflix/conductor/client/http/jersey/JerseyRequestHandler.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http.jersey; - -import java.io.InputStream; -import java.net.URI; - -import javax.ws.rs.core.MediaType; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.exception.RequestHandlerException; -import com.netflix.conductor.client.http.RequestHandler; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientHandler; -import com.sun.jersey.api.client.ClientHandlerException; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.UniformInterfaceException; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.ClientConfig; -import com.sun.jersey.api.client.filter.ClientFilter; - -/** A {@link RequestHandler} implementation that uses the Jersey HTTP Client. */ -public class JerseyRequestHandler implements RequestHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(JerseyRequestHandler.class); - - private final Client client; - - public JerseyRequestHandler( - ClientConfig config, - ClientHandler handler, - ObjectMapper objectMapper, - ClientFilter... 
filters) { - - JacksonJsonProvider provider = new JacksonJsonProvider(objectMapper); - config.getSingletons().add(provider); - - if (handler == null) { - this.client = Client.create(config); - } else { - this.client = new Client(handler, config); - } - - for (ClientFilter filter : filters) { - this.client.addFilter(filter); - } - } - - @Override - public void delete(URI uri) { - try { - client.resource(uri).delete(); - } catch (UniformInterfaceException e) { - handleUniformInterfaceException(e, uri); - } catch (RuntimeException e) { - handleRuntimeException(e, uri); - } - } - - @Override - public InputStream put(URI uri, Object body) { - ClientResponse clientResponse; - try { - clientResponse = getWebResourceBuilder(uri, body).put(ClientResponse.class); - return clientResponse.getEntityInputStream(); - } catch (RuntimeException e) { - handleException(uri, e); - } - - return null; - } - - @Override - public InputStream post(URI uri, Object body) { - ClientResponse clientResponse; - try { - clientResponse = getWebResourceBuilder(uri, body).post(ClientResponse.class); - return clientResponse.getEntityInputStream(); - } catch (UniformInterfaceException e) { - handleUniformInterfaceException(e, uri); - } catch (RuntimeException e) { - handleRuntimeException(e, uri); - } - return null; - } - - @Override - public InputStream get(URI uri) { - ClientResponse clientResponse; - try { - clientResponse = - client.resource(uri) - .accept(MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN) - .get(ClientResponse.class); - if (clientResponse.getStatus() < 300) { - return clientResponse.getEntityInputStream(); - } else { - throw new UniformInterfaceException(clientResponse); - } - } catch (UniformInterfaceException e) { - handleUniformInterfaceException(e, uri); - } catch (RuntimeException e) { - handleRuntimeException(e, uri); - } - - return null; - } - - private void handleClientHandlerException(ClientHandlerException exception, URI uri) { - String errorMessage = - String.format( - 
"Unable to invoke Conductor API with uri: %s, failure to process request or response", - uri); - LOGGER.error(errorMessage, exception); - throw new RequestHandlerException(errorMessage, exception); - } - - private void handleRuntimeException(RuntimeException exception, URI uri) { - String errorMessage = - String.format( - "Unable to invoke Conductor API with uri: %s, runtime exception occurred", - uri); - LOGGER.error(errorMessage, exception); - throw new RequestHandlerException(errorMessage, exception); - } - - private void handleUniformInterfaceException(UniformInterfaceException exception, URI uri) { - ClientResponse clientResponse = exception.getResponse(); - if (clientResponse == null) { - throw new RequestHandlerException( - String.format("Unable to invoke Conductor API with uri: %s", uri)); - } - try { - if (clientResponse.getStatus() < 300) { - return; - } - LOGGER.warn( - "Unable to invoke Conductor API with uri: {}, unexpected response from server: statusCode={}", - uri, - clientResponse.getStatus()); - throw new RequestHandlerException( - clientResponse.getEntityInputStream(), clientResponse.getStatus()); - } catch (RequestHandlerException e) { - throw e; - } catch (ClientHandlerException e) { - handleClientHandlerException(e, uri); - } catch (RuntimeException e) { - handleRuntimeException(e, uri); - } finally { - clientResponse.close(); - } - } - - private void handleException(URI uri, RuntimeException e) { - if (e instanceof UniformInterfaceException) { - handleUniformInterfaceException(((UniformInterfaceException) e), uri); - } else if (e instanceof ClientHandlerException) { - handleClientHandlerException((ClientHandlerException) e, uri); - } else { - handleRuntimeException(e, uri); - } - } - - private WebResource.Builder getWebResourceBuilder(URI URI, Object entity) { - return client.resource(URI) - .type(MediaType.APPLICATION_JSON) - .entity(entity) - .accept(MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON); - } -} diff --git 
a/client/src/main/java/com/netflix/conductor/client/telemetry/MetricsContainer.java b/client/src/main/java/com/netflix/conductor/client/telemetry/MetricsContainer.java deleted file mode 100644 index a6724af6d..000000000 --- a/client/src/main/java/com/netflix/conductor/client/telemetry/MetricsContainer.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.telemetry; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import com.netflix.spectator.api.BasicTag; -import com.netflix.spectator.api.Counter; -import com.netflix.spectator.api.Id; -import com.netflix.spectator.api.Registry; -import com.netflix.spectator.api.Spectator; -import com.netflix.spectator.api.Tag; -import com.netflix.spectator.api.Timer; -import com.netflix.spectator.api.patterns.PolledMeter; - -public class MetricsContainer { - - private static final String TASK_TYPE = "taskType"; - private static final String WORKFLOW_TYPE = "workflowType"; - private static final String WORKFLOW_VERSION = "version"; - private static final String EXCEPTION = "exception"; - private static final String ENTITY_NAME = "entityName"; - private static final String OPERATION = "operation"; - private static final String PAYLOAD_TYPE = "payload_type"; - - private static final String TASK_EXECUTION_QUEUE_FULL = "task_execution_queue_full"; - private static final String TASK_POLL_ERROR = "task_poll_error"; - private static final String TASK_PAUSED = "task_paused"; - private static final String TASK_EXECUTE_ERROR = "task_execute_error"; - private static final String TASK_ACK_FAILED = "task_ack_failed"; - private static final String TASK_ACK_ERROR = "task_ack_error"; - private static final String TASK_UPDATE_ERROR = "task_update_error"; - private static final String TASK_POLL_COUNTER = "task_poll_counter"; - private static final String TASK_EXECUTE_TIME = 
"task_execute_time"; - private static final String TASK_POLL_TIME = "task_poll_time"; - private static final String TASK_RESULT_SIZE = "task_result_size"; - private static final String WORKFLOW_INPUT_SIZE = "workflow_input_size"; - private static final String EXTERNAL_PAYLOAD_USED = "external_payload_used"; - private static final String WORKFLOW_START_ERROR = "workflow_start_error"; - private static final String THREAD_UNCAUGHT_EXCEPTION = "thread_uncaught_exceptions"; - - private static final Registry REGISTRY = Spectator.globalRegistry(); - private static final Map TIMERS = new ConcurrentHashMap<>(); - private static final Map COUNTERS = new ConcurrentHashMap<>(); - private static final Map GAUGES = new ConcurrentHashMap<>(); - private static final String CLASS_NAME = MetricsContainer.class.getSimpleName(); - - private MetricsContainer() {} - - public static Timer getPollTimer(String taskType) { - return getTimer(TASK_POLL_TIME, TASK_TYPE, taskType); - } - - public static Timer getExecutionTimer(String taskType) { - return getTimer(TASK_EXECUTE_TIME, TASK_TYPE, taskType); - } - - private static Timer getTimer(String name, String... additionalTags) { - String key = CLASS_NAME + "." + name + "." + String.join(",", additionalTags); - return TIMERS.computeIfAbsent( - key, - k -> { - List tagList = getTags(additionalTags); - tagList.add(new BasicTag("unit", TimeUnit.MILLISECONDS.name())); - return REGISTRY.timer(name, tagList); - }); - } - - @SuppressWarnings({"rawtypes", "unchecked"}) - private static List getTags(String[] additionalTags) { - List tagList = new ArrayList(); - tagList.add(new BasicTag("class", CLASS_NAME)); - for (int j = 0; j < additionalTags.length - 1; j++) { - tagList.add(new BasicTag(additionalTags[j], additionalTags[j + 1])); - j++; - } - return tagList; - } - - private static void incrementCount(String name, String... 
additionalTags) { - getCounter(name, additionalTags).increment(); - } - - private static Counter getCounter(String name, String... additionalTags) { - String key = CLASS_NAME + "." + name + "." + String.join(",", additionalTags); - return COUNTERS.computeIfAbsent( - key, - k -> { - List tags = getTags(additionalTags); - return REGISTRY.counter(name, tags); - }); - } - - private static AtomicLong getGauge(String name, String... additionalTags) { - String key = CLASS_NAME + "." + name + "." + String.join(",", additionalTags); - return GAUGES.computeIfAbsent( - key, - pollTimer -> { - Id id = REGISTRY.createId(name, getTags(additionalTags)); - return PolledMeter.using(REGISTRY).withId(id).monitorValue(new AtomicLong(0)); - }); - } - - public static void incrementTaskExecutionQueueFullCount(String taskType) { - incrementCount(TASK_EXECUTION_QUEUE_FULL, TASK_TYPE, taskType); - } - - public static void incrementUncaughtExceptionCount() { - incrementCount(THREAD_UNCAUGHT_EXCEPTION); - } - - public static void incrementTaskPollErrorCount(String taskType, Exception e) { - incrementCount( - TASK_POLL_ERROR, TASK_TYPE, taskType, EXCEPTION, e.getClass().getSimpleName()); - } - - public static void incrementTaskPausedCount(String taskType) { - incrementCount(TASK_PAUSED, TASK_TYPE, taskType); - } - - public static void incrementTaskExecutionErrorCount(String taskType, Throwable e) { - incrementCount( - TASK_EXECUTE_ERROR, TASK_TYPE, taskType, EXCEPTION, e.getClass().getSimpleName()); - } - - public static void incrementTaskAckFailedCount(String taskType) { - incrementCount(TASK_ACK_FAILED, TASK_TYPE, taskType); - } - - public static void incrementTaskAckErrorCount(String taskType, Exception e) { - incrementCount( - TASK_ACK_ERROR, TASK_TYPE, taskType, EXCEPTION, e.getClass().getSimpleName()); - } - - public static void recordTaskResultPayloadSize(String taskType, long payloadSize) { - getGauge(TASK_RESULT_SIZE, TASK_TYPE, taskType).getAndSet(payloadSize); - } - - public static 
void incrementTaskUpdateErrorCount(String taskType, Throwable t) { - incrementCount( - TASK_UPDATE_ERROR, TASK_TYPE, taskType, EXCEPTION, t.getClass().getSimpleName()); - } - - public static void incrementTaskPollCount(String taskType, int taskCount) { - getCounter(TASK_POLL_COUNTER, TASK_TYPE, taskType).increment(taskCount); - } - - public static void recordWorkflowInputPayloadSize( - String workflowType, String version, long payloadSize) { - getGauge(WORKFLOW_INPUT_SIZE, WORKFLOW_TYPE, workflowType, WORKFLOW_VERSION, version) - .getAndSet(payloadSize); - } - - public static void incrementExternalPayloadUsedCount( - String name, String operation, String payloadType) { - incrementCount( - EXTERNAL_PAYLOAD_USED, - ENTITY_NAME, - name, - OPERATION, - operation, - PAYLOAD_TYPE, - payloadType); - } - - public static void incrementWorkflowStartErrorCount(String workflowType, Throwable t) { - incrementCount( - WORKFLOW_START_ERROR, - WORKFLOW_TYPE, - workflowType, - EXCEPTION, - t.getClass().getSimpleName()); - } -} diff --git a/client/src/main/java/com/netflix/conductor/client/worker/Worker.java b/client/src/main/java/com/netflix/conductor/client/worker/Worker.java deleted file mode 100644 index 1936b1371..000000000 --- a/client/src/main/java/com/netflix/conductor/client/worker/Worker.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.worker; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.function.Function; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.config.PropertyFactory; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; - -import com.amazonaws.util.EC2MetadataUtils; - -public interface Worker { - - /** - * Retrieve the name of the task definition the worker is currently working on. - * - * @return the name of the task definition. - */ - String getTaskDefName(); - - /** - * Executes a task and returns the updated task. - * - * @param task Task to be executed. - * @return the {@link TaskResult} object If the task is not completed yet, return with the - * status as IN_PROGRESS. - */ - TaskResult execute(Task task); - - /** - * Called when the task coordinator fails to update the task to the server. Client should store - * the task id (in a database) and retry the update later - * - * @param task Task which cannot be updated back to the server. - */ - default void onErrorUpdate(Task task) {} - - /** - * Override this method to pause the worker from polling. - * - * @return true if the worker is paused and no more tasks should be polled from server. - */ - default boolean paused() { - return PropertyFactory.getBoolean(getTaskDefName(), "paused", false); - } - - /** - * Override this method to app specific rules. - * - * @return returns the serverId as the id of the instance that the worker is running. 
- */ - default String getIdentity() { - String serverId; - try { - serverId = InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - serverId = System.getenv("HOSTNAME"); - } - if (serverId == null) { - serverId = - (EC2MetadataUtils.getInstanceId() == null) - ? System.getProperty("user.name") - : EC2MetadataUtils.getInstanceId(); - } - LoggerHolder.logger.debug("Setting worker id to {}", serverId); - return serverId; - } - - /** - * Override this method to change the interval between polls. - * - * @return interval in millisecond at which the server should be polled for worker tasks. - */ - default int getPollingInterval() { - return PropertyFactory.getInteger(getTaskDefName(), "pollInterval", 1000); - } - - static Worker create(String taskType, Function executor) { - return new Worker() { - - @Override - public String getTaskDefName() { - return taskType; - } - - @Override - public TaskResult execute(Task task) { - return executor.apply(task); - } - - @Override - public boolean paused() { - return Worker.super.paused(); - } - }; - } -} - -final class LoggerHolder { - - static final Logger logger = LoggerFactory.getLogger(Worker.class); -} diff --git a/client/src/test/groovy/com/netflix/conductor/client/http/ClientSpecification.groovy b/client/src/test/groovy/com/netflix/conductor/client/http/ClientSpecification.groovy deleted file mode 100644 index 40fb3d688..000000000 --- a/client/src/test/groovy/com/netflix/conductor/client/http/ClientSpecification.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http - -import java.nio.charset.StandardCharsets - -import org.apache.commons.io.IOUtils - -import com.netflix.conductor.common.config.ObjectMapperProvider - -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification - -abstract class ClientSpecification extends Specification { - - protected static final String ROOT_URL = "dummyroot/" - - protected static URI createURI(String path) { - URI.create(ROOT_URL + path) - } - - protected RequestHandler requestHandler - protected ObjectMapper objectMapper - - def setup() { - requestHandler = Mock(RequestHandler.class) - - objectMapper = new ObjectMapperProvider().getObjectMapper() - } - - protected InputStream toInputStream(Object value) { - return IOUtils.toInputStream(objectMapper.writeValueAsString(value), StandardCharsets.UTF_8) - } - - protected static InputStream toInputStream(String value) { - return IOUtils.toInputStream(value, StandardCharsets.UTF_8) - } -} diff --git a/client/src/test/groovy/com/netflix/conductor/client/http/EventClientSpec.groovy b/client/src/test/groovy/com/netflix/conductor/client/http/EventClientSpec.groovy deleted file mode 100644 index e70eea267..000000000 --- a/client/src/test/groovy/com/netflix/conductor/client/http/EventClientSpec.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http - - -import com.netflix.conductor.common.metadata.events.EventHandler - -import spock.lang.Subject -import spock.lang.Unroll - -class EventClientSpec extends ClientSpecification { - - @Subject - EventClient eventClient - - def setup() { - eventClient = new EventClient(requestHandler) - eventClient.setRootURI(ROOT_URL) - } - - def "register event handler"() { - given: - EventHandler handler = new EventHandler() - URI uri = createURI("event") - - when: - eventClient.registerEventHandler(handler) - - then: - 1 * requestHandler.post(uri, handler) - } - - def "update event handler"() { - given: - EventHandler handler = new EventHandler() - URI uri = createURI("event") - - when: - eventClient.updateEventHandler(handler) - - then: - 1 * requestHandler.put(uri, handler) - } - - def "unregister event handler"() { - given: - String eventName = "test" - URI uri = createURI("event/$eventName") - - when: - eventClient.unregisterEventHandler(eventName) - - then: - 1 * requestHandler.delete(uri) - } - - @Unroll - def "get event handlers activeOnly=#activeOnly"() { - given: - def handlers = [new EventHandler(), new EventHandler()] - String eventName = "test" - URI uri = createURI("event/$eventName?activeOnly=$activeOnly") - InputStream json = toInputStream(handlers) - - when: - def eventHandlers = eventClient.getEventHandlers(eventName, activeOnly) - - then: - eventHandlers && eventHandlers.size() == 2 - 1 * requestHandler.get(uri) >> json - - where: - activeOnly << [true, false] - } - -} diff --git a/client/src/test/groovy/com/netflix/conductor/client/http/MetadataClientSpec.groovy 
b/client/src/test/groovy/com/netflix/conductor/client/http/MetadataClientSpec.groovy deleted file mode 100644 index a8bda1c5d..000000000 --- a/client/src/test/groovy/com/netflix/conductor/client/http/MetadataClientSpec.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http - -import com.netflix.conductor.client.exception.ConductorClientException -import com.netflix.conductor.client.exception.RequestHandlerException - -import spock.lang.Subject - -class MetadataClientSpec extends ClientSpecification { - - @Subject - MetadataClient metadataClient - - def setup() { - metadataClient = new MetadataClient(requestHandler) - metadataClient.setRootURI(ROOT_URL) - } - - def "workflow delete"() { - given: - String workflowName = 'test' - int version = 1 - URI uri = createURI("metadata/workflow/$workflowName/$version") - - when: - metadataClient.unregisterWorkflowDef(workflowName, version) - - then: - 1 * requestHandler.delete(uri) - } - - def "workflow delete throws exception"() { - given: - String workflowName = 'test' - int version = 1 - InputStream errorResponse = toInputStream""" - { - "status": 404, - "message": "No such workflow definition: $workflowName version: $version", - "instance": "conductor-server", - "retryable": false - } - """ - URI uri = createURI("metadata/workflow/$workflowName/$version") - - when: - metadataClient.unregisterWorkflowDef(workflowName, version) - - then: - 1 * requestHandler.delete(uri) >> { throw new RequestHandlerException(errorResponse, 404) } - def ex = thrown(ConductorClientException.class) - ex && ex.status == 404 - ex.message == "No such workflow definition: $workflowName version: $version" - } - - def "workflow delete version missing"() { - when: - metadataClient.unregisterWorkflowDef("some name", null) - - then: - thrown(NullPointerException.class) - } - - def "workflow delete name missing"() { - when: - 
metadataClient.unregisterWorkflowDef(null, 1) - - then: - thrown(IllegalArgumentException.class) - } -} diff --git a/client/src/test/groovy/com/netflix/conductor/client/http/TaskClientSpec.groovy b/client/src/test/groovy/com/netflix/conductor/client/http/TaskClientSpec.groovy deleted file mode 100644 index 1181ceb1d..000000000 --- a/client/src/test/groovy/com/netflix/conductor/client/http/TaskClientSpec.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http - - -import java.nio.charset.StandardCharsets - -import org.apache.commons.io.IOUtils - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.SearchResult -import com.netflix.conductor.common.run.TaskSummary - -import spock.lang.Subject - -class TaskClientSpec extends ClientSpecification { - - @Subject - TaskClient taskClient - - def setup() { - taskClient = new TaskClient(requestHandler) - taskClient.setRootURI(ROOT_URL) - } - - def "search"() { - given: - String query = 'my_complex_query' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new TaskSummary()] - - InputStream json = toInputStream(result) - URI uri = createURI("tasks/search?query=$query") - - when: - SearchResult searchResult = taskClient.search(query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof TaskSummary - } - - def "searchV2"() { - given: - String query = 'my_complex_query' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new Task()] - - InputStream json = toInputStream(result) - URI uri = createURI("tasks/search-v2?query=$query") - - when: - SearchResult searchResult = taskClient.searchV2('my_complex_query') - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof Task - } - - def "search with params"() { - given: - String query = 
'my_complex_query' - int start = 0 - int size = 10 - String sort = 'sort' - String freeText = 'text' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new TaskSummary()] - - InputStream json = toInputStream(result) - URI uri = createURI("tasks/search?start=$start&size=$size&sort=$sort&freeText=$freeText&query=$query") - - when: - SearchResult searchResult = taskClient.search(start, size, sort, freeText, query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof TaskSummary - } - - def "searchV2 with params"() { - given: - String query = 'my_complex_query' - int start = 0 - int size = 10 - String sort = 'sort' - String freeText = 'text' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new Task()] - - InputStream json = toInputStream(result) - URI uri = createURI("tasks/search-v2?start=$start&size=$size&sort=$sort&freeText=$freeText&query=$query") - - when: - SearchResult searchResult = taskClient.searchV2(start, size, sort, freeText, query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof Task - } -} diff --git a/client/src/test/groovy/com/netflix/conductor/client/http/WorkflowClientSpec.groovy b/client/src/test/groovy/com/netflix/conductor/client/http/WorkflowClientSpec.groovy deleted file mode 100644 index d1e0de774..000000000 --- a/client/src/test/groovy/com/netflix/conductor/client/http/WorkflowClientSpec.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.http - - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.run.SearchResult -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.common.run.WorkflowSummary - -import spock.lang.Subject - -class WorkflowClientSpec extends ClientSpecification { - - @Subject - WorkflowClient workflowClient - - def setup() { - workflowClient = new WorkflowClient(requestHandler) - workflowClient.setRootURI(ROOT_URL) - } - - def "search"() { - given: - String query = 'my_complex_query' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new WorkflowSummary()] - - InputStream json = toInputStream(result) - URI uri = createURI("workflow/search?query=$query") - - when: - SearchResult searchResult = workflowClient.search(query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof WorkflowSummary - } - - def "searchV2"() { - given: - String query = 'my_complex_query' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new Workflow(workflowDefinition: new WorkflowDef(), createTime: System.currentTimeMillis() )] - - InputStream json = toInputStream(result) - URI uri = createURI("workflow/search-v2?query=$query") - - when: - SearchResult searchResult = workflowClient.searchV2('my_complex_query') - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - 
searchResult.results[0] instanceof Workflow - } - - def "search with params"() { - given: - String query = 'my_complex_query' - int start = 0 - int size = 10 - String sort = 'sort' - String freeText = 'text' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new WorkflowSummary()] - - InputStream json = toInputStream(result) - URI uri = createURI("workflow/search?start=$start&size=$size&sort=$sort&freeText=$freeText&query=$query") - - when: - SearchResult searchResult = workflowClient.search(start, size, sort, freeText, query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof WorkflowSummary - } - - def "searchV2 with params"() { - given: - String query = 'my_complex_query' - int start = 0 - int size = 10 - String sort = 'sort' - String freeText = 'text' - SearchResult result = new SearchResult<>() - result.totalHits = 1 - result.results = [new Workflow(workflowDefinition: new WorkflowDef(), createTime: System.currentTimeMillis() )] - - InputStream json = toInputStream(result) - URI uri = createURI("workflow/search-v2?start=$start&size=$size&sort=$sort&freeText=$freeText&query=$query") - - when: - SearchResult searchResult = workflowClient.searchV2(start, size, sort, freeText, query) - - then: - 1 * requestHandler.get(uri) >> json - - searchResult.totalHits == result.totalHits - searchResult.results && searchResult.results.size() == 1 - searchResult.results[0] instanceof Workflow - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/automator/PollingSemaphoreTest.java b/client/src/test/java/com/netflix/conductor/client/automator/PollingSemaphoreTest.java deleted file mode 100644 index 508b3a3c7..000000000 --- a/client/src/test/java/com/netflix/conductor/client/automator/PollingSemaphoreTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.stream.IntStream; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class PollingSemaphoreTest { - - @Test - public void testBlockAfterAvailablePermitsExhausted() throws Exception { - int threads = 5; - ExecutorService executorService = Executors.newFixedThreadPool(threads); - PollingSemaphore pollingSemaphore = new PollingSemaphore(threads); - - List> futuresList = new ArrayList<>(); - IntStream.range(0, threads) - .forEach( - t -> - futuresList.add( - CompletableFuture.runAsync( - pollingSemaphore::canPoll, executorService))); - - CompletableFuture allFutures = - CompletableFuture.allOf( - futuresList.toArray(new CompletableFuture[futuresList.size()])); - - allFutures.get(); - - assertEquals(0, pollingSemaphore.availableThreads()); - assertFalse(pollingSemaphore.canPoll()); - - executorService.shutdown(); - } - - @Test - public void testAllowsPollingWhenPermitBecomesAvailable() throws Exception { - int threads = 5; - ExecutorService executorService = Executors.newFixedThreadPool(threads); - PollingSemaphore pollingSemaphore = new PollingSemaphore(threads); - - List> futuresList = new ArrayList<>(); - IntStream.range(0, threads) - .forEach( - t -> - futuresList.add( - CompletableFuture.runAsync( - pollingSemaphore::canPoll, executorService))); - - CompletableFuture allFutures 
= - CompletableFuture.allOf( - futuresList.toArray(new CompletableFuture[futuresList.size()])); - allFutures.get(); - - assertEquals(0, pollingSemaphore.availableThreads()); - pollingSemaphore.complete(); - - assertTrue(pollingSemaphore.availableThreads() > 0); - assertTrue(pollingSemaphore.canPoll()); - - executorService.shutdown(); - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/automator/TaskPollExecutorTest.java b/client/src/test/java/com/netflix/conductor/client/automator/TaskPollExecutorTest.java deleted file mode 100644 index 19791e385..000000000 --- a/client/src/test/java/com/netflix/conductor/client/automator/TaskPollExecutorTest.java +++ /dev/null @@ -1,536 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.*; - -import org.junit.Test; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -import com.netflix.appinfo.InstanceInfo; -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.discovery.EurekaClient; - -import static com.netflix.conductor.common.metadata.tasks.TaskResult.Status.IN_PROGRESS; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -public class TaskPollExecutorTest { - - private static final String TEST_TASK_DEF_NAME = "test"; - - @Test - public void testTaskExecutionException() throws InterruptedException { - Worker worker = - Worker.create( - TEST_TASK_DEF_NAME, - task -> { - throw new NoSuchMethodError(); - }); - TaskClient taskClient = Mockito.mock(TaskClient.class); - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 1, new HashMap<>(), "test-worker-%d", new HashMap<>()); - - when(taskClient.pollTask(any(), any(), any())).thenReturn(testTask()); - when(taskClient.ack(any(), any())).thenReturn(true); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - 
invocation -> { - assertEquals("test-worker-1", Thread.currentThread().getName()); - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(TaskResult.Status.FAILED, result.getStatus()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).updateTask(any()); - } - - @SuppressWarnings("rawtypes") - @Test - public void testMultipleTasksExecution() throws InterruptedException { - String outputKey = "KEY"; - Task task = testTask(); - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME); - when(worker.execute(any())) - .thenAnswer( - new Answer() { - private int count = 0; - Map outputMap = new HashMap<>(); - - public TaskResult answer(InvocationOnMock invocation) - throws InterruptedException { - // Sleep for 2 seconds to simulate task execution - Thread.sleep(2000L); - TaskResult taskResult = new TaskResult(task); - outputMap.put(outputKey, count++); - taskResult.setOutputData(outputMap); - return taskResult; - } - }); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - when(taskClient.pollTask(any(), any(), any())).thenReturn(task); - when(taskClient.ack(any(), any())).thenReturn(true); - CountDownLatch latch = new CountDownLatch(3); - doAnswer( - new Answer() { - private int count = 0; - - public TaskResult answer(InvocationOnMock invocation) { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(count, result.getOutputData().get(outputKey)); - count++; - 
latch.countDown(); - return null; - } - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - latch.await(); - - // execute() is called 3 times on the worker (once for each task) - verify(worker, times(3)).execute(any()); - verify(taskClient, times(3)).updateTask(any()); - } - - @SuppressWarnings("unchecked") - @Test - public void testLargePayloadCanFailUpdateWithRetry() throws InterruptedException { - Task task = testTask(); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(task); - when(taskClient.ack(any(), any())).thenReturn(true); - - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertNull(result.getReasonForIncompletion()); - result.setReasonForIncompletion("some_reason_1"); - throw new ConductorClientException(); - }) - .when(taskClient) - .evaluateAndUploadLargePayload(any(Map.class), any()); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 3, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(worker) - .onErrorUpdate(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - latch.await(); - - // When evaluateAndUploadLargePayload fails indefinitely, task update shouldn't be called. 
- verify(taskClient, times(0)).updateTask(any()); - } - - @Test - public void testLargePayloadLocationUpdate() throws InterruptedException { - Task task = testTask(); - String largePayloadLocation = "large_payload_location"; - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(task); - when(taskClient.ack(any(), any())).thenReturn(true); - //noinspection unchecked - when(taskClient.evaluateAndUploadLargePayload(any(Map.class), any())) - .thenReturn(Optional.of(largePayloadLocation)); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 3, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertNull(result.getOutputData()); - assertEquals( - largePayloadLocation, - result.getExternalOutputPayloadStoragePath()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - latch.await(); - - verify(taskClient, times(1)).updateTask(any()); - } - - @Test - public void testTaskPollException() throws InterruptedException { - Task task = testTask(); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("test"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())) - .thenThrow(ConductorClientException.class) - 
.thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).updateTask(any()); - } - - @Test - public void testTaskPoll() throws InterruptedException { - Task task = testTask(); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("test"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(new Task()).thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).updateTask(any()); - } - - @Test - public void testTaskPollDomain() throws InterruptedException { - TaskClient taskClient = 
Mockito.mock(TaskClient.class); - String testDomain = "foo"; - Map taskToDomain = new HashMap<>(); - taskToDomain.put(TEST_TASK_DEF_NAME, testDomain); - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, 1, 1, taskToDomain, "test-worker-", new HashMap<>()); - - String workerName = "test-worker"; - Worker worker = mock(Worker.class); - when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME); - when(worker.getIdentity()).thenReturn(workerName); - - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(taskClient) - .pollTask(TEST_TASK_DEF_NAME, workerName, testDomain); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).pollTask(TEST_TASK_DEF_NAME, workerName, testDomain); - } - - @Test - public void testPollOutOfDiscoveryForTask() throws InterruptedException { - Task task = testTask(); - - EurekaClient client = mock(EurekaClient.class); - when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("task_run_always"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(new Task()).thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - client, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - 
}) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).updateTask(any()); - } - - @Test - public void testPollOutOfDiscoveryAsDefaultFalseForTask() - throws ExecutionException, InterruptedException { - Task task = testTask(); - - EurekaClient client = mock(EurekaClient.class); - when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("task_do_not_run_always"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(new Task()).thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - client, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - ScheduledFuture f = - Executors.newSingleThreadScheduledExecutor() - .schedule( - () -> taskPollExecutor.pollAndExecute(worker), 0, TimeUnit.SECONDS); - - f.get(); - verify(taskClient, times(0)).updateTask(any()); - } - - @Test - public void testPollOutOfDiscoveryAsExplicitFalseForTask() - throws ExecutionException, InterruptedException { - Task task = testTask(); - - EurekaClient client = mock(EurekaClient.class); - when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN); - - Worker worker = mock(Worker.class); - 
when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("task_explicit_do_not_run_always"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(new Task()).thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - client, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - ScheduledFuture f = - Executors.newSingleThreadScheduledExecutor() - .schedule( - () -> taskPollExecutor.pollAndExecute(worker), 0, TimeUnit.SECONDS); - - f.get(); - verify(taskClient, times(0)).updateTask(any()); - } - - @Test - public void testPollOutOfDiscoveryIsIgnoredWhenDiscoveryIsUp() throws InterruptedException { - Task task = testTask(); - - EurekaClient client = mock(EurekaClient.class); - when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UP); - - Worker worker = mock(Worker.class); - when(worker.getPollingInterval()).thenReturn(3000); - when(worker.getTaskDefName()).thenReturn("task_ignore_override"); - when(worker.execute(any())).thenReturn(new TaskResult(task)); - - TaskClient taskClient = Mockito.mock(TaskClient.class); - when(taskClient.pollTask(any(), any(), any())).thenReturn(new Task()).thenReturn(task); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - client, taskClient, 1, 1, new HashMap<>(), "test-worker-", new HashMap<>()); - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = 
(TaskResult) args[0]; - assertEquals(IN_PROGRESS, result.getStatus()); - assertEquals(task.getTaskId(), result.getTaskId()); - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).updateTask(any()); - } - - @Test - public void testTaskThreadCount() throws InterruptedException { - TaskClient taskClient = Mockito.mock(TaskClient.class); - - Map taskThreadCount = new HashMap<>(); - taskThreadCount.put(TEST_TASK_DEF_NAME, 1); - - TaskPollExecutor taskPollExecutor = - new TaskPollExecutor( - null, taskClient, -1, 1, new HashMap<>(), "test-worker-", taskThreadCount); - - String workerName = "test-worker"; - Worker worker = mock(Worker.class); - when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME); - when(worker.getIdentity()).thenReturn(workerName); - - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(taskClient) - .pollTask(TEST_TASK_DEF_NAME, workerName, null); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate( - () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS); - - latch.await(); - verify(taskClient).pollTask(TEST_TASK_DEF_NAME, workerName, null); - } - - private Task testTask() { - Task task = new Task(); - task.setTaskId(UUID.randomUUID().toString()); - task.setStatus(Task.Status.IN_PROGRESS); - task.setTaskDefName(TEST_TASK_DEF_NAME); - return task; - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/automator/TaskRunnerConfigurerTest.java b/client/src/test/java/com/netflix/conductor/client/automator/TaskRunnerConfigurerTest.java deleted file mode 100644 index 49736b9cb..000000000 --- a/client/src/test/java/com/netflix/conductor/client/automator/TaskRunnerConfigurerTest.java +++ /dev/null @@ -1,213 +0,0 @@ -/* 
- * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.automator; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicInteger; - -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; - -import static com.netflix.conductor.common.metadata.tasks.TaskResult.Status.COMPLETED; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class TaskRunnerConfigurerTest { - - private static final String TEST_TASK_DEF_NAME = "test"; - - @Test(expected = NullPointerException.class) - public void testNoWorkersException() { - new TaskRunnerConfigurer.Builder(null, null).build(); - } - - @Test(expected = ConductorClientException.class) - public void testInvalidThreadConfig() { - Worker worker1 = Worker.create("task1", TaskResult::new); - Worker worker2 = Worker.create("task2", TaskResult::new); - Map taskThreadCount = new HashMap<>(); - taskThreadCount.put(worker1.getTaskDefName(), 2); - taskThreadCount.put(worker2.getTaskDefName(), 3); - new TaskRunnerConfigurer.Builder(new TaskClient(), 
Arrays.asList(worker1, worker2)) - .withThreadCount(10) - .withTaskThreadCount(taskThreadCount) - .build(); - } - - @Test(expected = ConductorClientException.class) - public void testMissingTaskThreadConfig() { - Worker worker1 = Worker.create("task1", TaskResult::new); - Worker worker2 = Worker.create("task2", TaskResult::new); - Map taskThreadCount = new HashMap<>(); - taskThreadCount.put(worker1.getTaskDefName(), 2); - new TaskRunnerConfigurer.Builder(new TaskClient(), Arrays.asList(worker1, worker2)) - .withTaskThreadCount(taskThreadCount) - .build(); - } - - @Test - public void testPerTaskThreadPool() { - Worker worker1 = Worker.create("task1", TaskResult::new); - Worker worker2 = Worker.create("task2", TaskResult::new); - Map taskThreadCount = new HashMap<>(); - taskThreadCount.put(worker1.getTaskDefName(), 2); - taskThreadCount.put(worker2.getTaskDefName(), 3); - TaskRunnerConfigurer configurer = - new TaskRunnerConfigurer.Builder(new TaskClient(), Arrays.asList(worker1, worker2)) - .withTaskThreadCount(taskThreadCount) - .build(); - configurer.init(); - assertEquals(-1, configurer.getThreadCount()); - assertEquals(2, configurer.getTaskThreadCount().get("task1").intValue()); - assertEquals(3, configurer.getTaskThreadCount().get("task2").intValue()); - } - - @Test - public void testSharedThreadPool() { - Worker worker = Worker.create(TEST_TASK_DEF_NAME, TaskResult::new); - TaskRunnerConfigurer configurer = - new TaskRunnerConfigurer.Builder( - new TaskClient(), Arrays.asList(worker, worker, worker)) - .build(); - configurer.init(); - assertEquals(3, configurer.getThreadCount()); - assertEquals(500, configurer.getSleepWhenRetry()); - assertEquals(3, configurer.getUpdateRetryCount()); - assertEquals(10, configurer.getShutdownGracePeriodSeconds()); - assertTrue(configurer.getTaskThreadCount().isEmpty()); - - configurer = - new TaskRunnerConfigurer.Builder( - new TaskClient(), Collections.singletonList(worker)) - .withThreadCount(100) - .withSleepWhenRetry(100) - 
.withUpdateRetryCount(10) - .withShutdownGracePeriodSeconds(15) - .withWorkerNamePrefix("test-worker-") - .build(); - assertEquals(100, configurer.getThreadCount()); - configurer.init(); - assertEquals(100, configurer.getThreadCount()); - assertEquals(100, configurer.getSleepWhenRetry()); - assertEquals(10, configurer.getUpdateRetryCount()); - assertEquals(15, configurer.getShutdownGracePeriodSeconds()); - assertEquals("test-worker-", configurer.getWorkerNamePrefix()); - assertTrue(configurer.getTaskThreadCount().isEmpty()); - } - - @Test - public void testMultipleWorkersExecution() throws Exception { - String task1Name = "task1"; - Worker worker1 = mock(Worker.class); - when(worker1.getPollingInterval()).thenReturn(3000); - when(worker1.getTaskDefName()).thenReturn(task1Name); - when(worker1.getIdentity()).thenReturn("worker1"); - when(worker1.execute(any())) - .thenAnswer( - invocation -> { - // Sleep for 2 seconds to simulate task execution - Thread.sleep(2000); - TaskResult taskResult = new TaskResult(); - taskResult.setStatus(COMPLETED); - return taskResult; - }); - - String task2Name = "task2"; - Worker worker2 = mock(Worker.class); - when(worker2.getPollingInterval()).thenReturn(3000); - when(worker2.getTaskDefName()).thenReturn(task2Name); - when(worker2.getIdentity()).thenReturn("worker2"); - when(worker2.execute(any())) - .thenAnswer( - invocation -> { - // Sleep for 2 seconds to simulate task execution - Thread.sleep(2000); - TaskResult taskResult = new TaskResult(); - taskResult.setStatus(COMPLETED); - return taskResult; - }); - - Task task1 = testTask(task1Name); - Task task2 = testTask(task2Name); - TaskClient taskClient = Mockito.mock(TaskClient.class); - TaskRunnerConfigurer configurer = - new TaskRunnerConfigurer.Builder(taskClient, Arrays.asList(worker1, worker2)) - .withThreadCount(2) - .withSleepWhenRetry(100000) - .withUpdateRetryCount(1) - .withWorkerNamePrefix("test-worker-") - .build(); - when(taskClient.pollTask(any(), any(), any())) - 
.thenAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - String taskName = args[0].toString(); - if (taskName.equals(task1Name)) { - return task1; - } else if (taskName.equals(task2Name)) { - return task2; - } else { - return null; - } - }); - when(taskClient.ack(any(), any())).thenReturn(true); - - AtomicInteger task1Counter = new AtomicInteger(0); - AtomicInteger task2Counter = new AtomicInteger(0); - CountDownLatch latch = new CountDownLatch(2); - doAnswer( - invocation -> { - Object[] args = invocation.getArguments(); - TaskResult result = (TaskResult) args[0]; - assertEquals(COMPLETED, result.getStatus()); - if (result.getWorkerId().equals("worker1")) { - task1Counter.incrementAndGet(); - } else if (result.getWorkerId().equals("worker2")) { - task2Counter.incrementAndGet(); - } - latch.countDown(); - return null; - }) - .when(taskClient) - .updateTask(any()); - configurer.init(); - latch.await(); - - assertEquals(1, task1Counter.get()); - assertEquals(1, task2Counter.get()); - } - - private Task testTask(String taskDefName) { - Task task = new Task(); - task.setTaskId(UUID.randomUUID().toString()); - task.setStatus(Task.Status.IN_PROGRESS); - task.setTaskDefName(taskDefName); - return task; - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/config/TestPropertyFactory.java b/client/src/test/java/com/netflix/conductor/client/config/TestPropertyFactory.java deleted file mode 100644 index 87b6f40e4..000000000 --- a/client/src/test/java/com/netflix/conductor/client/config/TestPropertyFactory.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.config; - -import org.junit.Test; - -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.TaskResult; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class TestPropertyFactory { - - @Test - public void testIdentity() { - Worker worker = Worker.create("Test2", TaskResult::new); - assertNotNull(worker.getIdentity()); - boolean paused = worker.paused(); - assertFalse("Paused? " + paused, paused); - } - - @Test - public void test() { - - int val = PropertyFactory.getInteger("workerB", "pollingInterval", 100); - assertEquals("got: " + val, 2, val); - assertEquals( - 100, PropertyFactory.getInteger("workerB", "propWithoutValue", 100).intValue()); - - assertFalse( - PropertyFactory.getBoolean( - "workerB", "paused", true)); // Global value set to 'false' - assertTrue( - PropertyFactory.getBoolean( - "workerA", "paused", false)); // WorkerA value set to 'true' - - assertEquals( - 42, - PropertyFactory.getInteger("workerA", "batchSize", 42) - .intValue()); // No global value set, so will return the default value - // supplied - assertEquals( - 84, - PropertyFactory.getInteger("workerB", "batchSize", 42) - .intValue()); // WorkerB's value set to 84 - - assertEquals("domainA", PropertyFactory.getString("workerA", "domain", null)); - assertEquals("domainB", PropertyFactory.getString("workerB", "domain", null)); - assertNull(PropertyFactory.getString("workerC", "domain", null)); // Non 
Existent - } - - @Test - public void testProperty() { - Worker worker = Worker.create("Test", TaskResult::new); - boolean paused = worker.paused(); - assertTrue("Paused? " + paused, paused); - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/sample/Main.java b/client/src/test/java/com/netflix/conductor/client/sample/Main.java deleted file mode 100644 index 6fbbb00d1..000000000 --- a/client/src/test/java/com/netflix/conductor/client/sample/Main.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.sample; - -import java.util.Arrays; - -import com.netflix.conductor.client.automator.TaskRunnerConfigurer; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; - -public class Main { - - public static void main(String[] args) { - - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI("http://localhost:8080/api/"); // Point this to the server API - - int threadCount = - 2; // number of threads used to execute workers. To avoid starvation, should be - // same or more than number of workers - - Worker worker1 = new SampleWorker("task_1"); - Worker worker2 = new SampleWorker("task_5"); - - // Create TaskRunnerConfigurer - TaskRunnerConfigurer configurer = - new TaskRunnerConfigurer.Builder(taskClient, Arrays.asList(worker1, worker2)) - .withThreadCount(threadCount) - .build(); - - // Start the polling and execution of tasks - configurer.init(); - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java b/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java deleted file mode 100644 index cc2cbda60..000000000 --- a/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.sample; - -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskResult.Status; - -public class SampleWorker implements Worker { - - private final String taskDefName; - - public SampleWorker(String taskDefName) { - this.taskDefName = taskDefName; - } - - @Override - public String getTaskDefName() { - return taskDefName; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - result.setStatus(Status.COMPLETED); - - // Register the output of the task - result.getOutputData().put("outputKey1", "value"); - result.getOutputData().put("oddEven", 1); - result.getOutputData().put("mod", 4); - - return result; - } -} diff --git a/client/src/test/java/com/netflix/conductor/client/worker/TestWorkflowTask.java b/client/src/test/java/com/netflix/conductor/client/worker/TestWorkflowTask.java deleted file mode 100644 index 62720e92f..000000000 --- a/client/src/test/java/com/netflix/conductor/client/worker/TestWorkflowTask.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.worker; - -import java.io.InputStream; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class TestWorkflowTask { - - private ObjectMapper objectMapper; - - @Before - public void setup() { - objectMapper = new ObjectMapperProvider().getObjectMapper(); - } - - @Test - public void test() throws Exception { - WorkflowTask task = new WorkflowTask(); - task.setType("Hello"); - task.setName("name"); - - String json = objectMapper.writeValueAsString(task); - - WorkflowTask read = objectMapper.readValue(json, WorkflowTask.class); - assertNotNull(read); - assertEquals(task.getName(), read.getName()); - assertEquals(task.getType(), read.getType()); - - task = new WorkflowTask(); - task.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - task.setName("name"); - - json = objectMapper.writeValueAsString(task); - - read = objectMapper.readValue(json, WorkflowTask.class); - assertNotNull(read); - assertEquals(task.getName(), read.getName()); - assertEquals(task.getType(), read.getType()); - assertEquals(TaskType.SUB_WORKFLOW.name(), read.getType()); - } - - @SuppressWarnings("unchecked") - @Test - public void testObjectMapper() throws Exception { - try (InputStream stream = 
TestWorkflowTask.class.getResourceAsStream("/tasks.json")) { - List tasks = objectMapper.readValue(stream, List.class); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - } - } -} diff --git a/client/src/test/resources/config.properties b/client/src/test/resources/config.properties deleted file mode 100644 index 93fd67347..000000000 --- a/client/src/test/resources/config.properties +++ /dev/null @@ -1,11 +0,0 @@ -conductor.worker.pollingInterval=2 -conductor.worker.paused=false -conductor.worker.workerA.paused=true -conductor.worker.workerA.domain=domainA -conductor.worker.workerB.batchSize=84 -conductor.worker.workerB.domain=domainB -conductor.worker.Test.paused=true -conductor.worker.domainTestTask2.domain=visinghDomain -conductor.worker.task_run_always.pollOutOfDiscovery=true -conductor.worker.task_explicit_do_not_run_always.pollOutOfDiscovery=false -conductor.worker.task_ignore_override.pollOutOfDiscovery=true \ No newline at end of file diff --git a/client/src/test/resources/tasks.json b/client/src/test/resources/tasks.json deleted file mode 100644 index 424b4880e..000000000 --- a/client/src/test/resources/tasks.json +++ /dev/null @@ -1,70 +0,0 @@ -[ - { - "taskType": "task_1", - "status": "IN_PROGRESS", - "inputData": { - "mod": null, - "oddEven": null - }, - "referenceTaskName": "task_1", - "retryCount": 0, - "seq": 1, - "pollCount": 1, - "taskDefName": "task_1", - "scheduledTime": 1539623183131, - "startTime": 1539623436841, - "endTime": 0, - "updateTime": 1539623436841, - "startDelayInSeconds": 0, - "retried": false, - "executed": false, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "2d525ed8-d0e5-44c8-a2df-a110b25c09ac", - "workflowType": "kitchensink", - "taskId": "bc5d9deb-cf86-443d-a1f6-59c36d2464f7", - "callbackAfterSeconds": 0, - "workerId": "test", - "workflowTask": { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": 
"${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "ownerApp": "falguni-test", - "createTime": 1534274994644, - "createdBy": "CPEWORKFLOW", - "name": "task_1", - "description": "Test Task 01", - "retryCount": 0, - "timeoutSeconds": 5, - "inputKeys": [ - "mod", - "oddEven" - ], - "outputKeys": [ - "someOutput" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 0, - "responseTimeoutSeconds": 0, - "concurrentExecLimit": 0, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - } - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "taskDefinition": { - "present": true - }, - "queueWaitTime": 253710, - "taskStatus": "IN_PROGRESS" - } -] \ No newline at end of file diff --git a/common/build.gradle b/common/build.gradle deleted file mode 100644 index 87d4212ea..000000000 --- a/common/build.gradle +++ /dev/null @@ -1,53 +0,0 @@ -configurations { - annotationsProcessorCodegen -} - -dependencies { - implementation project(':conductor-annotations') - annotationsProcessorCodegen project(':conductor-annotations-processor') - - compileOnly 'org.springframework.boot:spring-boot-starter' - compileOnly 'org.springframework.boot:spring-boot-starter-validation' - - compileOnly "org.springdoc:springdoc-openapi-ui:${revOpenapi}" - - implementation "org.apache.commons:commons-lang3" - - implementation "org.apache.bval:bval-jsr:${revBval}" - - implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - - implementation "com.fasterxml.jackson.core:jackson-databind" - implementation "com.fasterxml.jackson.core:jackson-core" - - testImplementation 'org.springframework.boot:spring-boot-starter-validation' -} - -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -task protogen(dependsOn: jar, type: JavaExec) { - classpath configurations.annotationsProcessorCodegen - mainClass = "com.netflix.conductor.annotationsprocessor.protogen.ProtoGenTask" - args( - "conductor.proto", - "com.netflix.conductor.proto", - "github.com/netflix/conductor/client/gogrpc/conductor/model", - "${rootDir}/grpc/src/main/proto", - "${rootDir}/grpc/src/main/java/com/netflix/conductor/grpc", - "com.netflix.conductor.grpc", - jar.archivePath, - "com.netflix.conductor.common", - ) -} - diff --git a/common/dependencies.lock b/common/dependencies.lock deleted file mode 100644 index 813a6cd81..000000000 --- a/common/dependencies.lock +++ /dev/null @@ -1,286 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "annotationsProcessorCodegen": { - "com.github.jknack:handlebars": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "4.3.0" - }, - "com.google.guava:guava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "31.1-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "3.21.1" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - "project": true - }, - "com.netflix.conductor:conductor-annotations-processor": { - "project": true - }, - "com.squareup:javapoet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - 
"locked": "1.13.0" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "1.3.2" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "2.17.2" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-annotations-processor" - ], - "locked": "2.17.2" - } - }, - "compileClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { 
- "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - 
"locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - 
"org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperBuilderConfiguration.java b/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperBuilderConfiguration.java deleted file mode 100644 index a281edb34..000000000 --- a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperBuilderConfiguration.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.config; - -import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES; -import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES; -import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; - -@Configuration -public class ObjectMapperBuilderConfiguration { - - /** Disable features like {@link ObjectMapperProvider#getObjectMapper()}. */ - @Bean - public Jackson2ObjectMapperBuilderCustomizer conductorJackson2ObjectMapperBuilderCustomizer() { - return builder -> - builder.featuresToDisable( - FAIL_ON_UNKNOWN_PROPERTIES, - FAIL_ON_IGNORED_PROPERTIES, - FAIL_ON_NULL_FOR_PRIMITIVES); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperConfiguration.java b/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperConfiguration.java deleted file mode 100644 index 16cd88421..000000000 --- a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperConfiguration.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.config; - -import javax.annotation.PostConstruct; - -import org.springframework.context.annotation.Configuration; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; - -@Configuration -public class ObjectMapperConfiguration { - - private final ObjectMapper objectMapper; - - public ObjectMapperConfiguration(ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - } - - /** Set default property inclusion like {@link ObjectMapperProvider#getObjectMapper()}. */ - @PostConstruct - public void customizeDefaultObjectMapper() { - objectMapper.setDefaultPropertyInclusion( - JsonInclude.Value.construct( - JsonInclude.Include.NON_NULL, JsonInclude.Include.ALWAYS)); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperProvider.java b/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperProvider.java deleted file mode 100644 index 49682a64e..000000000 --- a/common/src/main/java/com/netflix/conductor/common/config/ObjectMapperProvider.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.config; - -import com.netflix.conductor.common.jackson.JsonProtoModule; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * A Factory class for creating a customized {@link ObjectMapper}. This is only used by the - * conductor-client module and tests that rely on {@link ObjectMapper}. See - * TestObjectMapperConfiguration. - */ -public class ObjectMapperProvider { - - /** - * The customizations in this method are configured using {@link - * org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration} - * - *

Customizations are spread across, 1. {@link ObjectMapperBuilderConfiguration} 2. {@link - * ObjectMapperConfiguration} 3. {@link JsonProtoModule} - * - *

IMPORTANT: Changes in this method need to be also performed in the default {@link - * ObjectMapper} that Spring Boot creates. - * - * @see org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration - */ - public ObjectMapper getObjectMapper() { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.setDefaultPropertyInclusion( - JsonInclude.Value.construct( - JsonInclude.Include.NON_NULL, JsonInclude.Include.ALWAYS)); - objectMapper.registerModule(new JsonProtoModule()); - return objectMapper; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/constraints/NoSemiColonConstraint.java b/common/src/main/java/com/netflix/conductor/common/constraints/NoSemiColonConstraint.java deleted file mode 100644 index 3bd402013..000000000 --- a/common/src/main/java/com/netflix/conductor/common/constraints/NoSemiColonConstraint.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.constraints; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; - -import org.apache.commons.lang3.StringUtils; - -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.PARAMETER; - -/** This constraint checks semi-colon is not allowed in a given string. */ -@Documented -@Constraint(validatedBy = NoSemiColonConstraint.NoSemiColonValidator.class) -@Target({FIELD, PARAMETER}) -@Retention(RetentionPolicy.RUNTIME) -public @interface NoSemiColonConstraint { - - String message() default "String: cannot contain the following set of characters: ':'"; - - Class[] groups() default {}; - - Class[] payload() default {}; - - class NoSemiColonValidator implements ConstraintValidator { - - @Override - public void initialize(NoSemiColonConstraint constraintAnnotation) {} - - @Override - public boolean isValid(String value, ConstraintValidatorContext context) { - boolean valid = true; - - if (!StringUtils.isEmpty(value) && value.contains(":")) { - valid = false; - } - - return valid; - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/constraints/OwnerEmailMandatoryConstraint.java b/common/src/main/java/com/netflix/conductor/common/constraints/OwnerEmailMandatoryConstraint.java deleted file mode 100644 index 55347529d..000000000 --- 
a/common/src/main/java/com/netflix/conductor/common/constraints/OwnerEmailMandatoryConstraint.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.constraints; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; - -import org.apache.commons.lang3.StringUtils; - -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.TYPE; - -/** - * This constraint class validates that owner email is non-empty, but only if configuration says - * owner email is mandatory. 
- */ -@Documented -@Constraint(validatedBy = OwnerEmailMandatoryConstraint.WorkflowTaskValidValidator.class) -@Target({TYPE, FIELD}) -@Retention(RetentionPolicy.RUNTIME) -public @interface OwnerEmailMandatoryConstraint { - - String message() default "ownerEmail cannot be empty"; - - Class[] groups() default {}; - - Class[] payload() default {}; - - class WorkflowTaskValidValidator - implements ConstraintValidator { - - @Override - public void initialize(OwnerEmailMandatoryConstraint constraintAnnotation) {} - - @Override - public boolean isValid(String ownerEmail, ConstraintValidatorContext context) { - return !ownerEmailMandatory || !StringUtils.isEmpty(ownerEmail); - } - - private static boolean ownerEmailMandatory = true; - - public static void setOwnerEmailMandatory(boolean ownerEmailMandatory) { - WorkflowTaskValidValidator.ownerEmailMandatory = ownerEmailMandatory; - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/constraints/TaskReferenceNameUniqueConstraint.java b/common/src/main/java/com/netflix/conductor/common/constraints/TaskReferenceNameUniqueConstraint.java deleted file mode 100644 index 24f0ff433..000000000 --- a/common/src/main/java/com/netflix/conductor/common/constraints/TaskReferenceNameUniqueConstraint.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.constraints; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.util.HashMap; -import java.util.List; - -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; - -import org.apache.commons.lang3.mutable.MutableBoolean; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.ConstraintParamUtil; - -import static java.lang.annotation.ElementType.TYPE; - -/** - * This constraint class validates following things. - * - *

    - *
  • 1. WorkflowDef is valid or not - *
  • 2. Make sure taskReferenceName used across different tasks are unique - *
  • 3. Verify inputParameters points to correct tasks or not - *
- */ -@Documented -@Constraint(validatedBy = TaskReferenceNameUniqueConstraint.TaskReferenceNameUniqueValidator.class) -@Target({TYPE}) -@Retention(RetentionPolicy.RUNTIME) -public @interface TaskReferenceNameUniqueConstraint { - - String message() default ""; - - Class[] groups() default {}; - - Class[] payload() default {}; - - class TaskReferenceNameUniqueValidator - implements ConstraintValidator { - - @Override - public void initialize(TaskReferenceNameUniqueConstraint constraintAnnotation) {} - - @Override - public boolean isValid(WorkflowDef workflowDef, ConstraintValidatorContext context) { - context.disableDefaultConstraintViolation(); - - boolean valid = true; - - // check if taskReferenceNames are unique across tasks or not - HashMap taskReferenceMap = new HashMap<>(); - for (WorkflowTask workflowTask : workflowDef.collectTasks()) { - if (taskReferenceMap.containsKey(workflowTask.getTaskReferenceName())) { - String message = - String.format( - "taskReferenceName: %s should be unique across tasks for a given workflowDefinition: %s", - workflowTask.getTaskReferenceName(), workflowDef.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } else { - taskReferenceMap.put(workflowTask.getTaskReferenceName(), 1); - } - } - // check inputParameters points to valid taskDef - return valid & verifyTaskInputParameters(context, workflowDef); - } - - private boolean verifyTaskInputParameters( - ConstraintValidatorContext context, WorkflowDef workflow) { - MutableBoolean valid = new MutableBoolean(); - valid.setValue(true); - - if (workflow.getTasks() == null) { - return valid.getValue(); - } - - workflow.getTasks().stream() - .filter(workflowTask -> workflowTask.getInputParameters() != null) - .forEach( - workflowTask -> { - List errors = - ConstraintParamUtil.validateInputParam( - workflowTask.getInputParameters(), - workflowTask.getName(), - workflow); - errors.forEach( - message -> - 
context.buildConstraintViolationWithTemplate( - message) - .addConstraintViolation()); - if (errors.size() > 0) { - valid.setValue(false); - } - }); - - return valid.getValue(); - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/constraints/TaskTimeoutConstraint.java b/common/src/main/java/com/netflix/conductor/common/constraints/TaskTimeoutConstraint.java deleted file mode 100644 index 56525c7b5..000000000 --- a/common/src/main/java/com/netflix/conductor/common/constraints/TaskTimeoutConstraint.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.constraints; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; - -import static java.lang.annotation.ElementType.TYPE; - -/** - * This constraint checks for a given task responseTimeoutSeconds should be less than - * timeoutSeconds. - */ -@Documented -@Constraint(validatedBy = TaskTimeoutConstraint.TaskTimeoutValidator.class) -@Target({TYPE}) -@Retention(RetentionPolicy.RUNTIME) -public @interface TaskTimeoutConstraint { - - String message() default ""; - - Class[] groups() default {}; - - Class[] payload() default {}; - - class TaskTimeoutValidator implements ConstraintValidator { - - @Override - public void initialize(TaskTimeoutConstraint constraintAnnotation) {} - - @Override - public boolean isValid(TaskDef taskDef, ConstraintValidatorContext context) { - context.disableDefaultConstraintViolation(); - - boolean valid = true; - - if (taskDef.getTimeoutSeconds() > 0) { - if (taskDef.getResponseTimeoutSeconds() > taskDef.getTimeoutSeconds()) { - valid = false; - String message = - String.format( - "TaskDef: %s responseTimeoutSeconds: %d must be less than timeoutSeconds: %d", - taskDef.getName(), - taskDef.getResponseTimeoutSeconds(), - taskDef.getTimeoutSeconds()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - } - } - - return 
valid; - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/jackson/JsonProtoModule.java b/common/src/main/java/com/netflix/conductor/common/jackson/JsonProtoModule.java deleted file mode 100644 index 29bb5e11d..000000000 --- a/common/src/main/java/com/netflix/conductor/common/jackson/JsonProtoModule.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.jackson; - -import java.io.IOException; - -import org.springframework.stereotype.Component; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.protobuf.Any; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; - -/** - * JsonProtoModule can be registered into an {@link ObjectMapper} to enable the serialization and - * deserialization of ProtoBuf objects from/to JSON. - * - *

Right now this module only provides (de)serialization for the {@link Any} ProtoBuf type, as - * this is the only ProtoBuf object which we're currently exposing through the REST API. - * - *

Annotated as {@link Component} so Spring can register it with {@link ObjectMapper} - * - * @see AnySerializer - * @see AnyDeserializer - * @see org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration - */ -@Component(JsonProtoModule.NAME) -public class JsonProtoModule extends SimpleModule { - - public static final String NAME = "ConductorJsonProtoModule"; - - private static final String JSON_TYPE = "@type"; - private static final String JSON_VALUE = "@value"; - - /** - * AnySerializer converts a ProtoBuf {@link Any} object into its JSON representation. - * - *

This is not a canonical ProtoBuf JSON representation. Let us explain what we're - * trying to accomplish here: - * - *

The {@link Any} ProtoBuf message is a type in the PB standard library that can store any - * other arbitrary ProtoBuf message in a type-safe way, even when the server has no knowledge of - * the schema of the stored message. - * - *

It accomplishes this by storing a tuple of information: an URL-like type declaration for - * the stored message, and the serialized binary encoding of the stored message itself. Language - * specific implementations of ProtoBuf provide helper methods to encode and decode arbitrary - * messages into an {@link Any} object ({@link Any#pack(Message)} in Java). - * - *

We want to expose these {@link Any} objects in the REST API because they've been - * introduced as part of the new GRPC interface to Conductor, but unfortunately we cannot encode - * them using their canonical ProtoBuf JSON encoding. According to the docs: - * - *

The JSON representation of an `Any` value uses the regular representation of the - * deserialized, embedded message, with an additional field `@type` which contains the type URL. - * Example: - * - *

package google.profile; message Person { string first_name = 1; string last_name = 2; } { - * "@type": "type.googleapis.com/google.profile.Person", "firstName": , "lastName": - * } - * - *

In order to accomplish this representation, the PB-JSON encoder needs to have knowledge of - * all the ProtoBuf messages that could be serialized inside the {@link Any} message. This is - * not possible to accomplish inside the Conductor server, which is simply passing through - * arbitrary payloads from/to clients. - * - *

Consequently, to actually expose the Message through the REST API, we must create a custom - * encoding that contains the raw data of the serialized message, as we are not able to - * deserialize it on the server. We simply return a dictionary with '@type' and '@value' keys, - * where '@type' is identical to the canonical representation, but '@value' contains a base64 - * encoded string with the binary data of the serialized message. - * - *

Since all the provided Conductor clients are required to know this encoding, it's always - * possible to re-build the original {@link Any} message regardless of the client's language. - * - *

{@see AnyDeserializer} - */ - @SuppressWarnings("InnerClassMayBeStatic") - protected class AnySerializer extends JsonSerializer { - - @Override - public void serialize(Any value, JsonGenerator jgen, SerializerProvider provider) - throws IOException { - jgen.writeStartObject(); - jgen.writeStringField(JSON_TYPE, value.getTypeUrl()); - jgen.writeBinaryField(JSON_VALUE, value.getValue().toByteArray()); - jgen.writeEndObject(); - } - } - - /** - * AnyDeserializer converts the custom JSON representation of an {@link Any} value into its - * original form. - * - *

{@see AnySerializer} for details on this representation. - */ - @SuppressWarnings("InnerClassMayBeStatic") - protected class AnyDeserializer extends JsonDeserializer { - - @Override - public Any deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { - JsonNode root = p.getCodec().readTree(p); - JsonNode type = root.get(JSON_TYPE); - JsonNode value = root.get(JSON_VALUE); - - if (type == null || !type.isTextual()) { - ctxt.reportMappingException( - "invalid '@type' field when deserializing ProtoBuf Any object"); - } - - if (value == null || !value.isTextual()) { - ctxt.reportMappingException( - "invalid '@value' field when deserializing ProtoBuf Any object"); - } - - return Any.newBuilder() - .setTypeUrl(type.textValue()) - .setValue(ByteString.copyFrom(value.binaryValue())) - .build(); - } - } - - public JsonProtoModule() { - super(NAME); - addSerializer(Any.class, new AnySerializer()); - addDeserializer(Any.class, new AnyDeserializer()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/Auditable.java b/common/src/main/java/com/netflix/conductor/common/metadata/Auditable.java deleted file mode 100644 index 01f229480..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/Auditable.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata; - -public abstract class Auditable { - - private String ownerApp; - - private Long createTime; - - private Long updateTime; - - private String createdBy; - - private String updatedBy; - - /** - * @return the ownerApp - */ - public String getOwnerApp() { - return ownerApp; - } - - /** - * @param ownerApp the ownerApp to set - */ - public void setOwnerApp(String ownerApp) { - this.ownerApp = ownerApp; - } - - /** - * @return the createTime - */ - public Long getCreateTime() { - return createTime; - } - - /** - * @param createTime the createTime to set - */ - public void setCreateTime(Long createTime) { - this.createTime = createTime; - } - - /** - * @return the updateTime - */ - public Long getUpdateTime() { - return updateTime; - } - - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(Long updateTime) { - this.updateTime = updateTime; - } - - /** - * @return the createdBy - */ - public String getCreatedBy() { - return createdBy; - } - - /** - * @param createdBy the createdBy to set - */ - public void setCreatedBy(String createdBy) { - this.createdBy = createdBy; - } - - /** - * @return the updatedBy - */ - public String getUpdatedBy() { - return updatedBy; - } - - /** - * @param updatedBy the updatedBy to set - */ - public void setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java deleted file mode 100644 index d6a2065e6..000000000 --- 
a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.events; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.events.EventHandler.Action; - -@ProtoMessage -public class EventExecution { - - @ProtoEnum - public enum Status { - IN_PROGRESS, - COMPLETED, - FAILED, - SKIPPED - } - - @ProtoField(id = 1) - private String id; - - @ProtoField(id = 2) - private String messageId; - - @ProtoField(id = 3) - private String name; - - @ProtoField(id = 4) - private String event; - - @ProtoField(id = 5) - private long created; - - @ProtoField(id = 6) - private Status status; - - @ProtoField(id = 7) - private Action.Type action; - - @ProtoField(id = 8) - private Map output = new HashMap<>(); - - public EventExecution() {} - - public EventExecution(String id, String messageId) { - this.id = id; - this.messageId = messageId; - } - - /** - * @return the id - */ - public String getId() { - return id; - } - - /** - * @param id the id to set - */ - public void setId(String id) { - this.id = id; - } - - /** - * @return the messageId - */ - public String getMessageId() { - return messageId; - } - - /** - * @param messageId the messageId to set - */ - public void setMessageId(String messageId) { - this.messageId = messageId; - } - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - 
/** - * @return the event - */ - public String getEvent() { - return event; - } - - /** - * @param event the event to set - */ - public void setEvent(String event) { - this.event = event; - } - - /** - * @return the created - */ - public long getCreated() { - return created; - } - - /** - * @param created the created to set - */ - public void setCreated(long created) { - this.created = created; - } - - /** - * @return the status - */ - public Status getStatus() { - return status; - } - - /** - * @param status the status to set - */ - public void setStatus(Status status) { - this.status = status; - } - - /** - * @return the action - */ - public Action.Type getAction() { - return action; - } - - /** - * @param action the action to set - */ - public void setAction(Action.Type action) { - this.action = action; - } - - /** - * @return the output - */ - public Map getOutput() { - return output; - } - - /** - * @param output the output to set - */ - public void setOutput(Map output) { - this.output = output; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - EventExecution execution = (EventExecution) o; - return created == execution.created - && Objects.equals(id, execution.id) - && Objects.equals(messageId, execution.messageId) - && Objects.equals(name, execution.name) - && Objects.equals(event, execution.event) - && status == execution.status - && action == execution.action - && Objects.equals(output, execution.output); - } - - @Override - public int hashCode() { - return Objects.hash(id, messageId, name, event, created, status, action, output); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java deleted file mode 100644 index 77dda4c1e..000000000 --- 
a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java +++ /dev/null @@ -1,418 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.events; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -import com.google.protobuf.Any; -import io.swagger.v3.oas.annotations.Hidden; - -/** Defines an event handler */ -@ProtoMessage -public class EventHandler { - - @ProtoField(id = 1) - @NotEmpty(message = "Missing event handler name") - private String name; - - @ProtoField(id = 2) - @NotEmpty(message = "Missing event location") - private String event; - - @ProtoField(id = 3) - private String condition; - - @ProtoField(id = 4) - @NotNull - @NotEmpty(message = "No actions specified. 
Please specify at-least one action") - private List<@Valid Action> actions = new LinkedList<>(); - - @ProtoField(id = 5) - private boolean active; - - @ProtoField(id = 6) - private String evaluatorType; - - public EventHandler() {} - - /** - * @return the name MUST be unique within a conductor instance - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the event - */ - public String getEvent() { - return event; - } - - /** - * @param event the event to set - */ - public void setEvent(String event) { - this.event = event; - } - - /** - * @return the condition - */ - public String getCondition() { - return condition; - } - - /** - * @param condition the condition to set - */ - public void setCondition(String condition) { - this.condition = condition; - } - - /** - * @return the actions - */ - public List getActions() { - return actions; - } - - /** - * @param actions the actions to set - */ - public void setActions(List actions) { - this.actions = actions; - } - - /** - * @return the active - */ - public boolean isActive() { - return active; - } - - /** - * @param active if set to false, the event handler is deactivated - */ - public void setActive(boolean active) { - this.active = active; - } - - /** - * @return the evaluator type - */ - public String getEvaluatorType() { - return evaluatorType; - } - - /** - * @param evaluatorType the evaluatorType to set - */ - public void setEvaluatorType(String evaluatorType) { - this.evaluatorType = evaluatorType; - } - - @ProtoMessage - public static class Action { - - @ProtoEnum - public enum Type { - start_workflow, - complete_task, - fail_task - } - - @ProtoField(id = 1) - private Type action; - - @ProtoField(id = 2) - private StartWorkflow start_workflow; - - @ProtoField(id = 3) - private TaskDetails complete_task; - - @ProtoField(id = 4) - private TaskDetails fail_task; - - @ProtoField(id = 5) - 
private boolean expandInlineJSON; - - /** - * @return the action - */ - public Type getAction() { - return action; - } - - /** - * @param action the action to set - */ - public void setAction(Type action) { - this.action = action; - } - - /** - * @return the start_workflow - */ - public StartWorkflow getStart_workflow() { - return start_workflow; - } - - /** - * @param start_workflow the start_workflow to set - */ - public void setStart_workflow(StartWorkflow start_workflow) { - this.start_workflow = start_workflow; - } - - /** - * @return the complete_task - */ - public TaskDetails getComplete_task() { - return complete_task; - } - - /** - * @param complete_task the complete_task to set - */ - public void setComplete_task(TaskDetails complete_task) { - this.complete_task = complete_task; - } - - /** - * @return the fail_task - */ - public TaskDetails getFail_task() { - return fail_task; - } - - /** - * @param fail_task the fail_task to set - */ - public void setFail_task(TaskDetails fail_task) { - this.fail_task = fail_task; - } - - /** - * @param expandInlineJSON when set to true, the in-lined JSON strings are expanded to a - * full json document - */ - public void setExpandInlineJSON(boolean expandInlineJSON) { - this.expandInlineJSON = expandInlineJSON; - } - - /** - * @return true if the json strings within the payload should be expanded. 
- */ - public boolean isExpandInlineJSON() { - return expandInlineJSON; - } - } - - @ProtoMessage - public static class TaskDetails { - - @ProtoField(id = 1) - private String workflowId; - - @ProtoField(id = 2) - private String taskRefName; - - @ProtoField(id = 3) - private Map output = new HashMap<>(); - - @ProtoField(id = 4) - @Hidden - private Any outputMessage; - - @ProtoField(id = 5) - private String taskId; - - /** - * @return the workflowId - */ - public String getWorkflowId() { - return workflowId; - } - - /** - * @param workflowId the workflowId to set - */ - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - - /** - * @return the taskRefName - */ - public String getTaskRefName() { - return taskRefName; - } - - /** - * @param taskRefName the taskRefName to set - */ - public void setTaskRefName(String taskRefName) { - this.taskRefName = taskRefName; - } - - /** - * @return the output - */ - public Map getOutput() { - return output; - } - - /** - * @param output the output to set - */ - public void setOutput(Map output) { - this.output = output; - } - - public Any getOutputMessage() { - return outputMessage; - } - - public void setOutputMessage(Any outputMessage) { - this.outputMessage = outputMessage; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - } - - @ProtoMessage - public static class StartWorkflow { - - @ProtoField(id = 1) - private String name; - - @ProtoField(id = 2) - private Integer version; - - @ProtoField(id = 3) - private String correlationId; - - @ProtoField(id = 4) - private Map input = new HashMap<>(); - - @ProtoField(id = 5) - @Hidden - private Any inputMessage; - - @ProtoField(id = 6) - private Map taskToDomain; - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public 
void setName(String name) { - this.name = name; - } - - /** - * @return the version - */ - public Integer getVersion() { - return version; - } - - /** - * @param version the version to set - */ - public void setVersion(Integer version) { - this.version = version; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlationId to set - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * @return the input - */ - public Map getInput() { - return input; - } - - /** - * @param input the input to set - */ - public void setInput(Map input) { - this.input = input; - } - - public Any getInputMessage() { - return inputMessage; - } - - public void setInputMessage(Any inputMessage) { - this.inputMessage = inputMessage; - } - - public Map getTaskToDomain() { - return taskToDomain; - } - - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java deleted file mode 100644 index b058e2cd4..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.Objects; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -@ProtoMessage -public class PollData { - - @ProtoField(id = 1) - private String queueName; - - @ProtoField(id = 2) - private String domain; - - @ProtoField(id = 3) - private String workerId; - - @ProtoField(id = 4) - private long lastPollTime; - - public PollData() { - super(); - } - - public PollData(String queueName, String domain, String workerId, long lastPollTime) { - super(); - this.queueName = queueName; - this.domain = domain; - this.workerId = workerId; - this.lastPollTime = lastPollTime; - } - - public String getQueueName() { - return queueName; - } - - public void setQueueName(String queueName) { - this.queueName = queueName; - } - - public String getDomain() { - return domain; - } - - public void setDomain(String domain) { - this.domain = domain; - } - - public String getWorkerId() { - return workerId; - } - - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - public long getLastPollTime() { - return lastPollTime; - } - - public void setLastPollTime(long lastPollTime) { - this.lastPollTime = lastPollTime; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - PollData pollData = (PollData) o; - return getLastPollTime() == pollData.getLastPollTime() - && Objects.equals(getQueueName(), pollData.getQueueName()) - && Objects.equals(getDomain(), pollData.getDomain()) - && 
Objects.equals(getWorkerId(), pollData.getWorkerId()); - } - - @Override - public int hashCode() { - return Objects.hash(getQueueName(), getDomain(), getWorkerId(), getLastPollTime()); - } - - @Override - public String toString() { - return "PollData{" - + "queueName='" - + queueName - + '\'' - + ", domain='" - + domain - + '\'' - + ", workerId='" - + workerId - + '\'' - + ", lastPollTime=" - + lastPollTime - + '}'; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java deleted file mode 100644 index f51016340..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ /dev/null @@ -1,1008 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.google.protobuf.Any; -import io.swagger.v3.oas.annotations.Hidden; - -@ProtoMessage -public class Task { - - @ProtoEnum - public enum Status { - IN_PROGRESS(false, true, true), - CANCELED(true, false, false), - FAILED(true, false, true), - FAILED_WITH_TERMINAL_ERROR( - true, false, - false), // No retries even if retries are configured, the task and the related - // workflow should be terminated - COMPLETED(true, true, true), - COMPLETED_WITH_ERRORS(true, true, true), - SCHEDULED(false, true, true), - TIMED_OUT(true, false, true), - SKIPPED(true, true, false); - - private final boolean terminal; - - private final boolean successful; - - private final boolean retriable; - - Status(boolean terminal, boolean successful, boolean retriable) { - this.terminal = terminal; - this.successful = successful; - this.retriable = retriable; - } - - public boolean isTerminal() { - return terminal; - } - - public boolean isSuccessful() { - return successful; - } - - public boolean isRetriable() { - return retriable; - } - } - - @ProtoField(id = 1) - private String taskType; - - @ProtoField(id = 2) - private Status status; - - @ProtoField(id = 3) - private Map inputData = new HashMap<>(); - 
- @ProtoField(id = 4) - private String referenceTaskName; - - @ProtoField(id = 5) - private int retryCount; - - @ProtoField(id = 6) - private int seq; - - @ProtoField(id = 7) - private String correlationId; - - @ProtoField(id = 8) - private int pollCount; - - @ProtoField(id = 9) - private String taskDefName; - - /** Time when the task was scheduled */ - @ProtoField(id = 10) - private long scheduledTime; - - /** Time when the task was first polled */ - @ProtoField(id = 11) - private long startTime; - - /** Time when the task completed executing */ - @ProtoField(id = 12) - private long endTime; - - /** Time when the task was last updated */ - @ProtoField(id = 13) - private long updateTime; - - @ProtoField(id = 14) - private int startDelayInSeconds; - - @ProtoField(id = 15) - private String retriedTaskId; - - @ProtoField(id = 16) - private boolean retried; - - @ProtoField(id = 17) - private boolean executed; - - @ProtoField(id = 18) - private boolean callbackFromWorker = true; - - @ProtoField(id = 19) - private long responseTimeoutSeconds; - - @ProtoField(id = 20) - private String workflowInstanceId; - - @ProtoField(id = 21) - private String workflowType; - - @ProtoField(id = 22) - private String taskId; - - @ProtoField(id = 23) - private String reasonForIncompletion; - - @ProtoField(id = 24) - private long callbackAfterSeconds; - - @ProtoField(id = 25) - private String workerId; - - @ProtoField(id = 26) - private Map outputData = new HashMap<>(); - - @ProtoField(id = 27) - private WorkflowTask workflowTask; - - @ProtoField(id = 28) - private String domain; - - @ProtoField(id = 29) - @Hidden - private Any inputMessage; - - @ProtoField(id = 30) - @Hidden - private Any outputMessage; - - // id 31 is reserved - - @ProtoField(id = 32) - private int rateLimitPerFrequency; - - @ProtoField(id = 33) - private int rateLimitFrequencyInSeconds; - - @ProtoField(id = 34) - private String externalInputPayloadStoragePath; - - @ProtoField(id = 35) - private String 
externalOutputPayloadStoragePath; - - @ProtoField(id = 36) - private int workflowPriority; - - @ProtoField(id = 37) - private String executionNameSpace; - - @ProtoField(id = 38) - private String isolationGroupId; - - @ProtoField(id = 40) - private int iteration; - - @ProtoField(id = 41) - private String subWorkflowId; - - /** - * Use to note that a sub workflow associated with SUB_WORKFLOW task has an action performed on - * it directly. - */ - @ProtoField(id = 42) - private boolean subworkflowChanged; - - public Task() {} - - /** - * @return Type of the task - * @see TaskType - */ - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - /** - * @return Status of the task - */ - public Status getStatus() { - return status; - } - - /** - * @param status Status of the task - */ - public void setStatus(Status status) { - this.status = status; - } - - public Map getInputData() { - return inputData; - } - - public void setInputData(Map inputData) { - if (inputData == null) { - inputData = new HashMap<>(); - } - this.inputData = inputData; - } - - /** - * @return the referenceTaskName - */ - public String getReferenceTaskName() { - return referenceTaskName; - } - - /** - * @param referenceTaskName the referenceTaskName to set - */ - public void setReferenceTaskName(String referenceTaskName) { - this.referenceTaskName = referenceTaskName; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlationId to set - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * @return the retryCount - */ - public int getRetryCount() { - return retryCount; - } - - /** - * @param retryCount the retryCount to set - */ - public void setRetryCount(int retryCount) { - this.retryCount = retryCount; - } - - /** - * @return the scheduledTime - */ - public long 
getScheduledTime() { - return scheduledTime; - } - - /** - * @param scheduledTime the scheduledTime to set - */ - public void setScheduledTime(long scheduledTime) { - this.scheduledTime = scheduledTime; - } - - /** - * @return the startTime - */ - public long getStartTime() { - return startTime; - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - /** - * @return the endTime - */ - public long getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - /** - * @return the startDelayInSeconds - */ - public int getStartDelayInSeconds() { - return startDelayInSeconds; - } - - /** - * @param startDelayInSeconds the startDelayInSeconds to set - */ - public void setStartDelayInSeconds(int startDelayInSeconds) { - this.startDelayInSeconds = startDelayInSeconds; - } - - /** - * @return the retriedTaskId - */ - public String getRetriedTaskId() { - return retriedTaskId; - } - - /** - * @param retriedTaskId the retriedTaskId to set - */ - public void setRetriedTaskId(String retriedTaskId) { - this.retriedTaskId = retriedTaskId; - } - - /** - * @return the seq - */ - public int getSeq() { - return seq; - } - - /** - * @param seq the seq to set - */ - public void setSeq(int seq) { - this.seq = seq; - } - - /** - * @return the updateTime - */ - public long getUpdateTime() { - return updateTime; - } - - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(long updateTime) { - this.updateTime = updateTime; - } - - /** - * @return the queueWaitTime - */ - public long getQueueWaitTime() { - if (this.startTime > 0 && this.scheduledTime > 0) { - if (this.updateTime > 0 && getCallbackAfterSeconds() > 0) { - long waitTime = - System.currentTimeMillis() - - (this.updateTime + (getCallbackAfterSeconds() * 1000)); - return waitTime > 0 ? 
waitTime : 0; - } else { - return this.startTime - this.scheduledTime; - } - } - return 0L; - } - - /** - * @return True if the task has been retried after failure - */ - public boolean isRetried() { - return retried; - } - - /** - * @param retried the retried to set - */ - public void setRetried(boolean retried) { - this.retried = retried; - } - - /** - * @return True if the task has completed its lifecycle within conductor (from start to - * completion to being updated in the datastore) - */ - public boolean isExecuted() { - return executed; - } - - /** - * @param executed the executed value to set - */ - public void setExecuted(boolean executed) { - this.executed = executed; - } - - /** - * @return No. of times task has been polled - */ - public int getPollCount() { - return pollCount; - } - - public void setPollCount(int pollCount) { - this.pollCount = pollCount; - } - - public void incrementPollCount() { - ++this.pollCount; - } - - public boolean isCallbackFromWorker() { - return callbackFromWorker; - } - - public void setCallbackFromWorker(boolean callbackFromWorker) { - this.callbackFromWorker = callbackFromWorker; - } - - /** - * @return Name of the task definition - */ - public String getTaskDefName() { - if (taskDefName == null || "".equals(taskDefName)) { - taskDefName = taskType; - } - return taskDefName; - } - - /** - * @param taskDefName Name of the task definition - */ - public void setTaskDefName(String taskDefName) { - this.taskDefName = taskDefName; - } - - /** - * @return the timeout for task to send response. After this timeout, the task will be re-queued - */ - public long getResponseTimeoutSeconds() { - return responseTimeoutSeconds; - } - - /** - * @param responseTimeoutSeconds - timeout for task to send response. 
After this timeout, the - * task will be re-queued - */ - public void setResponseTimeoutSeconds(long responseTimeoutSeconds) { - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - /** - * @return the workflowInstanceId - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - /** - * @param workflowInstanceId the workflowInstanceId to set - */ - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getWorkflowType() { - return workflowType; - } - - /** - * @param workflowType the name of the workflow - * @return the task object with the workflow type set - */ - public com.netflix.conductor.common.metadata.tasks.Task setWorkflowType(String workflowType) { - this.workflowType = workflowType; - return this; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - /** - * @return the reasonForIncompletion - */ - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - /** - * @param reasonForIncompletion the reasonForIncompletion to set - */ - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = StringUtils.substring(reasonForIncompletion, 0, 500); - } - - /** - * @return the callbackAfterSeconds - */ - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * @param callbackAfterSeconds the callbackAfterSeconds to set - */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - /** - * @return the workerId - */ - public String getWorkerId() { - return workerId; - } - - /** - * @param workerId the workerId to set - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the outputData - */ 
- public Map getOutputData() { - return outputData; - } - - /** - * @param outputData the outputData to set - */ - public void setOutputData(Map outputData) { - if (outputData == null) { - outputData = new HashMap<>(); - } - this.outputData = outputData; - } - - /** - * @return Workflow Task definition - */ - public WorkflowTask getWorkflowTask() { - return workflowTask; - } - - /** - * @param workflowTask Task definition - */ - public void setWorkflowTask(WorkflowTask workflowTask) { - this.workflowTask = workflowTask; - } - - /** - * @return the domain - */ - public String getDomain() { - return domain; - } - - /** - * @param domain the Domain - */ - public void setDomain(String domain) { - this.domain = domain; - } - - public Any getInputMessage() { - return inputMessage; - } - - public void setInputMessage(Any inputMessage) { - this.inputMessage = inputMessage; - } - - public Any getOutputMessage() { - return outputMessage; - } - - public void setOutputMessage(Any outputMessage) { - this.outputMessage = outputMessage; - } - - /** - * @return {@link Optional} containing the task definition if available - */ - public Optional getTaskDefinition() { - return Optional.ofNullable(this.getWorkflowTask()).map(WorkflowTask::getTaskDefinition); - } - - public int getRateLimitPerFrequency() { - return rateLimitPerFrequency; - } - - public void setRateLimitPerFrequency(int rateLimitPerFrequency) { - this.rateLimitPerFrequency = rateLimitPerFrequency; - } - - public int getRateLimitFrequencyInSeconds() { - return rateLimitFrequencyInSeconds; - } - - public void setRateLimitFrequencyInSeconds(int rateLimitFrequencyInSeconds) { - this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds; - } - - /** - * @return the external storage path for the task input payload - */ - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - /** - * @param externalInputPayloadStoragePath the external storage path where the task input payload 
- * is stored - */ - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - /** - * @return the external storage path for the task output payload - */ - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - /** - * @param externalOutputPayloadStoragePath the external storage path where the task output - * payload is stored - */ - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - public void setIsolationGroupId(String isolationGroupId) { - this.isolationGroupId = isolationGroupId; - } - - public String getIsolationGroupId() { - return isolationGroupId; - } - - public String getExecutionNameSpace() { - return executionNameSpace; - } - - public void setExecutionNameSpace(String executionNameSpace) { - this.executionNameSpace = executionNameSpace; - } - - /** - * @return the iteration - */ - public int getIteration() { - return iteration; - } - - /** - * @param iteration iteration - */ - public void setIteration(int iteration) { - this.iteration = iteration; - } - - public boolean isLoopOverTask() { - return iteration > 0; - } - - /** * @return the priority defined on workflow */ - public int getWorkflowPriority() { - return workflowPriority; - } - - /** - * @param workflowPriority Priority defined for workflow - */ - public void setWorkflowPriority(int workflowPriority) { - this.workflowPriority = workflowPriority; - } - - public boolean isSubworkflowChanged() { - return subworkflowChanged; - } - - public void setSubworkflowChanged(boolean subworkflowChanged) { - this.subworkflowChanged = subworkflowChanged; - } - - public String getSubWorkflowId() { - // For backwards compatibility - if (StringUtils.isNotBlank(subWorkflowId)) { - return subWorkflowId; - } else { - return 
this.getOutputData() != null && this.getOutputData().get("subWorkflowId") != null - ? (String) this.getOutputData().get("subWorkflowId") - : this.getInputData() != null - ? (String) this.getInputData().get("subWorkflowId") - : null; - } - } - - public void setSubWorkflowId(String subWorkflowId) { - this.subWorkflowId = subWorkflowId; - // For backwards compatibility - if (this.getOutputData() != null && this.getOutputData().containsKey("subWorkflowId")) { - this.getOutputData().put("subWorkflowId", subWorkflowId); - } - } - - public Task copy() { - Task copy = new Task(); - copy.setCallbackAfterSeconds(callbackAfterSeconds); - copy.setCallbackFromWorker(callbackFromWorker); - copy.setCorrelationId(correlationId); - copy.setInputData(inputData); - copy.setOutputData(outputData); - copy.setReferenceTaskName(referenceTaskName); - copy.setStartDelayInSeconds(startDelayInSeconds); - copy.setTaskDefName(taskDefName); - copy.setTaskType(taskType); - copy.setWorkflowInstanceId(workflowInstanceId); - copy.setWorkflowType(workflowType); - copy.setResponseTimeoutSeconds(responseTimeoutSeconds); - copy.setStatus(status); - copy.setRetryCount(retryCount); - copy.setPollCount(pollCount); - copy.setTaskId(taskId); - copy.setWorkflowTask(workflowTask); - copy.setDomain(domain); - copy.setInputMessage(inputMessage); - copy.setOutputMessage(outputMessage); - copy.setRateLimitPerFrequency(rateLimitPerFrequency); - copy.setRateLimitFrequencyInSeconds(rateLimitFrequencyInSeconds); - copy.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); - copy.setExternalOutputPayloadStoragePath(externalOutputPayloadStoragePath); - copy.setWorkflowPriority(workflowPriority); - copy.setIteration(iteration); - copy.setExecutionNameSpace(executionNameSpace); - copy.setIsolationGroupId(isolationGroupId); - copy.setSubWorkflowId(getSubWorkflowId()); - copy.setSubworkflowChanged(subworkflowChanged); - - return copy; - } - - /** - * @return a deep copy of the task instance To be used inside 
copy Workflow method to provide a - * valid deep copied object. Note: This does not copy the following fields: - *

    - *
  • retried - *
  • updateTime - *
  • retriedTaskId - *
- */ - public Task deepCopy() { - Task deepCopy = copy(); - deepCopy.setStartTime(startTime); - deepCopy.setScheduledTime(scheduledTime); - deepCopy.setEndTime(endTime); - deepCopy.setWorkerId(workerId); - deepCopy.setReasonForIncompletion(reasonForIncompletion); - deepCopy.setSeq(seq); - - return deepCopy; - } - - @Override - public String toString() { - return "Task{" - + "taskType='" - + taskType - + '\'' - + ", status=" - + status - + ", inputData=" - + inputData - + ", referenceTaskName='" - + referenceTaskName - + '\'' - + ", retryCount=" - + retryCount - + ", seq=" - + seq - + ", correlationId='" - + correlationId - + '\'' - + ", pollCount=" - + pollCount - + ", taskDefName='" - + taskDefName - + '\'' - + ", scheduledTime=" - + scheduledTime - + ", startTime=" - + startTime - + ", endTime=" - + endTime - + ", updateTime=" - + updateTime - + ", startDelayInSeconds=" - + startDelayInSeconds - + ", retriedTaskId='" - + retriedTaskId - + '\'' - + ", retried=" - + retried - + ", executed=" - + executed - + ", callbackFromWorker=" - + callbackFromWorker - + ", responseTimeoutSeconds=" - + responseTimeoutSeconds - + ", workflowInstanceId='" - + workflowInstanceId - + '\'' - + ", workflowType='" - + workflowType - + '\'' - + ", taskId='" - + taskId - + '\'' - + ", reasonForIncompletion='" - + reasonForIncompletion - + '\'' - + ", callbackAfterSeconds=" - + callbackAfterSeconds - + ", workerId='" - + workerId - + '\'' - + ", outputData=" - + outputData - + ", workflowTask=" - + workflowTask - + ", domain='" - + domain - + '\'' - + ", inputMessage='" - + inputMessage - + '\'' - + ", outputMessage='" - + outputMessage - + '\'' - + ", rateLimitPerFrequency=" - + rateLimitPerFrequency - + ", rateLimitFrequencyInSeconds=" - + rateLimitFrequencyInSeconds - + ", workflowPriority=" - + workflowPriority - + ", externalInputPayloadStoragePath='" - + externalInputPayloadStoragePath - + '\'' - + ", externalOutputPayloadStoragePath='" - + externalOutputPayloadStoragePath - + '\'' 
- + ", isolationGroupId='" - + isolationGroupId - + '\'' - + ", executionNameSpace='" - + executionNameSpace - + '\'' - + ", subworkflowChanged='" - + subworkflowChanged - + '\'' - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Task task = (Task) o; - return getRetryCount() == task.getRetryCount() - && getSeq() == task.getSeq() - && getPollCount() == task.getPollCount() - && getScheduledTime() == task.getScheduledTime() - && getStartTime() == task.getStartTime() - && getEndTime() == task.getEndTime() - && getUpdateTime() == task.getUpdateTime() - && getStartDelayInSeconds() == task.getStartDelayInSeconds() - && isRetried() == task.isRetried() - && isExecuted() == task.isExecuted() - && isCallbackFromWorker() == task.isCallbackFromWorker() - && getResponseTimeoutSeconds() == task.getResponseTimeoutSeconds() - && getCallbackAfterSeconds() == task.getCallbackAfterSeconds() - && getRateLimitPerFrequency() == task.getRateLimitPerFrequency() - && getRateLimitFrequencyInSeconds() == task.getRateLimitFrequencyInSeconds() - && Objects.equals(getTaskType(), task.getTaskType()) - && getStatus() == task.getStatus() - && getIteration() == task.getIteration() - && getWorkflowPriority() == task.getWorkflowPriority() - && Objects.equals(getInputData(), task.getInputData()) - && Objects.equals(getReferenceTaskName(), task.getReferenceTaskName()) - && Objects.equals(getCorrelationId(), task.getCorrelationId()) - && Objects.equals(getTaskDefName(), task.getTaskDefName()) - && Objects.equals(getRetriedTaskId(), task.getRetriedTaskId()) - && Objects.equals(getWorkflowInstanceId(), task.getWorkflowInstanceId()) - && Objects.equals(getWorkflowType(), task.getWorkflowType()) - && Objects.equals(getTaskId(), task.getTaskId()) - && Objects.equals(getReasonForIncompletion(), task.getReasonForIncompletion()) - && Objects.equals(getWorkerId(), task.getWorkerId()) - && 
Objects.equals(getOutputData(), task.getOutputData()) - && Objects.equals(getWorkflowTask(), task.getWorkflowTask()) - && Objects.equals(getDomain(), task.getDomain()) - && Objects.equals(getInputMessage(), task.getInputMessage()) - && Objects.equals(getOutputMessage(), task.getOutputMessage()) - && Objects.equals( - getExternalInputPayloadStoragePath(), - task.getExternalInputPayloadStoragePath()) - && Objects.equals( - getExternalOutputPayloadStoragePath(), - task.getExternalOutputPayloadStoragePath()) - && Objects.equals(getIsolationGroupId(), task.getIsolationGroupId()) - && Objects.equals(getExecutionNameSpace(), task.getExecutionNameSpace()); - } - - @Override - public int hashCode() { - return Objects.hash( - getTaskType(), - getStatus(), - getInputData(), - getReferenceTaskName(), - getWorkflowPriority(), - getRetryCount(), - getSeq(), - getCorrelationId(), - getPollCount(), - getTaskDefName(), - getScheduledTime(), - getStartTime(), - getEndTime(), - getUpdateTime(), - getStartDelayInSeconds(), - getRetriedTaskId(), - isRetried(), - isExecuted(), - isCallbackFromWorker(), - getResponseTimeoutSeconds(), - getWorkflowInstanceId(), - getWorkflowType(), - getTaskId(), - getReasonForIncompletion(), - getCallbackAfterSeconds(), - getWorkerId(), - getOutputData(), - getWorkflowTask(), - getDomain(), - getInputMessage(), - getOutputMessage(), - getRateLimitPerFrequency(), - getRateLimitFrequencyInSeconds(), - getExternalInputPayloadStoragePath(), - getExternalOutputPayloadStoragePath(), - getIsolationGroupId(), - getExecutionNameSpace()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java deleted file mode 100644 index b518fbb7a..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java +++ /dev/null @@ -1,484 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import javax.validation.Valid; -import javax.validation.constraints.Email; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint; -import com.netflix.conductor.common.constraints.TaskTimeoutConstraint; -import com.netflix.conductor.common.metadata.Auditable; - -@ProtoMessage -@TaskTimeoutConstraint -@Valid -public class TaskDef extends Auditable { - - @ProtoEnum - public enum TimeoutPolicy { - RETRY, - TIME_OUT_WF, - ALERT_ONLY - } - - @ProtoEnum - public enum RetryLogic { - FIXED, - EXPONENTIAL_BACKOFF, - LINEAR_BACKOFF - } - - private static final int ONE_HOUR = 60 * 60; - - /** Unique name identifying the task. 
The name is unique across */ - @NotEmpty(message = "TaskDef name cannot be null or empty") - @ProtoField(id = 1) - private String name; - - @ProtoField(id = 2) - private String description; - - @ProtoField(id = 3) - @Min(value = 0, message = "TaskDef retryCount: {value} must be >= 0") - private int retryCount = 3; // Default - - @ProtoField(id = 4) - @NotNull - private long timeoutSeconds; - - @ProtoField(id = 5) - private List inputKeys = new ArrayList<>(); - - @ProtoField(id = 6) - private List outputKeys = new ArrayList<>(); - - @ProtoField(id = 7) - private TimeoutPolicy timeoutPolicy = TimeoutPolicy.TIME_OUT_WF; - - @ProtoField(id = 8) - private RetryLogic retryLogic = RetryLogic.FIXED; - - @ProtoField(id = 9) - private int retryDelaySeconds = 60; - - @ProtoField(id = 10) - @Min( - value = 1, - message = - "TaskDef responseTimeoutSeconds: ${validatedValue} should be minimum {value} second") - private long responseTimeoutSeconds = ONE_HOUR; - - @ProtoField(id = 11) - private Integer concurrentExecLimit; - - @ProtoField(id = 12) - private Map inputTemplate = new HashMap<>(); - - // This field is deprecated, do not use id 13. - // @ProtoField(id = 13) - // private Integer rateLimitPerSecond; - - @ProtoField(id = 14) - private Integer rateLimitPerFrequency; - - @ProtoField(id = 15) - private Integer rateLimitFrequencyInSeconds; - - @ProtoField(id = 16) - private String isolationGroupId; - - @ProtoField(id = 17) - private String executionNameSpace; - - @ProtoField(id = 18) - @OwnerEmailMandatoryConstraint - @Email(message = "ownerEmail should be valid email address") - private String ownerEmail; - - @ProtoField(id = 19) - @Min(value = 0, message = "TaskDef pollTimeoutSeconds: {value} must be >= 0") - private Integer pollTimeoutSeconds; - - @ProtoField(id = 20) - @Min(value = 1, message = "Backoff scale factor. 
Applicable for LINEAR_BACKOFF") - private Integer backoffScaleFactor = 1; - - public TaskDef() {} - - public TaskDef(String name) { - this.name = name; - } - - public TaskDef(String name, String description) { - this.name = name; - this.description = description; - } - - public TaskDef(String name, String description, int retryCount, long timeoutSeconds) { - this.name = name; - this.description = description; - this.retryCount = retryCount; - this.timeoutSeconds = timeoutSeconds; - } - - public TaskDef( - String name, - String description, - String ownerEmail, - int retryCount, - long timeoutSeconds, - long responseTimeoutSeconds) { - this.name = name; - this.description = description; - this.ownerEmail = ownerEmail; - this.retryCount = retryCount; - this.timeoutSeconds = timeoutSeconds; - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the description - */ - public String getDescription() { - return description; - } - - /** - * @param description the description to set - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * @return the retryCount - */ - public int getRetryCount() { - return retryCount; - } - - /** - * @param retryCount the retryCount to set - */ - public void setRetryCount(int retryCount) { - this.retryCount = retryCount; - } - - /** - * @return the timeoutSeconds - */ - public long getTimeoutSeconds() { - return timeoutSeconds; - } - - /** - * @param timeoutSeconds the timeoutSeconds to set - */ - public void setTimeoutSeconds(long timeoutSeconds) { - this.timeoutSeconds = timeoutSeconds; - } - - /** - * @return Returns the input keys - */ - public List getInputKeys() { - return inputKeys; - } - - /** - * @param inputKeys Set of keys that the task accepts in the input map - */ - 
public void setInputKeys(List inputKeys) { - this.inputKeys = inputKeys; - } - - /** - * @return Returns the output keys for the task when executed - */ - public List getOutputKeys() { - return outputKeys; - } - - /** - * @param outputKeys Sets the output keys - */ - public void setOutputKeys(List outputKeys) { - this.outputKeys = outputKeys; - } - - /** - * @return the timeoutPolicy - */ - public TimeoutPolicy getTimeoutPolicy() { - return timeoutPolicy; - } - - /** - * @param timeoutPolicy the timeoutPolicy to set - */ - public void setTimeoutPolicy(TimeoutPolicy timeoutPolicy) { - this.timeoutPolicy = timeoutPolicy; - } - - /** - * @return the retryLogic - */ - public RetryLogic getRetryLogic() { - return retryLogic; - } - - /** - * @param retryLogic the retryLogic to set - */ - public void setRetryLogic(RetryLogic retryLogic) { - this.retryLogic = retryLogic; - } - - /** - * @return the retryDelaySeconds - */ - public int getRetryDelaySeconds() { - return retryDelaySeconds; - } - - /** - * @return the timeout for task to send response. After this timeout, the task will be re-queued - */ - public long getResponseTimeoutSeconds() { - return responseTimeoutSeconds; - } - - /** - * @param responseTimeoutSeconds - timeout for task to send response. After this timeout, the - * task will be re-queued - */ - public void setResponseTimeoutSeconds(long responseTimeoutSeconds) { - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - /** - * @param retryDelaySeconds the retryDelaySeconds to set - */ - public void setRetryDelaySeconds(int retryDelaySeconds) { - this.retryDelaySeconds = retryDelaySeconds; - } - - /** - * @return the inputTemplate - */ - public Map getInputTemplate() { - return inputTemplate; - } - - /** - * @return rateLimitPerFrequency The max number of tasks that will be allowed to be executed per - * rateLimitFrequencyInSeconds. - */ - public Integer getRateLimitPerFrequency() { - return rateLimitPerFrequency == null ? 
0 : rateLimitPerFrequency; - } - - /** - * @param rateLimitPerFrequency The max number of tasks that will be allowed to be executed per - * rateLimitFrequencyInSeconds. Setting the value to 0 removes the rate limit - */ - public void setRateLimitPerFrequency(Integer rateLimitPerFrequency) { - this.rateLimitPerFrequency = rateLimitPerFrequency; - } - - /** - * @return rateLimitFrequencyInSeconds: The time bucket that is used to rate limit tasks based - * on {@link #getRateLimitPerFrequency()} If null or not set, then defaults to 1 second - */ - public Integer getRateLimitFrequencyInSeconds() { - return rateLimitFrequencyInSeconds == null ? 1 : rateLimitFrequencyInSeconds; - } - - /** - * @param rateLimitFrequencyInSeconds: The time window/bucket for which the rate limit needs to - * be applied. This will only have affect if {@link #getRateLimitPerFrequency()} is greater - * than zero - */ - public void setRateLimitFrequencyInSeconds(Integer rateLimitFrequencyInSeconds) { - this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds; - } - - /** - * @param concurrentExecLimit Limit of number of concurrent task that can be IN_PROGRESS at a - * given time. Seting the value to 0 removes the limit. - */ - public void setConcurrentExecLimit(Integer concurrentExecLimit) { - this.concurrentExecLimit = concurrentExecLimit; - } - - /** - * @return Limit of number of concurrent task that can be IN_PROGRESS at a given time - */ - public Integer getConcurrentExecLimit() { - return concurrentExecLimit; - } - - /** - * @return concurrency limit - */ - public int concurrencyLimit() { - return concurrentExecLimit == null ? 
0 : concurrentExecLimit; - } - - /** - * @param inputTemplate the inputTemplate to set - */ - public void setInputTemplate(Map inputTemplate) { - this.inputTemplate = inputTemplate; - } - - public String getIsolationGroupId() { - return isolationGroupId; - } - - public void setIsolationGroupId(String isolationGroupId) { - this.isolationGroupId = isolationGroupId; - } - - public String getExecutionNameSpace() { - return executionNameSpace; - } - - public void setExecutionNameSpace(String executionNameSpace) { - this.executionNameSpace = executionNameSpace; - } - - /** - * @return the email of the owner of this task definition - */ - public String getOwnerEmail() { - return ownerEmail; - } - - /** - * @param ownerEmail the owner email to set - */ - public void setOwnerEmail(String ownerEmail) { - this.ownerEmail = ownerEmail; - } - - /** - * @param pollTimeoutSeconds the poll timeout to set - */ - public void setPollTimeoutSeconds(Integer pollTimeoutSeconds) { - this.pollTimeoutSeconds = pollTimeoutSeconds; - } - - /** - * @return the poll timeout of this task definition - */ - public Integer getPollTimeoutSeconds() { - return pollTimeoutSeconds; - } - - /** - * @param backoffScaleFactor the backoff rate to set - */ - public void setBackoffScaleFactor(Integer backoffScaleFactor) { - this.backoffScaleFactor = backoffScaleFactor; - } - - /** - * @return the backoff rate of this task definition - */ - public Integer getBackoffScaleFactor() { - return backoffScaleFactor; - } - - @Override - public String toString() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TaskDef taskDef = (TaskDef) o; - return getRetryCount() == taskDef.getRetryCount() - && getTimeoutSeconds() == taskDef.getTimeoutSeconds() - && getRetryDelaySeconds() == taskDef.getRetryDelaySeconds() - && getBackoffScaleFactor() == taskDef.getBackoffScaleFactor() - && 
getResponseTimeoutSeconds() == taskDef.getResponseTimeoutSeconds() - && Objects.equals(getName(), taskDef.getName()) - && Objects.equals(getDescription(), taskDef.getDescription()) - && Objects.equals(getInputKeys(), taskDef.getInputKeys()) - && Objects.equals(getOutputKeys(), taskDef.getOutputKeys()) - && getTimeoutPolicy() == taskDef.getTimeoutPolicy() - && getRetryLogic() == taskDef.getRetryLogic() - && Objects.equals(getConcurrentExecLimit(), taskDef.getConcurrentExecLimit()) - && Objects.equals(getRateLimitPerFrequency(), taskDef.getRateLimitPerFrequency()) - && Objects.equals(getInputTemplate(), taskDef.getInputTemplate()) - && Objects.equals(getIsolationGroupId(), taskDef.getIsolationGroupId()) - && Objects.equals(getExecutionNameSpace(), taskDef.getExecutionNameSpace()) - && Objects.equals(getOwnerEmail(), taskDef.getOwnerEmail()); - } - - @Override - public int hashCode() { - - return Objects.hash( - getName(), - getDescription(), - getRetryCount(), - getTimeoutSeconds(), - getInputKeys(), - getOutputKeys(), - getTimeoutPolicy(), - getRetryLogic(), - getRetryDelaySeconds(), - getBackoffScaleFactor(), - getResponseTimeoutSeconds(), - getConcurrentExecLimit(), - getRateLimitPerFrequency(), - getInputTemplate(), - getIsolationGroupId(), - getExecutionNameSpace(), - getOwnerEmail()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java deleted file mode 100644 index 256e1da6f..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.Objects; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -/** Model that represents the task's execution log. */ -@ProtoMessage -public class TaskExecLog { - - @ProtoField(id = 1) - private String log; - - @ProtoField(id = 2) - private String taskId; - - @ProtoField(id = 3) - private long createdTime; - - public TaskExecLog() {} - - public TaskExecLog(String log) { - this.log = log; - this.createdTime = System.currentTimeMillis(); - } - - /** - * @return Task Exec Log - */ - public String getLog() { - return log; - } - - /** - * @param log The Log - */ - public void setLog(String log) { - this.log = log; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - /** - * @return the createdTime - */ - public long getCreatedTime() { - return createdTime; - } - - /** - * @param createdTime the createdTime to set - */ - public void setCreatedTime(long createdTime) { - this.createdTime = createdTime; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TaskExecLog that = (TaskExecLog) o; - return createdTime == that.createdTime - && Objects.equals(log, that.log) - && Objects.equals(taskId, that.taskId); - } - - @Override - public int hashCode() { - return Objects.hash(log, taskId, createdTime); - } -} diff --git 
a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java deleted file mode 100644 index d1628ea61..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import javax.validation.constraints.NotEmpty; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -import com.google.protobuf.Any; -import io.swagger.v3.oas.annotations.Hidden; - -/** Result of the task execution. */ -@ProtoMessage -public class TaskResult { - - @ProtoEnum - public enum Status { - IN_PROGRESS, - FAILED, - FAILED_WITH_TERMINAL_ERROR, - COMPLETED - } - - @NotEmpty(message = "Workflow Id cannot be null or empty") - @ProtoField(id = 1) - private String workflowInstanceId; - - @NotEmpty(message = "Task ID cannot be null or empty") - @ProtoField(id = 2) - private String taskId; - - @ProtoField(id = 3) - private String reasonForIncompletion; - - @ProtoField(id = 4) - private long callbackAfterSeconds; - - @ProtoField(id = 5) - private String workerId; - - @ProtoField(id = 6) - private Status status; - - @ProtoField(id = 7) - private Map outputData = new HashMap<>(); - - @ProtoField(id = 8) - @Hidden - private Any outputMessage; - - private List logs = new CopyOnWriteArrayList<>(); - - private String externalOutputPayloadStoragePath; - - private String subWorkflowId; - - public TaskResult(Task task) { - this.workflowInstanceId = task.getWorkflowInstanceId(); - this.taskId = task.getTaskId(); - this.reasonForIncompletion = task.getReasonForIncompletion(); - 
this.callbackAfterSeconds = task.getCallbackAfterSeconds(); - this.workerId = task.getWorkerId(); - this.outputData = task.getOutputData(); - this.externalOutputPayloadStoragePath = task.getExternalOutputPayloadStoragePath(); - this.subWorkflowId = task.getSubWorkflowId(); - switch (task.getStatus()) { - case CANCELED: - case COMPLETED_WITH_ERRORS: - case TIMED_OUT: - case SKIPPED: - this.status = Status.FAILED; - break; - case SCHEDULED: - this.status = Status.IN_PROGRESS; - break; - default: - this.status = Status.valueOf(task.getStatus().name()); - break; - } - } - - public TaskResult() {} - - /** - * @return Workflow instance id for which the task result is produced - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getTaskId() { - return taskId; - } - - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = StringUtils.substring(reasonForIncompletion, 0, 500); - } - - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * When set to non-zero values, the task remains in the queue for the specified seconds before - * sent back to the worker when polled. Useful for the long running task, where the task is - * updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of - * time. (delayed queue implementation) - * - * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue - * before giving it to a polling worker. 
- */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - public String getWorkerId() { - return workerId; - } - - /** - * @param workerId a free form string identifying the worker host. Could be hostname, IP Address - * or any other meaningful identifier that can help identify the host/process which executed - * the task, in case of troubleshooting. - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the status - */ - public Status getStatus() { - return status; - } - - /** - * @param status Status of the task - *

IN_PROGRESS: Use this for long running tasks, indicating the task is still in - * progress and should be checked again at a later time. e.g. the worker checks the status - * of the job in the DB, while the job is being executed by another process. - *

FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED: Terminal statuses for the task. - * Use FAILED_WITH_TERMINAL_ERROR when you do not want the task to be retried. - * @see #setCallbackAfterSeconds(long) - */ - public void setStatus(Status status) { - this.status = status; - } - - public Map getOutputData() { - return outputData; - } - - /** - * @param outputData output data to be set for the task execution result - */ - public void setOutputData(Map outputData) { - this.outputData = outputData; - } - - /** - * Adds output - * - * @param key output field - * @param value value - * @return current instance - */ - public TaskResult addOutputData(String key, Object value) { - this.outputData.put(key, value); - return this; - } - - public Any getOutputMessage() { - return outputMessage; - } - - public void setOutputMessage(Any outputMessage) { - this.outputMessage = outputMessage; - } - - /** - * @return Task execution logs - */ - public List getLogs() { - return logs; - } - - /** - * @param logs Task execution logs - */ - public void setLogs(List logs) { - this.logs = logs; - } - - /** - * @param log Log line to be added - * @return Instance of TaskResult - */ - public TaskResult log(String log) { - this.logs.add(new TaskExecLog(log)); - return this; - } - - /** - * @return the path where the task output is stored in external storage - */ - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - /** - * @param externalOutputPayloadStoragePath path in the external storage where the task output is - * stored - */ - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - public String getSubWorkflowId() { - return subWorkflowId; - } - - public void setSubWorkflowId(String subWorkflowId) { - this.subWorkflowId = subWorkflowId; - } - - @Override - public String toString() { - return "TaskResult{" - + 
"workflowInstanceId='" - + workflowInstanceId - + '\'' - + ", taskId='" - + taskId - + '\'' - + ", reasonForIncompletion='" - + reasonForIncompletion - + '\'' - + ", callbackAfterSeconds=" - + callbackAfterSeconds - + ", workerId='" - + workerId - + '\'' - + ", status=" - + status - + ", outputData=" - + outputData - + ", outputMessage=" - + outputMessage - + ", logs=" - + logs - + ", externalOutputPayloadStoragePath='" - + externalOutputPayloadStoragePath - + '\'' - + ", subWorkflowId='" - + subWorkflowId - + '\'' - + '}'; - } - - public static TaskResult complete() { - return newTaskResult(Status.COMPLETED); - } - - public static TaskResult failed() { - return newTaskResult(Status.FAILED); - } - - public static TaskResult failed(String failureReason) { - TaskResult result = newTaskResult(Status.FAILED); - result.setReasonForIncompletion(failureReason); - return result; - } - - public static TaskResult inProgress() { - return newTaskResult(Status.IN_PROGRESS); - } - - public static TaskResult newTaskResult(Status status) { - TaskResult result = new TaskResult(); - result.setStatus(status); - return result; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskType.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskType.java deleted file mode 100644 index 58df74569..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskType.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.tasks; - -import java.util.HashSet; -import java.util.Set; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; - -@ProtoEnum -public enum TaskType { - SIMPLE, - DYNAMIC, - FORK_JOIN, - FORK_JOIN_DYNAMIC, - DECISION, - SWITCH, - JOIN, - DO_WHILE, - SUB_WORKFLOW, - START_WORKFLOW, - EVENT, - WAIT, - HUMAN, - USER_DEFINED, - HTTP, - LAMBDA, - INLINE, - EXCLUSIVE_JOIN, - TERMINATE, - KAFKA_PUBLISH, - JSON_JQ_TRANSFORM, - SET_VARIABLE; - - /** - * TaskType constants representing each of the possible enumeration values. Motivation: to not - * have any hardcoded/inline strings used in the code. - */ - public static final String TASK_TYPE_DECISION = "DECISION"; - - public static final String TASK_TYPE_SWITCH = "SWITCH"; - public static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; - public static final String TASK_TYPE_JOIN = "JOIN"; - public static final String TASK_TYPE_DO_WHILE = "DO_WHILE"; - public static final String TASK_TYPE_FORK_JOIN_DYNAMIC = "FORK_JOIN_DYNAMIC"; - public static final String TASK_TYPE_EVENT = "EVENT"; - public static final String TASK_TYPE_WAIT = "WAIT"; - public static final String TASK_TYPE_HUMAN = "HUMAN"; - public static final String TASK_TYPE_SUB_WORKFLOW = "SUB_WORKFLOW"; - public static final String TASK_TYPE_START_WORKFLOW = "START_WORKFLOW"; - public static final String TASK_TYPE_FORK_JOIN = "FORK_JOIN"; - public static final String TASK_TYPE_SIMPLE = "SIMPLE"; - public static final String TASK_TYPE_HTTP = "HTTP"; - public static final String TASK_TYPE_LAMBDA = "LAMBDA"; - public static final String TASK_TYPE_INLINE = "INLINE"; - public static final String 
TASK_TYPE_EXCLUSIVE_JOIN = "EXCLUSIVE_JOIN"; - public static final String TASK_TYPE_TERMINATE = "TERMINATE"; - public static final String TASK_TYPE_KAFKA_PUBLISH = "KAFKA_PUBLISH"; - public static final String TASK_TYPE_JSON_JQ_TRANSFORM = "JSON_JQ_TRANSFORM"; - public static final String TASK_TYPE_SET_VARIABLE = "SET_VARIABLE"; - public static final String TASK_TYPE_FORK = "FORK"; - - private static final Set BUILT_IN_TASKS = new HashSet<>(); - - static { - BUILT_IN_TASKS.add(TASK_TYPE_DECISION); - BUILT_IN_TASKS.add(TASK_TYPE_SWITCH); - BUILT_IN_TASKS.add(TASK_TYPE_FORK); - BUILT_IN_TASKS.add(TASK_TYPE_JOIN); - BUILT_IN_TASKS.add(TASK_TYPE_EXCLUSIVE_JOIN); - BUILT_IN_TASKS.add(TASK_TYPE_DO_WHILE); - } - - /** - * Converts a task type string to {@link TaskType}. For an unknown string, the value is - * defaulted to {@link TaskType#USER_DEFINED}. - * - *

NOTE: Use {@link Enum#valueOf(Class, String)} if the default of USER_DEFINED is not - * necessary. - * - * @param taskType The task type string. - * @return The {@link TaskType} enum. - */ - public static TaskType of(String taskType) { - try { - return TaskType.valueOf(taskType); - } catch (IllegalArgumentException iae) { - return TaskType.USER_DEFINED; - } - } - - public static boolean isBuiltIn(String taskType) { - return BUILT_IN_TASKS.contains(taskType); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java deleted file mode 100644 index d95354ef5..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.HashMap; -import java.util.Map; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.tasks.TaskType; - -@ProtoMessage -public class DynamicForkJoinTask { - - @ProtoField(id = 1) - private String taskName; - - @ProtoField(id = 2) - private String workflowName; - - @ProtoField(id = 3) - private String referenceName; - - @ProtoField(id = 4) - private Map input = new HashMap<>(); - - @ProtoField(id = 5) - private String type = TaskType.SIMPLE.name(); - - public DynamicForkJoinTask() {} - - public DynamicForkJoinTask( - String taskName, String workflowName, String referenceName, Map input) { - super(); - this.taskName = taskName; - this.workflowName = workflowName; - this.referenceName = referenceName; - this.input = input; - } - - public DynamicForkJoinTask( - String taskName, - String workflowName, - String referenceName, - String type, - Map input) { - super(); - this.taskName = taskName; - this.workflowName = workflowName; - this.referenceName = referenceName; - this.input = input; - this.type = type; - } - - public String getTaskName() { - return taskName; - } - - public void setTaskName(String taskName) { - this.taskName = taskName; - } - - public String getWorkflowName() { - return workflowName; - } - - public void setWorkflowName(String workflowName) { - this.workflowName = workflowName; - } - - public String getReferenceName() { - return referenceName; - } - - public void setReferenceName(String referenceName) { - this.referenceName = 
referenceName; - } - - public Map getInput() { - return input; - } - - public void setInput(Map input) { - this.input = input; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java deleted file mode 100644 index f11530dc7..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -@ProtoMessage -public class DynamicForkJoinTaskList { - - @ProtoField(id = 1) - private List dynamicTasks = new ArrayList<>(); - - public void add( - String taskName, String workflowName, String referenceName, Map input) { - dynamicTasks.add(new DynamicForkJoinTask(taskName, workflowName, referenceName, input)); - } - - public void add(DynamicForkJoinTask dtask) { - dynamicTasks.add(dtask); - } - - public List getDynamicTasks() { - return dynamicTasks; - } - - public void setDynamicTasks(List dynamicTasks) { - this.dynamicTasks = dynamicTasks; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java deleted file mode 100644 index 67c1b86a7..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.Map; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -@ProtoMessage -public class RerunWorkflowRequest { - - @ProtoField(id = 1) - private String reRunFromWorkflowId; - - @ProtoField(id = 2) - private Map workflowInput; - - @ProtoField(id = 3) - private String reRunFromTaskId; - - @ProtoField(id = 4) - private Map taskInput; - - @ProtoField(id = 5) - private String correlationId; - - public String getReRunFromWorkflowId() { - return reRunFromWorkflowId; - } - - public void setReRunFromWorkflowId(String reRunFromWorkflowId) { - this.reRunFromWorkflowId = reRunFromWorkflowId; - } - - public Map getWorkflowInput() { - return workflowInput; - } - - public void setWorkflowInput(Map workflowInput) { - this.workflowInput = workflowInput; - } - - public String getReRunFromTaskId() { - return reRunFromTaskId; - } - - public void setReRunFromTaskId(String reRunFromTaskId) { - this.reRunFromTaskId = reRunFromTaskId; - } - - public Map getTaskInput() { - return taskInput; - } - - public void setTaskInput(Map taskInput) { - this.taskInput = taskInput; - } - - public String getCorrelationId() { - return correlationId; - } - - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java deleted file mode 100644 index 8540794a6..000000000 --- 
a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.Map; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -import com.google.protobuf.Any; -import io.swagger.v3.oas.annotations.Hidden; - -@ProtoMessage(toProto = false) -public class SkipTaskRequest { - - @ProtoField(id = 1) - private Map taskInput; - - @ProtoField(id = 2) - private Map taskOutput; - - @ProtoField(id = 3) - @Hidden - private Any taskInputMessage; - - @ProtoField(id = 4) - @Hidden - private Any taskOutputMessage; - - public Map getTaskInput() { - return taskInput; - } - - public void setTaskInput(Map taskInput) { - this.taskInput = taskInput; - } - - public Map getTaskOutput() { - return taskOutput; - } - - public void setTaskOutput(Map taskOutput) { - this.taskOutput = taskOutput; - } - - public Any getTaskInputMessage() { - return taskInputMessage; - } - - public void setTaskInputMessage(Any taskInputMessage) { - this.taskInputMessage = taskInputMessage; - } - - public Any getTaskOutputMessage() { - return taskOutputMessage; - } - - public void setTaskOutputMessage(Any taskOutputMessage) { - this.taskOutputMessage = taskOutputMessage; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java deleted file mode 100644 index cc01bca1a..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.HashMap; -import java.util.Map; - -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotNull; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -@ProtoMessage -public class StartWorkflowRequest { - - @ProtoField(id = 1) - @NotNull(message = "Workflow name cannot be null or empty") - private String name; - - @ProtoField(id = 2) - private Integer version; - - @ProtoField(id = 3) - private String correlationId; - - @ProtoField(id = 4) - private Map input = new HashMap<>(); - - @ProtoField(id = 5) - private Map taskToDomain = new HashMap<>(); - - @ProtoField(id = 6) - @Valid - private WorkflowDef workflowDef; - - @ProtoField(id = 7) - private String externalInputPayloadStoragePath; - - @ProtoField(id = 8) - @Min(value = 0, message = "priority: ${validatedValue} should be minimum {value}") - @Max(value = 99, message = "priority: ${validatedValue} should be maximum {value}") - private Integer priority = 0; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public StartWorkflowRequest withName(String name) { - this.name = name; - return this; - } - - public Integer getVersion() { - return version; - } - - public void setVersion(Integer version) { - this.version = version; - } - - public StartWorkflowRequest withVersion(Integer version) { - this.version = version; - return this; - } - - public String getCorrelationId() { - return 
correlationId; - } - - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - public StartWorkflowRequest withCorrelationId(String correlationId) { - this.correlationId = correlationId; - return this; - } - - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - public StartWorkflowRequest withExternalInputPayloadStoragePath( - String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - return this; - } - - public Integer getPriority() { - return priority; - } - - public void setPriority(Integer priority) { - this.priority = priority; - } - - public StartWorkflowRequest withPriority(Integer priority) { - this.priority = priority; - return this; - } - - public Map getInput() { - return input; - } - - public void setInput(Map input) { - this.input = input; - } - - public StartWorkflowRequest withInput(Map input) { - this.input = input; - return this; - } - - public Map getTaskToDomain() { - return taskToDomain; - } - - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - - public StartWorkflowRequest withTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - return this; - } - - public WorkflowDef getWorkflowDef() { - return workflowDef; - } - - public void setWorkflowDef(WorkflowDef workflowDef) { - this.workflowDef = workflowDef; - } - - public StartWorkflowRequest withWorkflowDef(WorkflowDef workflowDef) { - this.workflowDef = workflowDef; - return this; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java deleted file mode 100644 index 
816981b86..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.Map; -import java.util.Objects; - -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; - -import com.fasterxml.jackson.annotation.JsonGetter; -import com.fasterxml.jackson.annotation.JsonSetter; - -@ProtoMessage -public class SubWorkflowParams { - - @ProtoField(id = 1) - @NotNull(message = "SubWorkflowParams name cannot be null") - @NotEmpty(message = "SubWorkflowParams name cannot be empty") - private String name; - - @ProtoField(id = 2) - private Integer version; - - @ProtoField(id = 3) - private Map taskToDomain; - - // workaround as WorkflowDef cannot directly be used due to cyclic dependency issue in protobuf - // imports - @ProtoField(id = 4) - private Object workflowDefinition; - - /** - * @return the name - */ - public String getName() { - if (workflowDefinition != null) { - return getWorkflowDef().getName(); - } else { - return name; - } - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the version - */ - public Integer getVersion() { - if (workflowDefinition != null) { - return getWorkflowDef().getVersion(); - } else { - return version; - } - } - - /** - * @param version the version to set - */ - public void setVersion(Integer version) { - this.version = version; - } - - /** - * @return the taskToDomain - */ - public Map getTaskToDomain() { - return taskToDomain; - } - - /** - * @param taskToDomain the taskToDomain to 
set - */ - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - - /** - * @return the workflowDefinition as an Object - */ - public Object getWorkflowDefinition() { - return workflowDefinition; - } - - /** - * @return the workflowDefinition as a WorkflowDef - */ - @JsonGetter("workflowDefinition") - public WorkflowDef getWorkflowDef() { - return (WorkflowDef) workflowDefinition; - } - - /** - * @param workflowDef the workflowDefinition to set - */ - public void setWorkflowDefinition(Object workflowDef) { - if (!(workflowDef == null || workflowDef instanceof WorkflowDef)) { - throw new IllegalArgumentException( - "workflowDefinition must be either null or WorkflowDef"); - } - this.workflowDefinition = workflowDef; - } - - /** - * @param workflowDef the workflowDefinition to set - */ - @JsonSetter("workflowDefinition") - public void setWorkflowDef(WorkflowDef workflowDef) { - this.workflowDefinition = workflowDef; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - SubWorkflowParams that = (SubWorkflowParams) o; - return Objects.equals(getName(), that.getName()) - && Objects.equals(getVersion(), that.getVersion()) - && Objects.equals(getTaskToDomain(), that.getTaskToDomain()) - && Objects.equals(getWorkflowDefinition(), that.getWorkflowDefinition()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java deleted file mode 100644 index c91e27692..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import javax.validation.Valid; -import javax.validation.constraints.Email; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.constraints.NoSemiColonConstraint; -import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint; -import com.netflix.conductor.common.constraints.TaskReferenceNameUniqueConstraint; -import com.netflix.conductor.common.metadata.Auditable; -import com.netflix.conductor.common.metadata.tasks.TaskType; - -@ProtoMessage -@TaskReferenceNameUniqueConstraint -public class WorkflowDef extends Auditable { - - @ProtoEnum - public enum TimeoutPolicy { - TIME_OUT_WF, - ALERT_ONLY - } - - @NotEmpty(message = "WorkflowDef name cannot be null or empty") - @ProtoField(id = 1) - @NoSemiColonConstraint( - message = "Workflow name cannot contain the following set of characters: ':'") - private String name; - - @ProtoField(id = 2) - private String description; - - @ProtoField(id = 3) - private int version = 1; - - @ProtoField(id = 4) - @NotNull - @NotEmpty(message = "WorkflowTask list cannot be empty") - private List<@Valid WorkflowTask> tasks = new 
LinkedList<>(); - - @ProtoField(id = 5) - private List inputParameters = new LinkedList<>(); - - @ProtoField(id = 6) - private Map outputParameters = new HashMap<>(); - - @ProtoField(id = 7) - private String failureWorkflow; - - @ProtoField(id = 8) - @Min(value = 2, message = "workflowDef schemaVersion: {value} is only supported") - @Max(value = 2, message = "workflowDef schemaVersion: {value} is only supported") - private int schemaVersion = 2; - - // By default a workflow is restartable - @ProtoField(id = 9) - private boolean restartable = true; - - @ProtoField(id = 10) - private boolean workflowStatusListenerEnabled = false; - - @ProtoField(id = 11) - @OwnerEmailMandatoryConstraint - @Email(message = "ownerEmail should be valid email address") - private String ownerEmail; - - @ProtoField(id = 12) - private TimeoutPolicy timeoutPolicy = TimeoutPolicy.ALERT_ONLY; - - @ProtoField(id = 13) - @NotNull - private long timeoutSeconds; - - @ProtoField(id = 14) - private Map variables = new HashMap<>(); - - @ProtoField(id = 15) - private Map inputTemplate = new HashMap<>(); - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the description - */ - public String getDescription() { - return description; - } - - /** - * @param description the description to set - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * @return the tasks - */ - public List getTasks() { - return tasks; - } - - /** - * @param tasks the tasks to set - */ - public void setTasks(List<@Valid WorkflowTask> tasks) { - this.tasks = tasks; - } - - /** - * @return the inputParameters - */ - public List getInputParameters() { - return inputParameters; - } - - /** - * @param inputParameters the inputParameters to set - */ - public void setInputParameters(List inputParameters) { - this.inputParameters = 
inputParameters; - } - - /** - * @return the outputParameters - */ - public Map getOutputParameters() { - return outputParameters; - } - - /** - * @param outputParameters the outputParameters to set - */ - public void setOutputParameters(Map outputParameters) { - this.outputParameters = outputParameters; - } - - /** - * @return the version - */ - public int getVersion() { - return version; - } - - /** - * @return the failureWorkflow - */ - public String getFailureWorkflow() { - return failureWorkflow; - } - - /** - * @param failureWorkflow the failureWorkflow to set - */ - public void setFailureWorkflow(String failureWorkflow) { - this.failureWorkflow = failureWorkflow; - } - - /** - * @param version the version to set - */ - public void setVersion(int version) { - this.version = version; - } - - /** - * This method determines if the workflow is restartable or not - * - * @return true: if the workflow is restartable false: if the workflow is non restartable - */ - public boolean isRestartable() { - return restartable; - } - - /** - * This method is called only when the workflow definition is created - * - * @param restartable true: if the workflow is restartable false: if the workflow is non - * restartable - */ - public void setRestartable(boolean restartable) { - this.restartable = restartable; - } - - /** - * @return the schemaVersion - */ - public int getSchemaVersion() { - return schemaVersion; - } - - /** - * @param schemaVersion the schemaVersion to set - */ - public void setSchemaVersion(int schemaVersion) { - this.schemaVersion = schemaVersion; - } - - /** - * @return true is workflow listener will be invoked when workflow gets into a terminal state - */ - public boolean isWorkflowStatusListenerEnabled() { - return workflowStatusListenerEnabled; - } - - /** - * Specify if workflow listener is enabled to invoke a callback for completed or terminated - * workflows - * - * @param workflowStatusListenerEnabled - */ - public void 
setWorkflowStatusListenerEnabled(boolean workflowStatusListenerEnabled) { - this.workflowStatusListenerEnabled = workflowStatusListenerEnabled; - } - - /** - * @return the email of the owner of this workflow definition - */ - public String getOwnerEmail() { - return ownerEmail; - } - - /** - * @param ownerEmail the owner email to set - */ - public void setOwnerEmail(String ownerEmail) { - this.ownerEmail = ownerEmail; - } - - /** - * @return the timeoutPolicy - */ - public TimeoutPolicy getTimeoutPolicy() { - return timeoutPolicy; - } - - /** - * @param timeoutPolicy the timeoutPolicy to set - */ - public void setTimeoutPolicy(TimeoutPolicy timeoutPolicy) { - this.timeoutPolicy = timeoutPolicy; - } - - /** - * @return the time after which a workflow is deemed to have timed out - */ - public long getTimeoutSeconds() { - return timeoutSeconds; - } - - /** - * @param timeoutSeconds the timeout in seconds to set - */ - public void setTimeoutSeconds(long timeoutSeconds) { - this.timeoutSeconds = timeoutSeconds; - } - - /** - * @return the global workflow variables - */ - public Map getVariables() { - return variables; - } - - /** - * @param variables the set of global workflow variables to set - */ - public void setVariables(Map variables) { - this.variables = variables; - } - - public Map getInputTemplate() { - return inputTemplate; - } - - public void setInputTemplate(Map inputTemplate) { - this.inputTemplate = inputTemplate; - } - - public String key() { - return getKey(name, version); - } - - public static String getKey(String name, int version) { - return name + "." 
+ version; - } - - public boolean containsType(String taskType) { - return collectTasks().stream().anyMatch(t -> t.getType().equals(taskType)); - } - - public WorkflowTask getNextTask(String taskReferenceName) { - WorkflowTask workflowTask = getTaskByRefName(taskReferenceName); - if (workflowTask != null && TaskType.TERMINATE.name().equals(workflowTask.getType())) { - return null; - } - - Iterator iterator = tasks.iterator(); - while (iterator.hasNext()) { - WorkflowTask task = iterator.next(); - if (task.getTaskReferenceName().equals(taskReferenceName)) { - // If taskReferenceName matches, break out - break; - } - WorkflowTask nextTask = task.next(taskReferenceName, null); - if (nextTask != null) { - return nextTask; - } else if (TaskType.DO_WHILE.name().equals(task.getType()) - && !task.getTaskReferenceName().equals(taskReferenceName) - && task.has(taskReferenceName)) { - // If the task is child of Loop Task and at last position, return null. - return null; - } - - if (task.has(taskReferenceName)) { - break; - } - } - if (iterator.hasNext()) { - return iterator.next(); - } - return null; - } - - public WorkflowTask getTaskByRefName(String taskReferenceName) { - return collectTasks().stream() - .filter( - workflowTask -> - workflowTask.getTaskReferenceName().equals(taskReferenceName)) - .findFirst() - .orElse(null); - } - - public List collectTasks() { - List tasks = new LinkedList<>(); - for (WorkflowTask workflowTask : this.tasks) { - tasks.addAll(workflowTask.collectTasks()); - } - return tasks; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - WorkflowDef that = (WorkflowDef) o; - return getVersion() == that.getVersion() - && getSchemaVersion() == that.getSchemaVersion() - && Objects.equals(getName(), that.getName()) - && Objects.equals(getDescription(), that.getDescription()) - && Objects.equals(getTasks(), that.getTasks()) - && 
Objects.equals(getInputParameters(), that.getInputParameters()) - && Objects.equals(getOutputParameters(), that.getOutputParameters()) - && Objects.equals(getFailureWorkflow(), that.getFailureWorkflow()) - && Objects.equals(getOwnerEmail(), that.getOwnerEmail()) - && Objects.equals(getTimeoutSeconds(), that.getTimeoutSeconds()); - } - - @Override - public int hashCode() { - return Objects.hash( - getName(), - getDescription(), - getVersion(), - getTasks(), - getInputParameters(), - getOutputParameters(), - getFailureWorkflow(), - getSchemaVersion(), - getOwnerEmail(), - getTimeoutSeconds()); - } - - @Override - public String toString() { - return "WorkflowDef{" - + "name='" - + name - + '\'' - + ", description='" - + description - + '\'' - + ", version=" - + version - + ", tasks=" - + tasks - + ", inputParameters=" - + inputParameters - + ", outputParameters=" - + outputParameters - + ", failureWorkflow='" - + failureWorkflow - + '\'' - + ", schemaVersion=" - + schemaVersion - + ", restartable=" - + restartable - + ", workflowStatusListenerEnabled=" - + workflowStatusListenerEnabled - + ", timeoutSeconds=" - + timeoutSeconds - + '}'; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java deleted file mode 100644 index f6cf4fc48..000000000 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ /dev/null @@ -1,762 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.metadata.workflow; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.PositiveOrZero; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; - -/** - * This is the task definition definied as part of the {@link WorkflowDef}. 
The tasks definied in - * the Workflow definition are saved as part of {@link WorkflowDef#getTasks} - */ -@ProtoMessage -public class WorkflowTask { - - @ProtoField(id = 1) - @NotEmpty(message = "WorkflowTask name cannot be empty or null") - private String name; - - @ProtoField(id = 2) - @NotEmpty(message = "WorkflowTask taskReferenceName name cannot be empty or null") - private String taskReferenceName; - - @ProtoField(id = 3) - private String description; - - @ProtoField(id = 4) - private Map inputParameters = new HashMap<>(); - - @ProtoField(id = 5) - private String type = TaskType.SIMPLE.name(); - - @ProtoField(id = 6) - private String dynamicTaskNameParam; - - @Deprecated - @ProtoField(id = 7) - private String caseValueParam; - - @Deprecated - @ProtoField(id = 8) - private String caseExpression; - - @ProtoField(id = 22) - private String scriptExpression; - - @ProtoMessage(wrapper = true) - public static class WorkflowTaskList { - - public List getTasks() { - return tasks; - } - - public void setTasks(List tasks) { - this.tasks = tasks; - } - - @ProtoField(id = 1) - private List tasks; - } - - // Populates for the tasks of the decision type - @ProtoField(id = 9) - private Map> decisionCases = new LinkedHashMap<>(); - - @Deprecated private String dynamicForkJoinTasksParam; - - @ProtoField(id = 10) - private String dynamicForkTasksParam; - - @ProtoField(id = 11) - private String dynamicForkTasksInputParamName; - - @ProtoField(id = 12) - private List<@Valid WorkflowTask> defaultCase = new LinkedList<>(); - - @ProtoField(id = 13) - private List<@Valid List<@Valid WorkflowTask>> forkTasks = new LinkedList<>(); - - @ProtoField(id = 14) - @PositiveOrZero - private int startDelay; // No. of seconds (at-least) to wait before starting a task. 
- - @ProtoField(id = 15) - @Valid - private SubWorkflowParams subWorkflowParam; - - @ProtoField(id = 16) - private List joinOn = new LinkedList<>(); - - @ProtoField(id = 17) - private String sink; - - @ProtoField(id = 18) - private boolean optional = false; - - @ProtoField(id = 19) - private TaskDef taskDefinition; - - @ProtoField(id = 20) - private Boolean rateLimited; - - @ProtoField(id = 21) - private List defaultExclusiveJoinTask = new LinkedList<>(); - - @ProtoField(id = 23) - private Boolean asyncComplete = false; - - @ProtoField(id = 24) - private String loopCondition; - - @ProtoField(id = 25) - private List loopOver = new LinkedList<>(); - - @ProtoField(id = 26) - private Integer retryCount; - - @ProtoField(id = 27) - private String evaluatorType; - - @ProtoField(id = 28) - private String expression; - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the taskReferenceName - */ - public String getTaskReferenceName() { - return taskReferenceName; - } - - /** - * @param taskReferenceName the taskReferenceName to set - */ - public void setTaskReferenceName(String taskReferenceName) { - this.taskReferenceName = taskReferenceName; - } - - /** - * @return the description - */ - public String getDescription() { - return description; - } - - /** - * @param description the description to set - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * @return the inputParameters - */ - public Map getInputParameters() { - return inputParameters; - } - - /** - * @param inputParameters the inputParameters to set - */ - public void setInputParameters(Map inputParameters) { - this.inputParameters = inputParameters; - } - - /** - * @return the type - */ - public String getType() { - return type; - } - - public void setWorkflowTaskType(TaskType type) { - this.type = 
type.name(); - } - - /** - * @param type the type to set - */ - public void setType(@NotEmpty(message = "WorkTask type cannot be null or empty") String type) { - this.type = type; - } - - /** - * @return the decisionCases - */ - public Map> getDecisionCases() { - return decisionCases; - } - - /** - * @param decisionCases the decisionCases to set - */ - public void setDecisionCases(Map> decisionCases) { - this.decisionCases = decisionCases; - } - - /** - * @return the defaultCase - */ - public List getDefaultCase() { - return defaultCase; - } - - /** - * @param defaultCase the defaultCase to set - */ - public void setDefaultCase(List defaultCase) { - this.defaultCase = defaultCase; - } - - /** - * @return the forkTasks - */ - public List> getForkTasks() { - return forkTasks; - } - - /** - * @param forkTasks the forkTasks to set - */ - public void setForkTasks(List> forkTasks) { - this.forkTasks = forkTasks; - } - - /** - * @return the startDelay in seconds - */ - public int getStartDelay() { - return startDelay; - } - - /** - * @param startDelay the startDelay to set - */ - public void setStartDelay(int startDelay) { - this.startDelay = startDelay; - } - - /** - * @return the retryCount - */ - public Integer getRetryCount() { - return retryCount; - } - - /** - * @param retryCount the retryCount to set - */ - public void setRetryCount(final Integer retryCount) { - this.retryCount = retryCount; - } - - /** - * @return the dynamicTaskNameParam - */ - public String getDynamicTaskNameParam() { - return dynamicTaskNameParam; - } - - /** - * @param dynamicTaskNameParam the dynamicTaskNameParam to set to be used by DYNAMIC tasks - */ - public void setDynamicTaskNameParam(String dynamicTaskNameParam) { - this.dynamicTaskNameParam = dynamicTaskNameParam; - } - - /** - * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link - * WorkflowTask#getExpression()} combination. 
- * @return the caseValueParam - */ - @Deprecated - public String getCaseValueParam() { - return caseValueParam; - } - - @Deprecated - public String getDynamicForkJoinTasksParam() { - return dynamicForkJoinTasksParam; - } - - @Deprecated - public void setDynamicForkJoinTasksParam(String dynamicForkJoinTasksParam) { - this.dynamicForkJoinTasksParam = dynamicForkJoinTasksParam; - } - - public String getDynamicForkTasksParam() { - return dynamicForkTasksParam; - } - - public void setDynamicForkTasksParam(String dynamicForkTasksParam) { - this.dynamicForkTasksParam = dynamicForkTasksParam; - } - - public String getDynamicForkTasksInputParamName() { - return dynamicForkTasksInputParamName; - } - - public void setDynamicForkTasksInputParamName(String dynamicForkTasksInputParamName) { - this.dynamicForkTasksInputParamName = dynamicForkTasksInputParamName; - } - - /** - * @param caseValueParam the caseValueParam to set - * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link - * WorkflowTask#getExpression()} combination. - */ - @Deprecated - public void setCaseValueParam(String caseValueParam) { - this.caseValueParam = caseValueParam; - } - - /** - * @return A javascript expression for decision cases. The result should be a scalar value that - * is used to decide the case branches. - * @see #getDecisionCases() - * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link - * WorkflowTask#getExpression()} combination. - */ - @Deprecated - public String getCaseExpression() { - return caseExpression; - } - - /** - * @param caseExpression A javascript expression for decision cases. The result should be a - * scalar value that is used to decide the case branches. - * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link - * WorkflowTask#getExpression()} combination. 
- */ - @Deprecated - public void setCaseExpression(String caseExpression) { - this.caseExpression = caseExpression; - } - - public String getScriptExpression() { - return scriptExpression; - } - - public void setScriptExpression(String expression) { - this.scriptExpression = expression; - } - - /** - * @return the subWorkflow - */ - public SubWorkflowParams getSubWorkflowParam() { - return subWorkflowParam; - } - - /** - * @param subWorkflow the subWorkflowParam to set - */ - public void setSubWorkflowParam(SubWorkflowParams subWorkflow) { - this.subWorkflowParam = subWorkflow; - } - - /** - * @return the joinOn - */ - public List getJoinOn() { - return joinOn; - } - - /** - * @param joinOn the joinOn to set - */ - public void setJoinOn(List joinOn) { - this.joinOn = joinOn; - } - - /** - * @return the loopCondition - */ - public String getLoopCondition() { - return loopCondition; - } - - /** - * @param loopCondition the expression to set - */ - public void setLoopCondition(String loopCondition) { - this.loopCondition = loopCondition; - } - - /** - * @return the loopOver - */ - public List getLoopOver() { - return loopOver; - } - - /** - * @param loopOver the loopOver to set - */ - public void setLoopOver(List loopOver) { - this.loopOver = loopOver; - } - - /** - * @return Sink value for the EVENT type of task - */ - public String getSink() { - return sink; - } - - /** - * @param sink Name of the sink - */ - public void setSink(String sink) { - this.sink = sink; - } - - /** - * @return whether wait for an external event to complete the task, for EVENT and HTTP tasks - */ - public Boolean isAsyncComplete() { - return asyncComplete; - } - - public void setAsyncComplete(Boolean asyncComplete) { - this.asyncComplete = asyncComplete; - } - - /** - * @return If the task is optional. When set to true, the workflow execution continues even when - * the task is in failed status. 
- */ - public boolean isOptional() { - return optional; - } - - /** - * @return Task definition associated to the Workflow Task - */ - public TaskDef getTaskDefinition() { - return taskDefinition; - } - - /** - * @param taskDefinition Task definition - */ - public void setTaskDefinition(TaskDef taskDefinition) { - this.taskDefinition = taskDefinition; - } - - /** - * @param optional when set to true, the task is marked as optional - */ - public void setOptional(boolean optional) { - this.optional = optional; - } - - public Boolean getRateLimited() { - return rateLimited; - } - - public void setRateLimited(Boolean rateLimited) { - this.rateLimited = rateLimited; - } - - public Boolean isRateLimited() { - return rateLimited != null && rateLimited; - } - - public List getDefaultExclusiveJoinTask() { - return defaultExclusiveJoinTask; - } - - public void setDefaultExclusiveJoinTask(List defaultExclusiveJoinTask) { - this.defaultExclusiveJoinTask = defaultExclusiveJoinTask; - } - - /** - * @return the evaluatorType - */ - public String getEvaluatorType() { - return evaluatorType; - } - - /** - * @param evaluatorType the evaluatorType to set - */ - public void setEvaluatorType(String evaluatorType) { - this.evaluatorType = evaluatorType; - } - - /** - * @return An evaluation expression for switch cases evaluated by corresponding evaluator. The - * result should be a scalar value that is used to decide the case branches. 
- * @see #getDecisionCases() - */ - public String getExpression() { - return expression; - } - - /** - * @param expression the expression to set - */ - public void setExpression(String expression) { - this.expression = expression; - } - - private Collection> children() { - Collection> workflowTaskLists = new LinkedList<>(); - - switch (TaskType.of(type)) { - case DECISION: - case SWITCH: - workflowTaskLists.addAll(decisionCases.values()); - workflowTaskLists.add(defaultCase); - break; - case FORK_JOIN: - workflowTaskLists.addAll(forkTasks); - break; - case DO_WHILE: - workflowTaskLists.add(loopOver); - break; - default: - break; - } - return workflowTaskLists; - } - - public List collectTasks() { - List tasks = new LinkedList<>(); - tasks.add(this); - for (List workflowTaskList : children()) { - for (WorkflowTask workflowTask : workflowTaskList) { - tasks.addAll(workflowTask.collectTasks()); - } - } - return tasks; - } - - public WorkflowTask next(String taskReferenceName, WorkflowTask parent) { - TaskType taskType = TaskType.of(type); - - switch (taskType) { - case DO_WHILE: - case DECISION: - case SWITCH: - for (List workflowTasks : children()) { - Iterator iterator = workflowTasks.iterator(); - while (iterator.hasNext()) { - WorkflowTask task = iterator.next(); - if (task.getTaskReferenceName().equals(taskReferenceName)) { - break; - } - WorkflowTask nextTask = task.next(taskReferenceName, this); - if (nextTask != null) { - return nextTask; - } - if (task.has(taskReferenceName)) { - break; - } - } - if (iterator.hasNext()) { - return iterator.next(); - } - } - if (taskType == TaskType.DO_WHILE && this.has(taskReferenceName)) { - // come here means this is DO_WHILE task and `taskReferenceName` is the last - // task in - // this DO_WHILE task, because DO_WHILE task need to be executed to decide - // whether to - // schedule next iteration, so we just return the DO_WHILE task, and then ignore - // generating this task again in deciderService.getNextTask() - return 
this; - } - break; - case FORK_JOIN: - boolean found = false; - for (List workflowTasks : children()) { - Iterator iterator = workflowTasks.iterator(); - while (iterator.hasNext()) { - WorkflowTask task = iterator.next(); - if (task.getTaskReferenceName().equals(taskReferenceName)) { - found = true; - break; - } - WorkflowTask nextTask = task.next(taskReferenceName, this); - if (nextTask != null) { - return nextTask; - } - if (task.has(taskReferenceName)) { - break; - } - } - if (iterator.hasNext()) { - return iterator.next(); - } - if (found && parent != null) { - return parent.next( - this.taskReferenceName, - parent); // we need to return join task... -- get my sibling from my - // parent.. - } - } - break; - case DYNAMIC: - case TERMINATE: - case SIMPLE: - return null; - default: - break; - } - return null; - } - - public boolean has(String taskReferenceName) { - if (this.getTaskReferenceName().equals(taskReferenceName)) { - return true; - } - - switch (TaskType.of(type)) { - case DECISION: - case SWITCH: - case DO_WHILE: - case FORK_JOIN: - for (List childx : children()) { - for (WorkflowTask child : childx) { - if (child.has(taskReferenceName)) { - return true; - } - } - } - break; - default: - break; - } - return false; - } - - public WorkflowTask get(String taskReferenceName) { - - if (this.getTaskReferenceName().equals(taskReferenceName)) { - return this; - } - for (List childx : children()) { - for (WorkflowTask child : childx) { - WorkflowTask found = child.get(taskReferenceName); - if (found != null) { - return found; - } - } - } - return null; - } - - @Override - public String toString() { - return name + "/" + taskReferenceName; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - WorkflowTask that = (WorkflowTask) o; - return getStartDelay() == that.getStartDelay() - && isOptional() == that.isOptional() - && Objects.equals(getName(), 
that.getName()) - && Objects.equals(getTaskReferenceName(), that.getTaskReferenceName()) - && Objects.equals(getDescription(), that.getDescription()) - && Objects.equals(getInputParameters(), that.getInputParameters()) - && Objects.equals(getType(), that.getType()) - && Objects.equals(getDynamicTaskNameParam(), that.getDynamicTaskNameParam()) - && Objects.equals(getCaseValueParam(), that.getCaseValueParam()) - && Objects.equals(getEvaluatorType(), that.getEvaluatorType()) - && Objects.equals(getExpression(), that.getExpression()) - && Objects.equals(getCaseExpression(), that.getCaseExpression()) - && Objects.equals(getDecisionCases(), that.getDecisionCases()) - && Objects.equals( - getDynamicForkJoinTasksParam(), that.getDynamicForkJoinTasksParam()) - && Objects.equals(getDynamicForkTasksParam(), that.getDynamicForkTasksParam()) - && Objects.equals( - getDynamicForkTasksInputParamName(), - that.getDynamicForkTasksInputParamName()) - && Objects.equals(getDefaultCase(), that.getDefaultCase()) - && Objects.equals(getForkTasks(), that.getForkTasks()) - && Objects.equals(getSubWorkflowParam(), that.getSubWorkflowParam()) - && Objects.equals(getJoinOn(), that.getJoinOn()) - && Objects.equals(getSink(), that.getSink()) - && Objects.equals(isAsyncComplete(), that.isAsyncComplete()) - && Objects.equals(getDefaultExclusiveJoinTask(), that.getDefaultExclusiveJoinTask()) - && Objects.equals(getRetryCount(), that.getRetryCount()); - } - - @Override - public int hashCode() { - - return Objects.hash( - getName(), - getTaskReferenceName(), - getDescription(), - getInputParameters(), - getType(), - getDynamicTaskNameParam(), - getCaseValueParam(), - getCaseExpression(), - getEvaluatorType(), - getExpression(), - getDecisionCases(), - getDynamicForkJoinTasksParam(), - getDynamicForkTasksParam(), - getDynamicForkTasksInputParamName(), - getDefaultCase(), - getForkTasks(), - getStartDelay(), - getSubWorkflowParam(), - getJoinOn(), - getSink(), - isAsyncComplete(), - isOptional(), - 
getDefaultExclusiveJoinTask(), - getRetryCount()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/model/BulkResponse.java b/common/src/main/java/com/netflix/conductor/common/model/BulkResponse.java deleted file mode 100644 index b0f5b38e6..000000000 --- a/common/src/main/java/com/netflix/conductor/common/model/BulkResponse.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.model; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * Response object to return a list of succeeded entities and a map of failed ones, including error - * message, for the bulk request. - */ -public class BulkResponse { - - /** Key - entityId Value - error message processing this entity */ - private final Map bulkErrorResults; - - private final List bulkSuccessfulResults; - private final String message = "Bulk Request has been processed."; - - public BulkResponse() { - this.bulkSuccessfulResults = new ArrayList<>(); - this.bulkErrorResults = new HashMap<>(); - } - - public List getBulkSuccessfulResults() { - return bulkSuccessfulResults; - } - - public Map getBulkErrorResults() { - return bulkErrorResults; - } - - public void appendSuccessResponse(String id) { - bulkSuccessfulResults.add(id); - } - - public void appendFailedResponse(String id, String errorMessage) { - bulkErrorResults.put(id, errorMessage); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof BulkResponse)) { - return false; - } - BulkResponse that = (BulkResponse) o; - return Objects.equals(bulkSuccessfulResults, that.bulkSuccessfulResults) - && Objects.equals(bulkErrorResults, that.bulkErrorResults); - } - - @Override - public int hashCode() { - return Objects.hash(bulkSuccessfulResults, bulkErrorResults, message); - } - - @Override - public String toString() { - return "BulkResponse{" - + "bulkSuccessfulResults=" - + bulkSuccessfulResults - + ", bulkErrorResults=" - + bulkErrorResults - + 
", message='" - + message - + '\'' - + '}'; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/run/ExternalStorageLocation.java b/common/src/main/java/com/netflix/conductor/common/run/ExternalStorageLocation.java deleted file mode 100644 index 5c3071613..000000000 --- a/common/src/main/java/com/netflix/conductor/common/run/ExternalStorageLocation.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -/** - * Describes the location where the JSON payload is stored in external storage. - * - *

The location is described using the following fields: - * - *

    - *
  • uri: The uri of the json file in external storage. - *
  • path: The relative path of the file in external storage. - *
- */ -public class ExternalStorageLocation { - - private String uri; - private String path; - - public String getUri() { - return uri; - } - - public void setUri(String uri) { - this.uri = uri; - } - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } - - @Override - public String toString() { - return "ExternalStorageLocation{" + "uri='" + uri + '\'' + ", path='" + path + '\'' + '}'; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/run/SearchResult.java b/common/src/main/java/com/netflix/conductor/common/run/SearchResult.java deleted file mode 100644 index 72be415a9..000000000 --- a/common/src/main/java/com/netflix/conductor/common/run/SearchResult.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -import java.util.List; - -public class SearchResult { - - private long totalHits; - - private List results; - - public SearchResult() {} - - public SearchResult(long totalHits, List results) { - super(); - this.totalHits = totalHits; - this.results = results; - } - - /** - * @return the totalHits - */ - public long getTotalHits() { - return totalHits; - } - - /** - * @return the results - */ - public List getResults() { - return results; - } - - /** - * @param totalHits the totalHits to set - */ - public void setTotalHits(long totalHits) { - this.totalHits = totalHits; - } - - /** - * @param results the results to set - */ - public void setResults(List results) { - this.results = results; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java b/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java deleted file mode 100644 index 9823f8b85..000000000 --- a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java +++ /dev/null @@ -1,445 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Objects; -import java.util.TimeZone; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.utils.SummaryUtil; - -@ProtoMessage -public class TaskSummary { - - /** The time should be stored as GMT */ - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - - @ProtoField(id = 1) - private String workflowId; - - @ProtoField(id = 2) - private String workflowType; - - @ProtoField(id = 3) - private String correlationId; - - @ProtoField(id = 4) - private String scheduledTime; - - @ProtoField(id = 5) - private String startTime; - - @ProtoField(id = 6) - private String updateTime; - - @ProtoField(id = 7) - private String endTime; - - @ProtoField(id = 8) - private Task.Status status; - - @ProtoField(id = 9) - private String reasonForIncompletion; - - @ProtoField(id = 10) - private long executionTime; - - @ProtoField(id = 11) - private long queueWaitTime; - - @ProtoField(id = 12) - private String taskDefName; - - @ProtoField(id = 13) - private String taskType; - - @ProtoField(id = 14) - private String input; - - @ProtoField(id = 15) - private String output; - - @ProtoField(id = 16) - private String taskId; - - @ProtoField(id = 17) - private String externalInputPayloadStoragePath; - - @ProtoField(id = 18) - private String 
externalOutputPayloadStoragePath; - - @ProtoField(id = 19) - private int workflowPriority; - - public TaskSummary() {} - - public TaskSummary(Task task) { - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - sdf.setTimeZone(GMT); - - this.taskId = task.getTaskId(); - this.taskDefName = task.getTaskDefName(); - this.taskType = task.getTaskType(); - this.workflowId = task.getWorkflowInstanceId(); - this.workflowType = task.getWorkflowType(); - this.workflowPriority = task.getWorkflowPriority(); - this.correlationId = task.getCorrelationId(); - this.scheduledTime = sdf.format(new Date(task.getScheduledTime())); - this.startTime = sdf.format(new Date(task.getStartTime())); - this.updateTime = sdf.format(new Date(task.getUpdateTime())); - this.endTime = sdf.format(new Date(task.getEndTime())); - this.status = task.getStatus(); - this.reasonForIncompletion = task.getReasonForIncompletion(); - this.queueWaitTime = task.getQueueWaitTime(); - if (task.getInputData() != null) { - this.input = SummaryUtil.serializeInputOutput(task.getInputData()); - } - - if (task.getOutputData() != null) { - this.output = SummaryUtil.serializeInputOutput(task.getOutputData()); - } - - if (task.getEndTime() > 0) { - this.executionTime = task.getEndTime() - task.getStartTime(); - } - - if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) { - this.externalInputPayloadStoragePath = task.getExternalInputPayloadStoragePath(); - } - if (StringUtils.isNotBlank(task.getExternalOutputPayloadStoragePath())) { - this.externalOutputPayloadStoragePath = task.getExternalOutputPayloadStoragePath(); - } - } - - /** - * @return the workflowId - */ - public String getWorkflowId() { - return workflowId; - } - - /** - * @param workflowId the workflowId to set - */ - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - - /** - * @return the workflowType - */ - public String getWorkflowType() { - return workflowType; - } - - /** - * 
@param workflowType the workflowType to set - */ - public void setWorkflowType(String workflowType) { - this.workflowType = workflowType; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlationId to set - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * @return the scheduledTime - */ - public String getScheduledTime() { - return scheduledTime; - } - - /** - * @param scheduledTime the scheduledTime to set - */ - public void setScheduledTime(String scheduledTime) { - this.scheduledTime = scheduledTime; - } - - /** - * @return the startTime - */ - public String getStartTime() { - return startTime; - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(String startTime) { - this.startTime = startTime; - } - - /** - * @return the updateTime - */ - public String getUpdateTime() { - return updateTime; - } - - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(String updateTime) { - this.updateTime = updateTime; - } - - /** - * @return the endTime - */ - public String getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(String endTime) { - this.endTime = endTime; - } - - /** - * @return the status - */ - public Status getStatus() { - return status; - } - - /** - * @param status the status to set - */ - public void setStatus(Status status) { - this.status = status; - } - - /** - * @return the reasonForIncompletion - */ - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - /** - * @param reasonForIncompletion the reasonForIncompletion to set - */ - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - /** - * @return the executionTime - */ - public long getExecutionTime() { - return 
executionTime; - } - - /** - * @param executionTime the executionTime to set - */ - public void setExecutionTime(long executionTime) { - this.executionTime = executionTime; - } - - /** - * @return the queueWaitTime - */ - public long getQueueWaitTime() { - return queueWaitTime; - } - - /** - * @param queueWaitTime the queueWaitTime to set - */ - public void setQueueWaitTime(long queueWaitTime) { - this.queueWaitTime = queueWaitTime; - } - - /** - * @return the taskDefName - */ - public String getTaskDefName() { - return taskDefName; - } - - /** - * @param taskDefName the taskDefName to set - */ - public void setTaskDefName(String taskDefName) { - this.taskDefName = taskDefName; - } - - /** - * @return the taskType - */ - public String getTaskType() { - return taskType; - } - - /** - * @param taskType the taskType to set - */ - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - /** - * @return input to the task - */ - public String getInput() { - return input; - } - - /** - * @param input input to the task - */ - public void setInput(String input) { - this.input = input; - } - - /** - * @return output of the task - */ - public String getOutput() { - return output; - } - - /** - * @param output Task output - */ - public void setOutput(String output) { - this.output = output; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - /** - * @return the external storage path for the task input payload - */ - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - /** - * @param externalInputPayloadStoragePath the external storage path where the task input payload - * is stored - */ - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; 
- } - - /** - * @return the external storage path for the task output payload - */ - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - /** - * @param externalOutputPayloadStoragePath the external storage path where the task output - * payload is stored - */ - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - /** - * @return the priority defined on workflow - */ - public int getWorkflowPriority() { - return workflowPriority; - } - - /** - * @param workflowPriority Priority defined for workflow - */ - public void setWorkflowPriority(int workflowPriority) { - this.workflowPriority = workflowPriority; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TaskSummary that = (TaskSummary) o; - return getExecutionTime() == that.getExecutionTime() - && getQueueWaitTime() == that.getQueueWaitTime() - && getWorkflowPriority() == that.getWorkflowPriority() - && getWorkflowId().equals(that.getWorkflowId()) - && getWorkflowType().equals(that.getWorkflowType()) - && Objects.equals(getCorrelationId(), that.getCorrelationId()) - && getScheduledTime().equals(that.getScheduledTime()) - && Objects.equals(getStartTime(), that.getStartTime()) - && Objects.equals(getUpdateTime(), that.getUpdateTime()) - && Objects.equals(getEndTime(), that.getEndTime()) - && getStatus() == that.getStatus() - && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion()) - && Objects.equals(getTaskDefName(), that.getTaskDefName()) - && getTaskType().equals(that.getTaskType()) - && getTaskId().equals(that.getTaskId()); - } - - @Override - public int hashCode() { - return Objects.hash( - getWorkflowId(), - getWorkflowType(), - getCorrelationId(), - getScheduledTime(), - getStartTime(), - getUpdateTime(), - 
getEndTime(), - getStatus(), - getReasonForIncompletion(), - getExecutionTime(), - getQueueWaitTime(), - getTaskDefName(), - getTaskType(), - getTaskId(), - getWorkflowPriority()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java deleted file mode 100644 index 0afc4d947..000000000 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ /dev/null @@ -1,573 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -import java.util.*; -import java.util.stream.Collectors; - -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.annotations.protogen.ProtoEnum; -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.metadata.Auditable; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; - -@ProtoMessage -public class Workflow extends Auditable { - - @ProtoEnum - public enum WorkflowStatus { - RUNNING(false, false), - COMPLETED(true, true), - FAILED(true, false), - TIMED_OUT(true, false), - TERMINATED(true, false), - PAUSED(false, true); - - private final boolean terminal; - - private final boolean successful; - - WorkflowStatus(boolean terminal, boolean successful) { - this.terminal = terminal; - this.successful = successful; - } - - public boolean isTerminal() { - return terminal; - } - - public boolean isSuccessful() { - return successful; - } - } - - @ProtoField(id = 1) - private WorkflowStatus status = WorkflowStatus.RUNNING; - - @ProtoField(id = 2) - private long endTime; - - @ProtoField(id = 3) - private String workflowId; - - @ProtoField(id = 4) - private String parentWorkflowId; - - @ProtoField(id = 5) - private String parentWorkflowTaskId; - - @ProtoField(id = 6) - private List tasks = new LinkedList<>(); - - @ProtoField(id = 8) - private Map input = new HashMap<>(); - - @ProtoField(id = 9) - private Map output = new 
HashMap<>(); - - // ids 10,11 are reserved - - @ProtoField(id = 12) - private String correlationId; - - @ProtoField(id = 13) - private String reRunFromWorkflowId; - - @ProtoField(id = 14) - private String reasonForIncompletion; - - // id 15 is reserved - - @ProtoField(id = 16) - private String event; - - @ProtoField(id = 17) - private Map taskToDomain = new HashMap<>(); - - @ProtoField(id = 18) - private Set failedReferenceTaskNames = new HashSet<>(); - - @ProtoField(id = 19) - private WorkflowDef workflowDefinition; - - @ProtoField(id = 20) - private String externalInputPayloadStoragePath; - - @ProtoField(id = 21) - private String externalOutputPayloadStoragePath; - - @ProtoField(id = 22) - @Min(value = 0, message = "workflow priority: ${validatedValue} should be minimum {value}") - @Max(value = 99, message = "workflow priority: ${validatedValue} should be maximum {value}") - private int priority; - - @ProtoField(id = 23) - private Map variables = new HashMap<>(); - - @ProtoField(id = 24) - private long lastRetriedTime; - - public Workflow() {} - - /** - * @return the status - */ - public WorkflowStatus getStatus() { - return status; - } - - /** - * @param status the status to set - */ - public void setStatus(WorkflowStatus status) { - this.status = status; - } - - /** - * @return the startTime - */ - public long getStartTime() { - return getCreateTime(); - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(long startTime) { - this.setCreateTime(startTime); - } - - /** - * @return the endTime - */ - public long getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - /** - * @return the workflowId - */ - public String getWorkflowId() { - return workflowId; - } - - /** - * @param workflowId the workflowId to set - */ - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - - /** - * @return the 
tasks which are scheduled, in progress or completed. - */ - public List getTasks() { - return tasks; - } - - /** - * @param tasks the tasks to set - */ - public void setTasks(List tasks) { - this.tasks = tasks; - } - - /** - * @return the input - */ - public Map getInput() { - return input; - } - - /** - * @param input the input to set - */ - public void setInput(Map input) { - if (input == null) { - input = new HashMap<>(); - } - this.input = input; - } - - /** - * @return the task to domain map - */ - public Map getTaskToDomain() { - return taskToDomain; - } - - /** - * @param taskToDomain the task to domain map - */ - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - - /** - * @return the output - */ - public Map getOutput() { - return output; - } - - /** - * @param output the output to set - */ - public void setOutput(Map output) { - if (output == null) { - output = new HashMap<>(); - } - this.output = output; - } - - /** - * @return The correlation id used when starting the workflow - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlation id - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - public String getReRunFromWorkflowId() { - return reRunFromWorkflowId; - } - - public void setReRunFromWorkflowId(String reRunFromWorkflowId) { - this.reRunFromWorkflowId = reRunFromWorkflowId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - /** - * @return the parentWorkflowId - */ - public String getParentWorkflowId() { - return parentWorkflowId; - } - - /** - * @param parentWorkflowId the parentWorkflowId to set - */ - public void setParentWorkflowId(String parentWorkflowId) { - this.parentWorkflowId = parentWorkflowId; - } - - /** - * @return 
the parentWorkflowTaskId - */ - public String getParentWorkflowTaskId() { - return parentWorkflowTaskId; - } - - /** - * @param parentWorkflowTaskId the parentWorkflowTaskId to set - */ - public void setParentWorkflowTaskId(String parentWorkflowTaskId) { - this.parentWorkflowTaskId = parentWorkflowTaskId; - } - - /** - * @return Name of the event that started the workflow - */ - public String getEvent() { - return event; - } - - /** - * @param event Name of the event that started the workflow - */ - public void setEvent(String event) { - this.event = event; - } - - public Set getFailedReferenceTaskNames() { - return failedReferenceTaskNames; - } - - public void setFailedReferenceTaskNames(Set failedReferenceTaskNames) { - this.failedReferenceTaskNames = failedReferenceTaskNames; - } - - public WorkflowDef getWorkflowDefinition() { - return workflowDefinition; - } - - public void setWorkflowDefinition(WorkflowDef workflowDefinition) { - this.workflowDefinition = workflowDefinition; - } - - /** - * @return the external storage path of the workflow input payload - */ - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - /** - * @param externalInputPayloadStoragePath the external storage path where the workflow input - * payload is stored - */ - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - /** - * @return the external storage path of the workflow output payload - */ - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - /** - * @return the priority to define on tasks - */ - public int getPriority() { - return priority; - } - - /** - * @param priority priority of tasks (between 0 and 99) - */ - public void setPriority(int priority) { - if (priority < 0 || priority > 99) { - throw new IllegalArgumentException("priority MUST be between 0 and 99 
(inclusive)"); - } - this.priority = priority; - } - - /** - * Convenience method for accessing the workflow definition name. - * - * @return the workflow definition name. - */ - public String getWorkflowName() { - if (workflowDefinition == null) { - throw new NullPointerException("Workflow definition is null"); - } - return workflowDefinition.getName(); - } - - /** - * Convenience method for accessing the workflow definition version. - * - * @return the workflow definition version. - */ - public int getWorkflowVersion() { - if (workflowDefinition == null) { - throw new NullPointerException("Workflow definition is null"); - } - return workflowDefinition.getVersion(); - } - - /** - * @param externalOutputPayloadStoragePath the external storage path where the workflow output - * payload is stored - */ - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - /** - * @return the global workflow variables - */ - public Map getVariables() { - return variables; - } - - /** - * @param variables the set of global workflow variables to set - */ - public void setVariables(Map variables) { - this.variables = variables; - } - - /** - * Captures the last time the workflow was retried - * - * @return the last retried time of the workflow - */ - public long getLastRetriedTime() { - return lastRetriedTime; - } - - /** - * @param lastRetriedTime time in milliseconds when the workflow is retried - */ - public void setLastRetriedTime(long lastRetriedTime) { - this.lastRetriedTime = lastRetriedTime; - } - - public boolean hasParent() { - return StringUtils.isNotEmpty(parentWorkflowId); - } - - public Task getTaskByRefName(String refName) { - if (refName == null) { - throw new RuntimeException( - "refName passed is null. Check the workflow execution. 
For dynamic tasks, make sure referenceTaskName is set to a not null value"); - } - LinkedList found = new LinkedList<>(); - for (Task t : tasks) { - if (t.getReferenceTaskName() == null) { - throw new RuntimeException( - "Task " - + t.getTaskDefName() - + ", seq=" - + t.getSeq() - + " does not have reference name specified."); - } - if (t.getReferenceTaskName().equals(refName)) { - found.add(t); - } - } - if (found.isEmpty()) { - return null; - } - return found.getLast(); - } - - /** - * @return a deep copy of the workflow instance - */ - public Workflow copy() { - Workflow copy = new Workflow(); - copy.setInput(input); - copy.setOutput(output); - copy.setStatus(status); - copy.setWorkflowId(workflowId); - copy.setParentWorkflowId(parentWorkflowId); - copy.setParentWorkflowTaskId(parentWorkflowTaskId); - copy.setReRunFromWorkflowId(reRunFromWorkflowId); - copy.setCorrelationId(correlationId); - copy.setEvent(event); - copy.setReasonForIncompletion(reasonForIncompletion); - copy.setWorkflowDefinition(workflowDefinition); - copy.setPriority(priority); - copy.setTasks(tasks.stream().map(Task::deepCopy).collect(Collectors.toList())); - copy.setVariables(variables); - copy.setEndTime(endTime); - copy.setLastRetriedTime(lastRetriedTime); - copy.setTaskToDomain(taskToDomain); - copy.setFailedReferenceTaskNames(failedReferenceTaskNames); - copy.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); - copy.setExternalOutputPayloadStoragePath(externalOutputPayloadStoragePath); - return copy; - } - - @Override - public String toString() { - String name = workflowDefinition != null ? workflowDefinition.getName() : null; - Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; - return String.format("%s.%s/%s.%s", name, version, workflowId, status); - } - - /** - * A string representation of all relevant fields that identify this workflow. Intended for use - * in log and other system generated messages. 
- */ - public String toShortString() { - String name = workflowDefinition != null ? workflowDefinition.getName() : null; - Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; - return String.format("%s.%s/%s", name, version, workflowId); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Workflow workflow = (Workflow) o; - return getEndTime() == workflow.getEndTime() - && getWorkflowVersion() == workflow.getWorkflowVersion() - && getStatus() == workflow.getStatus() - && Objects.equals(getWorkflowId(), workflow.getWorkflowId()) - && Objects.equals(getParentWorkflowId(), workflow.getParentWorkflowId()) - && Objects.equals(getParentWorkflowTaskId(), workflow.getParentWorkflowTaskId()) - && Objects.equals(getTasks(), workflow.getTasks()) - && Objects.equals(getInput(), workflow.getInput()) - && Objects.equals(getOutput(), workflow.getOutput()) - && Objects.equals(getWorkflowName(), workflow.getWorkflowName()) - && Objects.equals(getCorrelationId(), workflow.getCorrelationId()) - && Objects.equals(getReRunFromWorkflowId(), workflow.getReRunFromWorkflowId()) - && Objects.equals(getReasonForIncompletion(), workflow.getReasonForIncompletion()) - && Objects.equals(getEvent(), workflow.getEvent()) - && Objects.equals(getTaskToDomain(), workflow.getTaskToDomain()) - && Objects.equals( - getFailedReferenceTaskNames(), workflow.getFailedReferenceTaskNames()) - && Objects.equals( - getExternalInputPayloadStoragePath(), - workflow.getExternalInputPayloadStoragePath()) - && Objects.equals( - getExternalOutputPayloadStoragePath(), - workflow.getExternalOutputPayloadStoragePath()) - && Objects.equals(getPriority(), workflow.getPriority()) - && Objects.equals(getWorkflowDefinition(), workflow.getWorkflowDefinition()) - && Objects.equals(getVariables(), workflow.getVariables()) - && Objects.equals(getLastRetriedTime(), 
workflow.getLastRetriedTime()); - } - - @Override - public int hashCode() { - return Objects.hash( - getStatus(), - getEndTime(), - getWorkflowId(), - getParentWorkflowId(), - getParentWorkflowTaskId(), - getTasks(), - getInput(), - getOutput(), - getWorkflowName(), - getWorkflowVersion(), - getCorrelationId(), - getReRunFromWorkflowId(), - getReasonForIncompletion(), - getEvent(), - getTaskToDomain(), - getFailedReferenceTaskNames(), - getWorkflowDefinition(), - getExternalInputPayloadStoragePath(), - getExternalOutputPayloadStoragePath(), - getPriority(), - getVariables(), - getLastRetriedTime()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java deleted file mode 100644 index 3c6536bae..000000000 --- a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Objects; -import java.util.TimeZone; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.annotations.protogen.ProtoField; -import com.netflix.conductor.annotations.protogen.ProtoMessage; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.utils.SummaryUtil; - -/** Captures workflow summary info to be indexed in Elastic Search. */ -@ProtoMessage -public class WorkflowSummary { - - /** The time should be stored as GMT */ - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - - @ProtoField(id = 1) - private String workflowType; - - @ProtoField(id = 2) - private int version; - - @ProtoField(id = 3) - private String workflowId; - - @ProtoField(id = 4) - private String correlationId; - - @ProtoField(id = 5) - private String startTime; - - @ProtoField(id = 6) - private String updateTime; - - @ProtoField(id = 7) - private String endTime; - - @ProtoField(id = 8) - private Workflow.WorkflowStatus status; - - @ProtoField(id = 9) - private String input; - - @ProtoField(id = 10) - private String output; - - @ProtoField(id = 11) - private String reasonForIncompletion; - - @ProtoField(id = 12) - private long executionTime; - - @ProtoField(id = 13) - private String event; - - @ProtoField(id = 14) - private String failedReferenceTaskNames = ""; - - @ProtoField(id = 15) - private String externalInputPayloadStoragePath; - - @ProtoField(id = 16) - private String externalOutputPayloadStoragePath; - - @ProtoField(id = 17) 
- private int priority; - - public WorkflowSummary() {} - - public WorkflowSummary(Workflow workflow) { - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - sdf.setTimeZone(GMT); - - this.workflowType = workflow.getWorkflowName(); - this.version = workflow.getWorkflowVersion(); - this.workflowId = workflow.getWorkflowId(); - this.priority = workflow.getPriority(); - this.correlationId = workflow.getCorrelationId(); - if (workflow.getCreateTime() != null) { - this.startTime = sdf.format(new Date(workflow.getCreateTime())); - } - if (workflow.getEndTime() > 0) { - this.endTime = sdf.format(new Date(workflow.getEndTime())); - } - if (workflow.getUpdateTime() != null) { - this.updateTime = sdf.format(new Date(workflow.getUpdateTime())); - } - this.status = workflow.getStatus(); - if (workflow.getInput() != null) { - this.input = SummaryUtil.serializeInputOutput(workflow.getInput()); - } - if (workflow.getOutput() != null) { - this.output = SummaryUtil.serializeInputOutput(workflow.getOutput()); - } - this.reasonForIncompletion = workflow.getReasonForIncompletion(); - if (workflow.getEndTime() > 0) { - this.executionTime = workflow.getEndTime() - workflow.getStartTime(); - } - this.event = workflow.getEvent(); - this.failedReferenceTaskNames = - workflow.getFailedReferenceTaskNames().stream().collect(Collectors.joining(",")); - if (StringUtils.isNotBlank(workflow.getExternalInputPayloadStoragePath())) { - this.externalInputPayloadStoragePath = workflow.getExternalInputPayloadStoragePath(); - } - if (StringUtils.isNotBlank(workflow.getExternalOutputPayloadStoragePath())) { - this.externalOutputPayloadStoragePath = workflow.getExternalOutputPayloadStoragePath(); - } - } - - /** - * @return the workflowType - */ - public String getWorkflowType() { - return workflowType; - } - - /** - * @return the version - */ - public int getVersion() { - return version; - } - - /** - * @return the workflowId - */ - public String getWorkflowId() { - return 
workflowId; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @return the startTime - */ - public String getStartTime() { - return startTime; - } - - /** - * @return the endTime - */ - public String getEndTime() { - return endTime; - } - - /** - * @return the status - */ - public WorkflowStatus getStatus() { - return status; - } - - /** - * @return the input - */ - public String getInput() { - return input; - } - - public long getInputSize() { - return input != null ? input.length() : 0; - } - - /** - * @return the output - */ - public String getOutput() { - return output; - } - - public long getOutputSize() { - return output != null ? output.length() : 0; - } - - /** - * @return the reasonForIncompletion - */ - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - /** - * @return the executionTime - */ - public long getExecutionTime() { - return executionTime; - } - - /** - * @return the updateTime - */ - public String getUpdateTime() { - return updateTime; - } - - /** - * @return The event - */ - public String getEvent() { - return event; - } - - /** - * @param event The event - */ - public void setEvent(String event) { - this.event = event; - } - - public String getFailedReferenceTaskNames() { - return failedReferenceTaskNames; - } - - public void setFailedReferenceTaskNames(String failedReferenceTaskNames) { - this.failedReferenceTaskNames = failedReferenceTaskNames; - } - - public void setWorkflowType(String workflowType) { - this.workflowType = workflowType; - } - - public void setVersion(int version) { - this.version = version; - } - - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - public void setStartTime(String startTime) { - this.startTime = startTime; - } - - public void setUpdateTime(String updateTime) { - 
this.updateTime = updateTime; - } - - public void setEndTime(String endTime) { - this.endTime = endTime; - } - - public void setStatus(WorkflowStatus status) { - this.status = status; - } - - public void setInput(String input) { - this.input = input; - } - - public void setOutput(String output) { - this.output = output; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - public void setExecutionTime(long executionTime) { - this.executionTime = executionTime; - } - - /** - * @return the external storage path of the workflow input payload - */ - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - /** - * @param externalInputPayloadStoragePath the external storage path where the workflow input - * payload is stored - */ - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - /** - * @return the external storage path of the workflow output payload - */ - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - /** - * @param externalOutputPayloadStoragePath the external storage path where the workflow output - * payload is stored - */ - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - /** - * @return the priority to define on tasks - */ - public int getPriority() { - return priority; - } - - /** - * @param priority priority of tasks (between 0 and 99) - */ - public void setPriority(int priority) { - this.priority = priority; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - WorkflowSummary that = (WorkflowSummary) o; - return 
getVersion() == that.getVersion() - && getExecutionTime() == that.getExecutionTime() - && getPriority() == that.getPriority() - && getWorkflowType().equals(that.getWorkflowType()) - && getWorkflowId().equals(that.getWorkflowId()) - && Objects.equals(getCorrelationId(), that.getCorrelationId()) - && StringUtils.equals(getStartTime(), that.getStartTime()) - && StringUtils.equals(getUpdateTime(), that.getUpdateTime()) - && StringUtils.equals(getEndTime(), that.getEndTime()) - && getStatus() == that.getStatus() - && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion()) - && Objects.equals(getEvent(), that.getEvent()); - } - - @Override - public int hashCode() { - return Objects.hash( - getWorkflowType(), - getVersion(), - getWorkflowId(), - getCorrelationId(), - getStartTime(), - getUpdateTime(), - getEndTime(), - getStatus(), - getReasonForIncompletion(), - getExecutionTime(), - getEvent(), - getPriority()); - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/utils/ConstraintParamUtil.java b/common/src/main/java/com/netflix/conductor/common/utils/ConstraintParamUtil.java deleted file mode 100644 index 3f2eea503..000000000 --- a/common/src/main/java/com/netflix/conductor/common/utils/ConstraintParamUtil.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.EnvUtils.SystemParameters; - -@SuppressWarnings("unchecked") -public class ConstraintParamUtil { - - /** - * Validates inputParam and returns a list of errors if input is not valid. - * - * @param input {@link Map} of inputParameters - * @param taskName TaskName of inputParameters - * @param workflow WorkflowDef - * @return {@link List} of error strings. 
- */ - public static List validateInputParam( - Map input, String taskName, WorkflowDef workflow) { - ArrayList errorList = new ArrayList<>(); - - for (Entry e : input.entrySet()) { - Object value = e.getValue(); - if (value instanceof String) { - errorList.addAll( - extractParamPathComponentsFromString( - e.getKey(), value.toString(), taskName, workflow)); - } else if (value instanceof Map) { - // recursive call - errorList.addAll( - validateInputParam((Map) value, taskName, workflow)); - } else if (value instanceof List) { - errorList.addAll( - extractListInputParam(e.getKey(), (List) value, taskName, workflow)); - } else { - e.setValue(value); - } - } - return errorList; - } - - private static List extractListInputParam( - String key, List values, String taskName, WorkflowDef workflow) { - ArrayList errorList = new ArrayList<>(); - for (Object listVal : values) { - if (listVal instanceof String) { - errorList.addAll( - extractParamPathComponentsFromString( - key, listVal.toString(), taskName, workflow)); - } else if (listVal instanceof Map) { - errorList.addAll( - validateInputParam((Map) listVal, taskName, workflow)); - } else if (listVal instanceof List) { - errorList.addAll(extractListInputParam(key, (List) listVal, taskName, workflow)); - } - } - return errorList; - } - - private static List extractParamPathComponentsFromString( - String key, String value, String taskName, WorkflowDef workflow) { - ArrayList errorList = new ArrayList<>(); - - if (value == null) { - String message = String.format("key: %s input parameter value: is null", key); - errorList.add(message); - return errorList; - } - - String[] values = value.split("(?=(? - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.util.Optional; - -public class EnvUtils { - - public enum SystemParameters { - CPEWF_TASK_ID, - NETFLIX_ENV, - NETFLIX_STACK - } - - public static boolean isEnvironmentVariable(String test) { - for (SystemParameters c : SystemParameters.values()) { - if (c.name().equals(test)) { - return true; - } - } - String value = - Optional.ofNullable(System.getProperty(test)).orElseGet(() -> System.getenv(test)); - return value != null; - } - - public static String getSystemParametersValue(String sysParam, String taskId) { - if ("CPEWF_TASK_ID".equals(sysParam)) { - return taskId; - } - - String value = System.getenv(sysParam); - if (value == null) { - value = System.getProperty(sysParam); - } - return value; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/utils/ExternalPayloadStorage.java b/common/src/main/java/com/netflix/conductor/common/utils/ExternalPayloadStorage.java deleted file mode 100644 index 6ca7ccbc0..000000000 --- a/common/src/main/java/com/netflix/conductor/common/utils/ExternalPayloadStorage.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.io.InputStream; - -import com.netflix.conductor.common.run.ExternalStorageLocation; - -/** - * Interface used to externalize the storage of large JSON payloads in workflow and task - * input/output - */ -public interface ExternalPayloadStorage { - - enum Operation { - READ, - WRITE - } - - enum PayloadType { - WORKFLOW_INPUT, - WORKFLOW_OUTPUT, - TASK_INPUT, - TASK_OUTPUT - } - - /** - * Obtain a uri used to store/access a json payload in external storage. - * - * @param operation the type of {@link Operation} to be performed with the uri - * @param payloadType the {@link PayloadType} that is being accessed at the uri - * @param path (optional) the relative path for which the external storage location object is to - * be populated. If path is not specified, it will be computed and populated. - * @return a {@link ExternalStorageLocation} object which contains the uri and the path for the - * json payload - */ - ExternalStorageLocation getLocation(Operation operation, PayloadType payloadType, String path); - - /** - * Upload a json payload to the specified external storage location. - * - * @param path the location to which the object is to be uploaded - * @param payload an {@link InputStream} containing the json payload which is to be uploaded - * @param payloadSize the size of the json payload in bytes - */ - void upload(String path, InputStream payload, long payloadSize); - - /** - * Download the json payload from the specified external storage location. 
- * - * @param path the location from where the object is to be downloaded - * @return an {@link InputStream} of the json payload at the specified location - */ - InputStream download(String path); -} diff --git a/common/src/main/java/com/netflix/conductor/common/utils/SummaryUtil.java b/common/src/main/java/com/netflix/conductor/common/utils/SummaryUtil.java deleted file mode 100644 index 76127124e..000000000 --- a/common/src/main/java/com/netflix/conductor/common/utils/SummaryUtil.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.util.Map; - -import javax.annotation.PostConstruct; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.config.ObjectMapperProvider; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -@Component -public class SummaryUtil { - - private static final Logger logger = LoggerFactory.getLogger(SummaryUtil.class); - private static final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private static boolean isSummaryInputOutputJsonSerializationEnabled; - - @Value("${conductor.app.summary-input-output-json-serialization.enabled:false}") - private boolean isJsonSerializationEnabled; - - @PostConstruct - public void init() { - isSummaryInputOutputJsonSerializationEnabled = isJsonSerializationEnabled; - } - - /** - * Serializes the Workflow or Task's Input/Output object by Java's toString (default), or by a - * Json ObjectMapper (@see Configuration.isSummaryInputOutputJsonSerializationEnabled) - * - * @param object the Input or Output Object to serialize - * @return the serialized string of the Input or Output object - */ - public static String serializeInputOutput(Map object) { - if (!isSummaryInputOutputJsonSerializationEnabled) { - return object.toString(); - } - - try { - return objectMapper.writeValueAsString(object); - } catch (JsonProcessingException e) { - logger.error( - "The provided value ({}) could not be serialized as Json", - 
object.toString(), - e); - throw new RuntimeException(e); - } - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/utils/TaskUtils.java b/common/src/main/java/com/netflix/conductor/common/utils/TaskUtils.java deleted file mode 100644 index 5e83bd73e..000000000 --- a/common/src/main/java/com/netflix/conductor/common/utils/TaskUtils.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -public class TaskUtils { - - private static final String LOOP_TASK_DELIMITER = "__"; - - public static String appendIteration(String name, int iteration) { - return name + LOOP_TASK_DELIMITER + iteration; - } - - public static String getLoopOverTaskRefNameSuffix(int iteration) { - return LOOP_TASK_DELIMITER + iteration; - } - - public static String removeIterationFromTaskRefName(String referenceTaskName) { - String[] tokens = referenceTaskName.split(TaskUtils.LOOP_TASK_DELIMITER); - return tokens.length > 0 ? tokens[0] : referenceTaskName; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/validation/ErrorResponse.java b/common/src/main/java/com/netflix/conductor/common/validation/ErrorResponse.java deleted file mode 100644 index 5ed6256e1..000000000 --- a/common/src/main/java/com/netflix/conductor/common/validation/ErrorResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.validation; - -import java.util.List; - -public class ErrorResponse { - - private int status; - private String code; - private String message; - private String instance; - private boolean retryable; - private List validationErrors; - - public int getStatus() { - return status; - } - - public void setStatus(int status) { - this.status = status; - } - - public List getValidationErrors() { - return validationErrors; - } - - public void setValidationErrors(List validationErrors) { - this.validationErrors = validationErrors; - } - - public boolean isRetryable() { - return retryable; - } - - public void setRetryable(boolean retryable) { - this.retryable = retryable; - } - - public String getCode() { - return code; - } - - public void setCode(String code) { - this.code = code; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } -} diff --git a/common/src/main/java/com/netflix/conductor/common/validation/ValidationError.java b/common/src/main/java/com/netflix/conductor/common/validation/ValidationError.java deleted file mode 100644 index 48a53e066..000000000 --- a/common/src/main/java/com/netflix/conductor/common/validation/ValidationError.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.validation; - -import java.util.StringJoiner; - -/** Captures a validation error that can be returned in {@link ErrorResponse}. */ -public class ValidationError { - - private String path; - private String message; - private String invalidValue; - - public ValidationError() {} - - public ValidationError(String path, String message, String invalidValue) { - this.path = path; - this.message = message; - this.invalidValue = invalidValue; - } - - public String getPath() { - return path; - } - - public String getMessage() { - return message; - } - - public String getInvalidValue() { - return invalidValue; - } - - public void setPath(String path) { - this.path = path; - } - - public void setMessage(String message) { - this.message = message; - } - - public void setInvalidValue(String invalidValue) { - this.invalidValue = invalidValue; - } - - @Override - public String toString() { - return new StringJoiner(", ", ValidationError.class.getSimpleName() + "[", "]") - .add("path='" + path + "'") - .add("message='" + message + "'") - .add("invalidValue='" + invalidValue + "'") - .toString(); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/config/TestObjectMapperConfiguration.java b/common/src/test/java/com/netflix/conductor/common/config/TestObjectMapperConfiguration.java deleted file mode 100644 index 014a118dd..000000000 --- a/common/src/test/java/com/netflix/conductor/common/config/TestObjectMapperConfiguration.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.config; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** Supplies the standard Conductor {@link ObjectMapper} for tests that need them. */ -@Configuration -public class TestObjectMapperConfiguration { - - @Bean - public ObjectMapper testObjectMapper() { - return new ObjectMapperProvider().getObjectMapper(); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/events/EventHandlerTest.java b/common/src/test/java/com/netflix/conductor/common/events/EventHandlerTest.java deleted file mode 100644 index a6a1f5cd1..000000000 --- a/common/src/test/java/com/netflix/conductor/common/events/EventHandlerTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.events; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.events.EventHandler; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class EventHandlerTest { - - @Test - public void testWorkflowTaskName() { - EventHandler taskDef = new EventHandler(); // name is null - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(taskDef); - assertEquals(3, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("Missing event handler name")); - assertTrue(validationErrors.contains("Missing event location")); - assertTrue( - validationErrors.contains( - "No actions specified. Please specify at-least one action")); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/run/TaskSummaryTest.java b/common/src/test/java/com/netflix/conductor/common/run/TaskSummaryTest.java deleted file mode 100644 index 4c8ec4e6c..000000000 --- a/common/src/test/java/com/netflix/conductor/common/run/TaskSummaryTest.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.run; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.Task; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertNotNull; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class TaskSummaryTest { - - @Autowired private ObjectMapper objectMapper; - - @Test - public void testJsonSerializing() throws Exception { - Task task = new Task(); - TaskSummary taskSummary = new TaskSummary(task); - - String json = objectMapper.writeValueAsString(taskSummary); - TaskSummary read = objectMapper.readValue(json, TaskSummary.class); - assertNotNull(read); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/tasks/TaskDefTest.java b/common/src/test/java/com/netflix/conductor/common/tasks/TaskDefTest.java deleted file mode 100644 index 41f966779..000000000 --- a/common/src/test/java/com/netflix/conductor/common/tasks/TaskDefTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.tasks; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class TaskDefTest { - - private Validator validator; - - @Before - public void setup() { - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - this.validator = factory.getValidator(); - } - - @Test - public void test() { - String name = "test1"; - String description = "desc"; - int retryCount = 10; - int timeout = 100; - TaskDef def = new TaskDef(name, description, retryCount, timeout); - assertEquals(36_00, def.getResponseTimeoutSeconds()); - assertEquals(name, def.getName()); - assertEquals(description, def.getDescription()); - assertEquals(retryCount, def.getRetryCount()); - assertEquals(timeout, def.getTimeoutSeconds()); - } - - @Test - public void testTaskDef() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("task1"); - taskDef.setRetryCount(-1); - taskDef.setTimeoutSeconds(1000); - taskDef.setResponseTimeoutSeconds(1001); - - Set> result = validator.validate(taskDef); - assertEquals(3, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "TaskDef: task1 responseTimeoutSeconds: 1001 must be less than 
timeoutSeconds: 1000")); - assertTrue(validationErrors.contains("TaskDef retryCount: 0 must be >= 0")); - assertTrue(validationErrors.contains("ownerEmail cannot be empty")); - } - - @Test - public void testTaskDefNameAndOwnerNotSet() { - TaskDef taskDef = new TaskDef(); - taskDef.setRetryCount(-1); - taskDef.setTimeoutSeconds(1000); - taskDef.setResponseTimeoutSeconds(1); - - Set> result = validator.validate(taskDef); - assertEquals(3, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("TaskDef retryCount: 0 must be >= 0")); - assertTrue(validationErrors.contains("TaskDef name cannot be null or empty")); - assertTrue(validationErrors.contains("ownerEmail cannot be empty")); - } - - @Test - public void testTaskDefInvalidEmail() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test-task"); - taskDef.setRetryCount(1); - taskDef.setTimeoutSeconds(1000); - taskDef.setResponseTimeoutSeconds(1); - taskDef.setOwnerEmail("owner"); - - Set> result = validator.validate(taskDef); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("ownerEmail should be valid email address")); - } - - @Test - public void testTaskDefValidEmail() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test-task"); - taskDef.setRetryCount(1); - taskDef.setTimeoutSeconds(1000); - taskDef.setResponseTimeoutSeconds(1); - taskDef.setOwnerEmail("owner@test.com"); - - Set> result = validator.validate(taskDef); - assertEquals(0, result.size()); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/tasks/TaskResultTest.java b/common/src/test/java/com/netflix/conductor/common/tasks/TaskResultTest.java deleted file mode 100644 index a49fa2daf..000000000 --- a/common/src/test/java/com/netflix/conductor/common/tasks/TaskResultTest.java +++ 
/dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.tasks; - -import java.util.HashMap; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; - -import static org.junit.Assert.assertEquals; - -public class TaskResultTest { - - private Task task; - private TaskResult taskResult; - - @Before - public void setUp() { - task = new Task(); - task.setWorkflowInstanceId("workflow-id"); - task.setTaskId("task-id"); - task.setReasonForIncompletion("reason"); - task.setCallbackAfterSeconds(10); - task.setWorkerId("worker-id"); - task.setOutputData(new HashMap<>()); - task.setExternalOutputPayloadStoragePath("externalOutput"); - } - - @Test - public void testCanceledTask() { - task.setStatus(Task.Status.CANCELED); - taskResult = new TaskResult(task); - validateTaskResult(); - assertEquals(TaskResult.Status.FAILED, taskResult.getStatus()); - } - - @Test - public void testCompletedWithErrorsTask() { - task.setStatus(Task.Status.COMPLETED_WITH_ERRORS); - taskResult = new TaskResult(task); - validateTaskResult(); - assertEquals(TaskResult.Status.FAILED, taskResult.getStatus()); - } - - @Test - public void testScheduledTask() { - task.setStatus(Task.Status.SCHEDULED); - taskResult = new TaskResult(task); - validateTaskResult(); - assertEquals(TaskResult.Status.IN_PROGRESS, taskResult.getStatus()); - } - - @Test - public void testCompltetedTask() { - task.setStatus(Task.Status.COMPLETED); - taskResult = new TaskResult(task); - validateTaskResult(); - assertEquals(TaskResult.Status.COMPLETED, taskResult.getStatus()); - } - - private void validateTaskResult() { 
- assertEquals(task.getWorkflowInstanceId(), taskResult.getWorkflowInstanceId()); - assertEquals(task.getTaskId(), taskResult.getTaskId()); - assertEquals(task.getReasonForIncompletion(), taskResult.getReasonForIncompletion()); - assertEquals(task.getCallbackAfterSeconds(), taskResult.getCallbackAfterSeconds()); - assertEquals(task.getWorkerId(), taskResult.getWorkerId()); - assertEquals(task.getOutputData(), taskResult.getOutputData()); - assertEquals( - task.getExternalOutputPayloadStoragePath(), - taskResult.getExternalOutputPayloadStoragePath()); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/tasks/TaskTest.java b/common/src/test/java/com/netflix/conductor/common/tasks/TaskTest.java deleted file mode 100644 index e392f65b5..000000000 --- a/common/src/test/java/com/netflix/conductor/common/tasks/TaskTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.tasks; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Set; -import java.util.stream.Collectors; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.google.protobuf.Any; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class TaskTest { - - @Test - public void test() { - - Task task = new Task(); - task.setStatus(Status.FAILED); - assertEquals(Status.FAILED, task.getStatus()); - - Set resultStatues = - Arrays.stream(TaskResult.Status.values()) - .map(Enum::name) - .collect(Collectors.toSet()); - - for (Status status : Status.values()) { - if (resultStatues.contains(status.name())) { - TaskResult.Status trStatus = TaskResult.Status.valueOf(status.name()); - assertEquals(status.name(), trStatus.name()); - - task = new Task(); - task.setStatus(status); - assertEquals(status, task.getStatus()); - } - } - } - - @Test - public void testTaskDefinitionIfAvailable() { - Task task = new Task(); - task.setStatus(Status.FAILED); - assertEquals(Status.FAILED, task.getStatus()); - - assertNull(task.getWorkflowTask()); - assertFalse(task.getTaskDefinition().isPresent()); - - WorkflowTask workflowTask = new WorkflowTask(); - TaskDef taskDefinition = new 
TaskDef(); - workflowTask.setTaskDefinition(taskDefinition); - task.setWorkflowTask(workflowTask); - - assertTrue(task.getTaskDefinition().isPresent()); - assertEquals(taskDefinition, task.getTaskDefinition().get()); - } - - @Test - public void testTaskQueueWaitTime() { - Task task = new Task(); - - long currentTimeMillis = System.currentTimeMillis(); - task.setScheduledTime(currentTimeMillis - 30_000); // 30 seconds ago - task.setStartTime(currentTimeMillis - 25_000); - - long queueWaitTime = task.getQueueWaitTime(); - assertEquals(5000L, queueWaitTime); - - task.setUpdateTime(currentTimeMillis - 20_000); - task.setCallbackAfterSeconds(10); - queueWaitTime = task.getQueueWaitTime(); - assertTrue(queueWaitTime > 0); - } - - @Test - public void testDeepCopyTask() { - final Task task = new Task(); - // In order to avoid forgetting putting inside the copy method the newly added fields check - // the number of declared fields. - final int expectedTaskFieldsNumber = 40; - final int declaredFieldsNumber = task.getClass().getDeclaredFields().length; - - assertEquals(expectedTaskFieldsNumber, declaredFieldsNumber); - - task.setCallbackAfterSeconds(111L); - task.setCallbackFromWorker(false); - task.setCorrelationId("correlation_id"); - task.setInputData(new HashMap<>()); - task.setOutputData(new HashMap<>()); - task.setReferenceTaskName("ref_task_name"); - task.setStartDelayInSeconds(1); - task.setTaskDefName("task_def_name"); - task.setTaskType("dummy_task_type"); - task.setWorkflowInstanceId("workflowInstanceId"); - task.setWorkflowType("workflowType"); - task.setResponseTimeoutSeconds(11L); - task.setStatus(Status.COMPLETED); - task.setRetryCount(0); - task.setPollCount(0); - task.setTaskId("taskId"); - task.setWorkflowTask(new WorkflowTask()); - task.setDomain("domain"); - task.setInputMessage(Any.getDefaultInstance()); - task.setOutputMessage(Any.getDefaultInstance()); - task.setRateLimitPerFrequency(11); - task.setRateLimitFrequencyInSeconds(11); - 
task.setExternalInputPayloadStoragePath("externalInputPayloadStoragePath"); - task.setExternalOutputPayloadStoragePath("externalOutputPayloadStoragePath"); - task.setWorkflowPriority(0); - task.setIteration(1); - task.setExecutionNameSpace("name_space"); - task.setIsolationGroupId("groupId"); - task.setStartTime(12L); - task.setEndTime(20L); - task.setScheduledTime(7L); - task.setRetried(false); - task.setReasonForIncompletion(""); - task.setWorkerId(""); - task.setSubWorkflowId(""); - task.setSubworkflowChanged(false); - - final Task copy = task.deepCopy(); - assertEquals(task, copy); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/utils/ConstraintParamUtilTest.java b/common/src/test/java/com/netflix/conductor/common/utils/ConstraintParamUtilTest.java deleted file mode 100644 index d29f1744e..000000000 --- a/common/src/test/java/com/netflix/conductor/common/utils/ConstraintParamUtilTest.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import static org.junit.Assert.assertEquals; - -public class ConstraintParamUtilTest { - - @Before - public void before() { - System.setProperty("NETFLIX_STACK", "test"); - System.setProperty("NETFLIX_ENVIRONMENT", "test"); - System.setProperty("TEST_ENV", "test"); - } - - private WorkflowDef constructWorkflowDef() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - return workflowDef; - } - - @Test - public void testExtractParamPathComponents() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithMissingEnvVariable() { - WorkflowDef workflowDef = constructWorkflowDef(); - 
- WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${NETFLIX_STACK}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithValidEnvVariable() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${workflow.input.status}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithValidMap() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${workflow.input.status}"); - Map envInputParam = new HashMap<>(); - envInputParam.put("packageId", "${workflow.input.packageId}"); - envInputParam.put("taskId", "${CPEWF_TASK_ID}"); - envInputParam.put("NETFLIX_STACK", "${NETFLIX_STACK}"); - envInputParam.put("NETFLIX_ENVIRONMENT", 
"${NETFLIX_ENVIRONMENT}"); - envInputParam.put("TEST_ENV", "${TEST_ENV}"); - - inputParam.put("env", envInputParam); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithInvalidEnv() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${workflow.input.status}"); - Map envInputParam = new HashMap<>(); - envInputParam.put("packageId", "${workflow.input.packageId}"); - envInputParam.put("taskId", "${CPEWF_TASK_ID}"); - envInputParam.put("TEST_ENV1", "${TEST_ENV1}"); - - inputParam.put("env", envInputParam); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 1); - } - - @Test - public void testExtractParamPathComponentsWithInputParamEmpty() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", ""); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - 
assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithListInputParamWithEmptyString() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", new String[] {""}); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithInputFieldWithSpace() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${workflow.input.status sta}"); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 1); - } - - @Test - public void testExtractParamPathComponentsWithPredefineEnums() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("NETFLIX_ENV", "${CPEWF_TASK_ID}"); - inputParam.put( - "entryPoint", "/tools/pdfwatermarker_mux.py ${NETFLIX_ENV} ${CPEWF_TASK_ID} alpha"); - 
workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } - - @Test - public void testExtractParamPathComponentsWithEscapedChar() { - WorkflowDef workflowDef = constructWorkflowDef(); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "$${expression with spaces}"); - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - List results = - ConstraintParamUtil.validateInputParam(inputParam, "task_1", workflowDef); - assertEquals(results.size(), 0); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/utils/SummaryUtilTest.java b/common/src/test/java/com/netflix/conductor/common/utils/SummaryUtilTest.java deleted file mode 100644 index 79fae89d9..000000000 --- a/common/src/test/java/com/netflix/conductor/common/utils/SummaryUtilTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.utils; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.runner.ApplicationContextRunner; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -@ContextConfiguration( - classes = { - TestObjectMapperConfiguration.class, - SummaryUtilTest.SummaryUtilTestConfiguration.class - }) -@RunWith(SpringRunner.class) -public class SummaryUtilTest { - - @Configuration - static class SummaryUtilTestConfiguration { - - @Bean - public SummaryUtil summaryUtil() { - return new SummaryUtil(); - } - } - - @Autowired private ObjectMapper objectMapper; - - private Map testObject; - - @Before - public void init() { - Map child = new HashMap<>(); - child.put("testStr", "childTestStr"); - - Map obj = new HashMap<>(); - obj.put("testStr", "stringValue"); - obj.put("testArray", new ArrayList<>(Arrays.asList(1, 2, 3))); - obj.put("testObj", child); - obj.put("testNull", null); - - testObject = obj; - } - - @Test - public void testSerializeInputOutput_defaultToString() throws Exception { - new 
ApplicationContextRunner() - .withPropertyValues( - "conductor.app.summary-input-output-json-serialization.enabled:false") - .withUserConfiguration(SummaryUtilTestConfiguration.class) - .run( - context -> { - String serialized = SummaryUtil.serializeInputOutput(this.testObject); - - assertEquals( - this.testObject.toString(), - serialized, - "The Java.toString() Serialization should match the serialized Test Object"); - }); - } - - @Test - public void testSerializeInputOutput_jsonSerializationEnabled() throws Exception { - new ApplicationContextRunner() - .withPropertyValues( - "conductor.app.summary-input-output-json-serialization.enabled:true") - .withUserConfiguration(SummaryUtilTestConfiguration.class) - .run( - context -> { - String serialized = SummaryUtil.serializeInputOutput(testObject); - - assertEquals( - objectMapper.writeValueAsString(testObject), - serialized, - "The ObjectMapper Json Serialization should match the serialized Test Object"); - }); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/SubWorkflowParamsTest.java b/common/src/test/java/com/netflix/conductor/common/workflow/SubWorkflowParamsTest.java deleted file mode 100644 index 1859c4a0d..000000000 --- a/common/src/test/java/com/netflix/conductor/common/workflow/SubWorkflowParamsTest.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.workflow; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.fasterxml.jackson.databind.MapperFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class SubWorkflowParamsTest { - - @Autowired private ObjectMapper objectMapper; - - @Test - public void testWorkflowTaskName() { - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); // name is null - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - - Set> result = validator.validate(subWorkflowParams); - assertEquals(2, result.size()); - - 
List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("SubWorkflowParams name cannot be null")); - assertTrue(validationErrors.contains("SubWorkflowParams name cannot be empty")); - } - - @Test - public void testWorkflowSetTaskToDomain() { - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - Map taskToDomain = new HashMap<>(); - taskToDomain.put("unit", "test"); - subWorkflowParams.setTaskToDomain(taskToDomain); - assertEquals(taskToDomain, subWorkflowParams.getTaskToDomain()); - } - - @Test(expected = IllegalArgumentException.class) - public void testSetWorkflowDefinition() { - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("dummy-name"); - subWorkflowParams.setWorkflowDefinition(new Object()); - } - - @Test - public void testGetWorkflowDef() { - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("dummy-name"); - WorkflowDef def = new WorkflowDef(); - def.setName("test_workflow"); - def.setVersion(1); - WorkflowTask task = new WorkflowTask(); - task.setName("test_task"); - task.setTaskReferenceName("t1"); - def.getTasks().add(task); - subWorkflowParams.setWorkflowDefinition(def); - assertEquals(def, subWorkflowParams.getWorkflowDefinition()); - assertEquals(def, subWorkflowParams.getWorkflowDef()); - } - - @Test - public void testWorkflowDefJson() throws Exception { - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("dummy-name"); - WorkflowDef def = new WorkflowDef(); - def.setName("test_workflow"); - def.setVersion(1); - WorkflowTask task = new WorkflowTask(); - task.setName("test_task"); - task.setTaskReferenceName("t1"); - def.getTasks().add(task); - subWorkflowParams.setWorkflowDefinition(def); - - objectMapper.enable(SerializationFeature.INDENT_OUTPUT); - objectMapper.enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY); - 
objectMapper.enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); - - String serializedParams = - objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(subWorkflowParams); - SubWorkflowParams deserializedParams = - objectMapper.readValue(serializedParams, SubWorkflowParams.class); - assertEquals(def, deserializedParams.getWorkflowDefinition()); - assertEquals(def, deserializedParams.getWorkflowDef()); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java deleted file mode 100644 index 16a08851e..000000000 --- a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.workflow; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class WorkflowDefValidatorTest { - - @Before - public void before() { - System.setProperty("NETFLIX_STACK", "test"); - System.setProperty("NETFLIX_ENVIRONMENT", "test"); - System.setProperty("TEST_ENV", "test"); - } - - @Test - public void testWorkflowDefConstraints() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(3, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("WorkflowDef name cannot be null or empty")); - assertTrue(validationErrors.contains("WorkflowTask list cannot be empty")); - assertTrue(validationErrors.contains("ownerEmail cannot be empty")); - // assertTrue(validationErrors.contains("workflowDef 
schemaVersion: 1 should be >= 2")); - } - - @Test - public void testWorkflowDefConstraintsWithMultipleEnvVariable() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID}"); - inputParam.put( - "entryPoint", - "${NETFLIX_ENVIRONMENT} ${NETFLIX_STACK} ${CPEWF_TASK_ID} ${workflow.input.status}"); - - workflowTask_1.setInputParameters(inputParam); - - WorkflowTask workflowTask_2 = new WorkflowTask(); - workflowTask_2.setName("task_2"); - workflowTask_2.setTaskReferenceName("task_2"); - workflowTask_2.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam2 = new HashMap<>(); - inputParam2.put("env", inputParam); - - workflowTask_2.setInputParameters(inputParam2); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - tasks.add(workflowTask_2); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowDefConstraintsSingleEnvVariable() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new 
ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowDefConstraintsDualEnvVariable() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${NETFLIX_STACK}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowDefConstraintsWithMapAsInputParam() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("taskId", "${CPEWF_TASK_ID} ${NETFLIX_STACK}"); - Map envInputParam = new HashMap<>(); - envInputParam.put("packageId", "${workflow.input.packageId}"); - envInputParam.put("taskId", "${CPEWF_TASK_ID}"); - envInputParam.put("NETFLIX_STACK", "${NETFLIX_STACK}"); - envInputParam.put("NETFLIX_ENVIRONMENT", 
"${NETFLIX_ENVIRONMENT}"); - - inputParam.put("env", envInputParam); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskInputParamInvalid() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask = new WorkflowTask(); // name is null - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", "${workflow.input.Space Value}"); - workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "key: blabla input parameter value: workflow.input.Space Value is not valid")); - } - - @Test - public void testWorkflowTaskEmptyStringInputParamValue() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask = new WorkflowTask(); // name is null - - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", ""); - 
workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTasklistInputParamWithEmptyString() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(2); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask = new WorkflowTask(); // name is null - - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", ""); - map.put("foo", new String[] {""}); - workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowSchemaVersion1() { - WorkflowDef workflowDef = new WorkflowDef(); // name is null - workflowDef.setSchemaVersion(3); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask = new WorkflowTask(); - - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", ""); - workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> 
validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("workflowDef schemaVersion: 2 is only supported")); - } - - @Test - public void testWorkflowOwnerInvalidEmail() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner"); - - WorkflowTask workflowTask = new WorkflowTask(); - - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", ""); - workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("ownerEmail should be valid email address")); - } - - @Test - public void testWorkflowOwnerValidEmail() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_env"); - workflowDef.setOwnerEmail("owner@test.com"); - - WorkflowTask workflowTask = new WorkflowTask(); - - workflowTask.setName("t1"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName("t1"); - - Map map = new HashMap<>(); - map.put("blabla", ""); - workflowTask.setInputParameters(map); - - workflowDef.getTasks().add(workflowTask); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } -} diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowTaskTest.java b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowTaskTest.java deleted file mode 100644 index 6d052e4d3..000000000 --- 
a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowTaskTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.workflow; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class WorkflowTaskTest { - - @Test - public void test() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setWorkflowTaskType(TaskType.DECISION); - - assertNotNull(workflowTask.getType()); - assertEquals(TaskType.DECISION.name(), workflowTask.getType()); - - workflowTask = new WorkflowTask(); - workflowTask.setWorkflowTaskType(TaskType.SWITCH); - - assertNotNull(workflowTask.getType()); - assertEquals(TaskType.SWITCH.name(), workflowTask.getType()); - } - - @Test - public void testOptional() { - WorkflowTask task = new WorkflowTask(); - assertFalse(task.isOptional()); - - task.setOptional(Boolean.FALSE); - assertFalse(task.isOptional()); - - task.setOptional(Boolean.TRUE); - assertTrue(task.isOptional()); - } - - @Test - public void testWorkflowTaskName() { - WorkflowTask taskDef = new WorkflowTask(); // name is null - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(taskDef); - assertEquals(2, 
result.size()); - - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("WorkflowTask name cannot be empty or null")); - assertTrue( - validationErrors.contains( - "WorkflowTask taskReferenceName name cannot be empty or null")); - } -} diff --git a/core/build.gradle b/core/build.gradle deleted file mode 100644 index 53c7c7ca8..000000000 --- a/core/build.gradle +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -apply plugin: 'groovy' - -dependencies { - implementation project(':conductor-common') - compileOnly 'org.springframework.boot:spring-boot-starter' - compileOnly 'org.springframework.boot:spring-boot-starter-validation' - compileOnly 'org.springframework.retry:spring-retry' - - implementation "com.fasterxml.jackson.core:jackson-annotations" - implementation "com.fasterxml.jackson.core:jackson-databind" - - implementation "commons-io:commons-io:${revCommonsIo}" - - implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - - implementation "org.apache.commons:commons-lang3" - - implementation "com.fasterxml.jackson.core:jackson-core" - - implementation "com.spotify:completable-futures:${revSpotifyCompletableFutures}" - - implementation "com.jayway.jsonpath:json-path:${revJsonPath}" - - implementation "io.reactivex:rxjava:${revRxJava}" - - implementation "com.netflix.spectator:spectator-api:${revSpectator}" - - implementation "org.apache.bval:bval-jsr:${revBval}" - - implementation "com.github.ben-manes.caffeine:caffeine" - - // JAXB is not bundled with Java 11, dependencies added explicitly - // These are needed by Apache BVAL - implementation "jakarta.xml.bind:jakarta.xml.bind-api:${revJAXB}" - implementation "jakarta.activation:jakarta.activation-api:${revActivation}" - - // Only add it as a test dependency. The actual jaxb runtime provider is provided when building the server. 
- testImplementation "org.glassfish.jaxb:jaxb-runtime:${revJAXB}" - - testImplementation 'org.springframework.boot:spring-boot-starter-validation' - testImplementation 'org.springframework.retry:spring-retry' - testImplementation project(':conductor-common').sourceSets.test.output - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" -} diff --git a/core/dependencies.lock b/core/dependencies.lock deleted file mode 100644 index 79c4e2ead..000000000 --- a/core/dependencies.lock +++ /dev/null @@ -1,400 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "locked": "2.4.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "locked": "2.0.0" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - 
"locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "locked": "2.4.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "locked": "2.0.0" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - 
"org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "locked": "2.4.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "locked": "2.0.0" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "locked": "2.0.5" - }, - 
"org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "locked": "2.3.3" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "locked": "2.4.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "locked": "0.122.0" - }, - 
"com.spotify:completable-futures": { - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "locked": "1.2.2" - }, - "jakarta.activation:jakarta.activation-api": { - "locked": "2.0.0" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "locked": "2.3.3" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - 
"org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - } -} \ No newline at end of file diff --git a/core/src/main/java/com/netflix/conductor/annotations/Audit.java b/core/src/main/java/com/netflix/conductor/annotations/Audit.java deleted file mode 100644 index 6f1d47199..000000000 --- a/core/src/main/java/com/netflix/conductor/annotations/Audit.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations; - -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.TYPE; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** Mark service for custom audit implementation */ -@Target({TYPE}) -@Retention(RUNTIME) -public @interface Audit {} diff --git a/core/src/main/java/com/netflix/conductor/annotations/Trace.java b/core/src/main/java/com/netflix/conductor/annotations/Trace.java deleted file mode 100644 index 61da42cc4..000000000 --- a/core/src/main/java/com/netflix/conductor/annotations/Trace.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations; - -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.TYPE; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -@Target({TYPE}) -@Retention(RUNTIME) -public @interface Trace {} diff --git a/core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java b/core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java deleted file mode 100644 index 492931128..000000000 --- a/core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.annotations; - -import java.lang.annotation.*; - -/** - * Annotates a program element that exists, or is more widely visible than otherwise necessary, only - * for use in test code. - */ -@Retention(RetentionPolicy.CLASS) -@Target({ElementType.FIELD, ElementType.TYPE, ElementType.METHOD}) -@Documented -public @interface VisibleForTesting {} diff --git a/core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java b/core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java deleted file mode 100644 index bfe1455fb..000000000 --- a/core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.SmartLifecycle; - -public abstract class LifecycleAwareComponent implements SmartLifecycle { - - private volatile boolean running = false; - - private static final Logger LOGGER = LoggerFactory.getLogger(LifecycleAwareComponent.class); - - @Override - public final void start() { - running = true; - LOGGER.info("{} started.", getClass().getSimpleName()); - doStart(); - } - - @Override - public final void stop() { - running = false; - LOGGER.info("{} stopped.", getClass().getSimpleName()); - doStop(); - } - - @Override - public final boolean isRunning() { - return running; - } - - public void doStart() {} - - public void doStop() {} -} diff --git a/core/src/main/java/com/netflix/conductor/core/WorkflowContext.java b/core/src/main/java/com/netflix/conductor/core/WorkflowContext.java deleted file mode 100644 index d870761c5..000000000 --- a/core/src/main/java/com/netflix/conductor/core/WorkflowContext.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core; - -/** Store the authentication context, app or username or both */ -public class WorkflowContext { - - public static final ThreadLocal THREAD_LOCAL = - InheritableThreadLocal.withInitial(() -> new WorkflowContext("", "")); - - private final String clientApp; - - private final String userName; - - public WorkflowContext(String clientApp) { - this.clientApp = clientApp; - this.userName = null; - } - - public WorkflowContext(String clientApp, String userName) { - this.clientApp = clientApp; - this.userName = userName; - } - - public static WorkflowContext get() { - return THREAD_LOCAL.get(); - } - - public static void set(WorkflowContext ctx) { - THREAD_LOCAL.set(ctx); - } - - public static void unset() { - THREAD_LOCAL.remove(); - } - - /** - * @return the clientApp - */ - public String getClientApp() { - return clientApp; - } - - /** - * @return the username - */ - public String getUserName() { - return userName; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/config/ConductorCoreConfiguration.java b/core/src/main/java/com/netflix/conductor/core/config/ConductorCoreConfiguration.java deleted file mode 100644 index eedef68f1..000000000 --- a/core/src/main/java/com/netflix/conductor/core/config/ConductorCoreConfiguration.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.config; - -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.retry.RetryContext; -import org.springframework.retry.backoff.NoBackOffPolicy; -import org.springframework.retry.policy.SimpleRetryPolicy; -import org.springframework.retry.support.RetryTemplate; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.events.EventQueueProvider; -import com.netflix.conductor.core.execution.mapper.TaskMapper; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.listener.WorkflowStatusListener; -import com.netflix.conductor.core.listener.WorkflowStatusListenerStub; -import com.netflix.conductor.core.storage.DummyPayloadStorage; -import com.netflix.conductor.core.sync.Lock; -import com.netflix.conductor.core.sync.noop.NoopLock; - -import static 
com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER; -import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER; -import static com.netflix.conductor.core.utils.Utils.isTransientException; - -import static java.util.function.Function.identity; - -@Configuration(proxyBeanMethods = false) -@EnableConfigurationProperties(ConductorProperties.class) -public class ConductorCoreConfiguration { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConductorCoreConfiguration.class); - - @ConditionalOnProperty( - name = "conductor.workflow-execution-lock.type", - havingValue = "noop_lock", - matchIfMissing = true) - @Bean - public Lock provideLock() { - return new NoopLock(); - } - - @ConditionalOnProperty( - name = "conductor.external-payload-storage.type", - havingValue = "dummy", - matchIfMissing = true) - @Bean - public ExternalPayloadStorage dummyExternalPayloadStorage() { - LOGGER.info("Initialized dummy payload storage!"); - return new DummyPayloadStorage(); - } - - @ConditionalOnProperty( - name = "conductor.workflow-status-listener.type", - havingValue = "stub", - matchIfMissing = true) - @Bean - public WorkflowStatusListener workflowStatusListener() { - return new WorkflowStatusListenerStub(); - } - - @Bean - public ExecutorService executorService(ConductorProperties conductorProperties) { - ThreadFactory threadFactory = - new BasicThreadFactory.Builder() - .namingPattern("conductor-worker-%d") - .daemon(true) - .build(); - return Executors.newFixedThreadPool( - conductorProperties.getExecutorServiceMaxThreadCount(), threadFactory); - } - - @Bean - @Qualifier("taskMappersByTaskType") - public Map getTaskMappers(List taskMappers) { - return taskMappers.stream().collect(Collectors.toMap(TaskMapper::getTaskType, identity())); - } - - @Bean - @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) - public Set asyncSystemTasks(Set allSystemTasks) { - return allSystemTasks.stream() - 
.filter(WorkflowSystemTask::isAsync) - .collect(Collectors.toUnmodifiableSet()); - } - - @Bean - @Qualifier(EVENT_QUEUE_PROVIDERS_QUALIFIER) - public Map getEventQueueProviders( - List eventQueueProviders) { - return eventQueueProviders.stream() - .collect(Collectors.toMap(EventQueueProvider::getQueueType, identity())); - } - - @Bean - public RetryTemplate onTransientErrorRetryTemplate() { - SimpleRetryPolicy retryPolicy = new CustomRetryPolicy(); - retryPolicy.setMaxAttempts(3); - - RetryTemplate retryTemplate = new RetryTemplate(); - retryTemplate.setRetryPolicy(retryPolicy); - retryTemplate.setBackOffPolicy(new NoBackOffPolicy()); - return retryTemplate; - } - - public static class CustomRetryPolicy extends SimpleRetryPolicy { - - @Override - public boolean canRetry(final RetryContext context) { - final Optional lastThrowable = - Optional.ofNullable(context.getLastThrowable()); - return lastThrowable - .map(throwable -> super.canRetry(context) && isTransientException(throwable)) - .orElseGet(() -> super.canRetry(context)); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/config/ConductorProperties.java b/core/src/main/java/com/netflix/conductor/core/config/ConductorProperties.java deleted file mode 100644 index b223008a1..000000000 --- a/core/src/main/java/com/netflix/conductor/core/config/ConductorProperties.java +++ /dev/null @@ -1,522 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.config; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DataSizeUnit; -import org.springframework.boot.convert.DurationUnit; -import org.springframework.util.unit.DataSize; -import org.springframework.util.unit.DataUnit; - -@ConfigurationProperties("conductor.app") -public class ConductorProperties { - - /** - * Name of the stack within which the app is running. e.g. devint, testintg, staging, prod etc. - */ - private String stack = "test"; - - /** The id with the app has been registered. */ - private String appId = "conductor"; - - /** The maximum number of threads to be allocated to the executor service threadpool. */ - private int executorServiceMaxThreadCount = 50; - - /** The timeout duration to set when a workflow is pushed to the decider queue. */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration workflowOffsetTimeout = Duration.ofSeconds(30); - - /** The number of threads to use to do background sweep on active workflows. */ - private int sweeperThreadCount = Runtime.getRuntime().availableProcessors() * 2; - - /** The number of threads to configure the threadpool in the event processor. */ - private int eventProcessorThreadCount = 2; - - /** Used to enable/disable the indexing of messages within event payloads. */ - private boolean eventMessageIndexingEnabled = true; - - /** Used to enable/disable the indexing of event execution results. 
*/ - private boolean eventExecutionIndexingEnabled = true; - - /** Used to enable/disable the workflow execution lock. */ - private boolean workflowExecutionLockEnabled = false; - - /** The time (in milliseconds) for which the lock is leased for. */ - private Duration lockLeaseTime = Duration.ofMillis(60000); - - /** - * The time (in milliseconds) for which the thread will block in an attempt to acquire the lock. - */ - private Duration lockTimeToTry = Duration.ofMillis(500); - - /** - * The time (in seconds) that is used to consider if a worker is actively polling for a task. - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration activeWorkerLastPollTimeout = Duration.ofSeconds(10); - - /** - * The time (in seconds) for which a task execution will be postponed if being rate limited or - * concurrent execution limited. - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration taskExecutionPostponeDuration = Duration.ofSeconds(60); - - /** Used to enable/disable the indexing of task execution logs. */ - private boolean taskExecLogIndexingEnabled = true; - - /** Used to enable/disable asynchronous indexing to elasticsearch. */ - private boolean asyncIndexingEnabled = false; - - /** The number of threads to be used within the threadpool for system task workers. */ - private int systemTaskWorkerThreadCount = Runtime.getRuntime().availableProcessors() * 2; - - /** - * The interval (in seconds) after which a system task will be checked by the system task worker - * for completion. - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration systemTaskWorkerCallbackDuration = Duration.ofSeconds(30); - - /** - * The interval (in milliseconds) at which system task queues will be polled by the system task - * workers. - */ - private Duration systemTaskWorkerPollInterval = Duration.ofMillis(50); - - /** The namespace for the system task workers to provide instance level isolation. 
*/ - private String systemTaskWorkerExecutionNamespace = ""; - - /** - * The number of threads to be used within the threadpool for system task workers in each - * isolation group. - */ - private int isolatedSystemTaskWorkerThreadCount = 1; - - /** The max number of system tasks to be polled in a single request. */ - private int systemTaskMaxPollCount = 1; - - /** - * The duration of workflow execution which qualifies a workflow as a short-running workflow - * when async indexing to elasticsearch is enabled. - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration asyncUpdateShortRunningWorkflowDuration = Duration.ofSeconds(30); - - /** - * The delay with which short-running workflows will be updated in the elasticsearch index when - * async indexing is enabled. - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration asyncUpdateDelay = Duration.ofSeconds(60); - - /** - * Used to control the validation for owner email field as mandatory within workflow and task - * definitions. - */ - private boolean ownerEmailMandatory = true; - - /** - * The number of threads to be usde in Scheduler used for polling events from multiple event - * queues. By default, a thread count equal to the number of CPU cores is chosen. - */ - private int eventQueueSchedulerPollThreadCount = Runtime.getRuntime().availableProcessors(); - - /** The time interval (in milliseconds) at which the default event queues will be polled. */ - private Duration eventQueuePollInterval = Duration.ofMillis(100); - - /** The number of messages to be polled from a default event queue in a single operation. */ - private int eventQueuePollCount = 10; - - /** The timeout (in milliseconds) for the poll operation on the default event queue. */ - private Duration eventQueueLongPollTimeout = Duration.ofMillis(1000); - - /** - * The threshold of the workflow input payload size in KB beyond which the payload will be - * stored in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}. 
- */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize workflowInputPayloadSizeThreshold = DataSize.ofKilobytes(5120L); - - /** - * The maximum threshold of the workflow input payload size in KB beyond which input will be - * rejected and the workflow will be marked as FAILED. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize maxWorkflowInputPayloadSizeThreshold = DataSize.ofKilobytes(10240L); - - /** - * The threshold of the workflow output payload size in KB beyond which the payload will be - * stored in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize workflowOutputPayloadSizeThreshold = DataSize.ofKilobytes(5120L); - - /** - * The maximum threshold of the workflow output payload size in KB beyond which output will be - * rejected and the workflow will be marked as FAILED. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize maxWorkflowOutputPayloadSizeThreshold = DataSize.ofKilobytes(10240L); - - /** - * The threshold of the task input payload size in KB beyond which the payload will be stored in - * {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize taskInputPayloadSizeThreshold = DataSize.ofKilobytes(3072L); - - /** - * The maximum threshold of the task input payload size in KB beyond which the task input will - * be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize maxTaskInputPayloadSizeThreshold = DataSize.ofKilobytes(10240L); - - /** - * The threshold of the task output payload size in KB beyond which the payload will be stored - * in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}. 
- */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize taskOutputPayloadSizeThreshold = DataSize.ofKilobytes(3072L); - - /** - * The maximum threshold of the task output payload size in KB beyond which the task input will - * be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize maxTaskOutputPayloadSizeThreshold = DataSize.ofKilobytes(10240L); - - /** - * The maximum threshold of the workflow variables payload size in KB beyond which the task - * changes will be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR. - */ - @DataSizeUnit(DataUnit.KILOBYTES) - private DataSize maxWorkflowVariablesPayloadSizeThreshold = DataSize.ofKilobytes(256L); - - public String getStack() { - return stack; - } - - public void setStack(String stack) { - this.stack = stack; - } - - public String getAppId() { - return appId; - } - - public void setAppId(String appId) { - this.appId = appId; - } - - public int getExecutorServiceMaxThreadCount() { - return executorServiceMaxThreadCount; - } - - public void setExecutorServiceMaxThreadCount(int executorServiceMaxThreadCount) { - this.executorServiceMaxThreadCount = executorServiceMaxThreadCount; - } - - public Duration getWorkflowOffsetTimeout() { - return workflowOffsetTimeout; - } - - public void setWorkflowOffsetTimeout(Duration workflowOffsetTimeout) { - this.workflowOffsetTimeout = workflowOffsetTimeout; - } - - public int getSweeperThreadCount() { - return sweeperThreadCount; - } - - public void setSweeperThreadCount(int sweeperThreadCount) { - this.sweeperThreadCount = sweeperThreadCount; - } - - public int getEventProcessorThreadCount() { - return eventProcessorThreadCount; - } - - public void setEventProcessorThreadCount(int eventProcessorThreadCount) { - this.eventProcessorThreadCount = eventProcessorThreadCount; - } - - public boolean isEventMessageIndexingEnabled() { - return eventMessageIndexingEnabled; - } - - public void 
setEventMessageIndexingEnabled(boolean eventMessageIndexingEnabled) { - this.eventMessageIndexingEnabled = eventMessageIndexingEnabled; - } - - public boolean isEventExecutionIndexingEnabled() { - return eventExecutionIndexingEnabled; - } - - public void setEventExecutionIndexingEnabled(boolean eventExecutionIndexingEnabled) { - this.eventExecutionIndexingEnabled = eventExecutionIndexingEnabled; - } - - public boolean isWorkflowExecutionLockEnabled() { - return workflowExecutionLockEnabled; - } - - public void setWorkflowExecutionLockEnabled(boolean workflowExecutionLockEnabled) { - this.workflowExecutionLockEnabled = workflowExecutionLockEnabled; - } - - public Duration getLockLeaseTime() { - return lockLeaseTime; - } - - public void setLockLeaseTime(Duration lockLeaseTime) { - this.lockLeaseTime = lockLeaseTime; - } - - public Duration getLockTimeToTry() { - return lockTimeToTry; - } - - public void setLockTimeToTry(Duration lockTimeToTry) { - this.lockTimeToTry = lockTimeToTry; - } - - public Duration getActiveWorkerLastPollTimeout() { - return activeWorkerLastPollTimeout; - } - - public void setActiveWorkerLastPollTimeout(Duration activeWorkerLastPollTimeout) { - this.activeWorkerLastPollTimeout = activeWorkerLastPollTimeout; - } - - public Duration getTaskExecutionPostponeDuration() { - return taskExecutionPostponeDuration; - } - - public void setTaskExecutionPostponeDuration(Duration taskExecutionPostponeDuration) { - this.taskExecutionPostponeDuration = taskExecutionPostponeDuration; - } - - public boolean isTaskExecLogIndexingEnabled() { - return taskExecLogIndexingEnabled; - } - - public void setTaskExecLogIndexingEnabled(boolean taskExecLogIndexingEnabled) { - this.taskExecLogIndexingEnabled = taskExecLogIndexingEnabled; - } - - public boolean isAsyncIndexingEnabled() { - return asyncIndexingEnabled; - } - - public void setAsyncIndexingEnabled(boolean asyncIndexingEnabled) { - this.asyncIndexingEnabled = asyncIndexingEnabled; - } - - public int 
getSystemTaskWorkerThreadCount() { - return systemTaskWorkerThreadCount; - } - - public void setSystemTaskWorkerThreadCount(int systemTaskWorkerThreadCount) { - this.systemTaskWorkerThreadCount = systemTaskWorkerThreadCount; - } - - public Duration getSystemTaskWorkerCallbackDuration() { - return systemTaskWorkerCallbackDuration; - } - - public void setSystemTaskWorkerCallbackDuration(Duration systemTaskWorkerCallbackDuration) { - this.systemTaskWorkerCallbackDuration = systemTaskWorkerCallbackDuration; - } - - public Duration getSystemTaskWorkerPollInterval() { - return systemTaskWorkerPollInterval; - } - - public void setSystemTaskWorkerPollInterval(Duration systemTaskWorkerPollInterval) { - this.systemTaskWorkerPollInterval = systemTaskWorkerPollInterval; - } - - public String getSystemTaskWorkerExecutionNamespace() { - return systemTaskWorkerExecutionNamespace; - } - - public void setSystemTaskWorkerExecutionNamespace(String systemTaskWorkerExecutionNamespace) { - this.systemTaskWorkerExecutionNamespace = systemTaskWorkerExecutionNamespace; - } - - public int getIsolatedSystemTaskWorkerThreadCount() { - return isolatedSystemTaskWorkerThreadCount; - } - - public void setIsolatedSystemTaskWorkerThreadCount(int isolatedSystemTaskWorkerThreadCount) { - this.isolatedSystemTaskWorkerThreadCount = isolatedSystemTaskWorkerThreadCount; - } - - public int getSystemTaskMaxPollCount() { - return systemTaskMaxPollCount; - } - - public void setSystemTaskMaxPollCount(int systemTaskMaxPollCount) { - this.systemTaskMaxPollCount = systemTaskMaxPollCount; - } - - public Duration getAsyncUpdateShortRunningWorkflowDuration() { - return asyncUpdateShortRunningWorkflowDuration; - } - - public void setAsyncUpdateShortRunningWorkflowDuration( - Duration asyncUpdateShortRunningWorkflowDuration) { - this.asyncUpdateShortRunningWorkflowDuration = asyncUpdateShortRunningWorkflowDuration; - } - - public Duration getAsyncUpdateDelay() { - return asyncUpdateDelay; - } - - public void 
setAsyncUpdateDelay(Duration asyncUpdateDelay) { - this.asyncUpdateDelay = asyncUpdateDelay; - } - - public boolean isOwnerEmailMandatory() { - return ownerEmailMandatory; - } - - public void setOwnerEmailMandatory(boolean ownerEmailMandatory) { - this.ownerEmailMandatory = ownerEmailMandatory; - } - - public int getEventQueueSchedulerPollThreadCount() { - return eventQueueSchedulerPollThreadCount; - } - - public void setEventQueueSchedulerPollThreadCount(int eventQueueSchedulerPollThreadCount) { - this.eventQueueSchedulerPollThreadCount = eventQueueSchedulerPollThreadCount; - } - - public Duration getEventQueuePollInterval() { - return eventQueuePollInterval; - } - - public void setEventQueuePollInterval(Duration eventQueuePollInterval) { - this.eventQueuePollInterval = eventQueuePollInterval; - } - - public int getEventQueuePollCount() { - return eventQueuePollCount; - } - - public void setEventQueuePollCount(int eventQueuePollCount) { - this.eventQueuePollCount = eventQueuePollCount; - } - - public Duration getEventQueueLongPollTimeout() { - return eventQueueLongPollTimeout; - } - - public void setEventQueueLongPollTimeout(Duration eventQueueLongPollTimeout) { - this.eventQueueLongPollTimeout = eventQueueLongPollTimeout; - } - - public DataSize getWorkflowInputPayloadSizeThreshold() { - return workflowInputPayloadSizeThreshold; - } - - public void setWorkflowInputPayloadSizeThreshold(DataSize workflowInputPayloadSizeThreshold) { - this.workflowInputPayloadSizeThreshold = workflowInputPayloadSizeThreshold; - } - - public DataSize getMaxWorkflowInputPayloadSizeThreshold() { - return maxWorkflowInputPayloadSizeThreshold; - } - - public void setMaxWorkflowInputPayloadSizeThreshold( - DataSize maxWorkflowInputPayloadSizeThreshold) { - this.maxWorkflowInputPayloadSizeThreshold = maxWorkflowInputPayloadSizeThreshold; - } - - public DataSize getWorkflowOutputPayloadSizeThreshold() { - return workflowOutputPayloadSizeThreshold; - } - - public void 
setWorkflowOutputPayloadSizeThreshold(DataSize workflowOutputPayloadSizeThreshold) { - this.workflowOutputPayloadSizeThreshold = workflowOutputPayloadSizeThreshold; - } - - public DataSize getMaxWorkflowOutputPayloadSizeThreshold() { - return maxWorkflowOutputPayloadSizeThreshold; - } - - public void setMaxWorkflowOutputPayloadSizeThreshold( - DataSize maxWorkflowOutputPayloadSizeThreshold) { - this.maxWorkflowOutputPayloadSizeThreshold = maxWorkflowOutputPayloadSizeThreshold; - } - - public DataSize getTaskInputPayloadSizeThreshold() { - return taskInputPayloadSizeThreshold; - } - - public void setTaskInputPayloadSizeThreshold(DataSize taskInputPayloadSizeThreshold) { - this.taskInputPayloadSizeThreshold = taskInputPayloadSizeThreshold; - } - - public DataSize getMaxTaskInputPayloadSizeThreshold() { - return maxTaskInputPayloadSizeThreshold; - } - - public void setMaxTaskInputPayloadSizeThreshold(DataSize maxTaskInputPayloadSizeThreshold) { - this.maxTaskInputPayloadSizeThreshold = maxTaskInputPayloadSizeThreshold; - } - - public DataSize getTaskOutputPayloadSizeThreshold() { - return taskOutputPayloadSizeThreshold; - } - - public void setTaskOutputPayloadSizeThreshold(DataSize taskOutputPayloadSizeThreshold) { - this.taskOutputPayloadSizeThreshold = taskOutputPayloadSizeThreshold; - } - - public DataSize getMaxTaskOutputPayloadSizeThreshold() { - return maxTaskOutputPayloadSizeThreshold; - } - - public void setMaxTaskOutputPayloadSizeThreshold(DataSize maxTaskOutputPayloadSizeThreshold) { - this.maxTaskOutputPayloadSizeThreshold = maxTaskOutputPayloadSizeThreshold; - } - - public DataSize getMaxWorkflowVariablesPayloadSizeThreshold() { - return maxWorkflowVariablesPayloadSizeThreshold; - } - - public void setMaxWorkflowVariablesPayloadSizeThreshold( - DataSize maxWorkflowVariablesPayloadSizeThreshold) { - this.maxWorkflowVariablesPayloadSizeThreshold = maxWorkflowVariablesPayloadSizeThreshold; - } - - /** - * @return Returns all the configurations in a map. 
- */ - public Map getAll() { - Map map = new HashMap<>(); - Properties props = System.getProperties(); - props.forEach((key, value) -> map.put(key.toString(), value)); - return map; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/config/SchedulerConfiguration.java b/core/src/main/java/com/netflix/conductor/core/config/SchedulerConfiguration.java deleted file mode 100644 index 364406e10..000000000 --- a/core/src/main/java/com/netflix/conductor/core/config/SchedulerConfiguration.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.config; - -import java.util.concurrent.Executor; -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; - -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.scheduling.annotation.EnableAsync; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.scheduling.annotation.SchedulingConfigurer; -import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; -import org.springframework.scheduling.config.ScheduledTaskRegistrar; - -import rx.Scheduler; -import rx.schedulers.Schedulers; - -@Configuration(proxyBeanMethods = false) -@EnableScheduling -@EnableAsync -public class SchedulerConfiguration implements SchedulingConfigurer { - - public static final String SWEEPER_EXECUTOR_NAME = "WorkflowSweeperExecutor"; - - /** - * Used by some {@link com.netflix.conductor.core.events.queue.ObservableQueue} implementations. 
- * - * @see com.netflix.conductor.core.events.queue.ConductorObservableQueue - */ - @Bean - public Scheduler scheduler(ConductorProperties properties) { - ThreadFactory threadFactory = - new BasicThreadFactory.Builder() - .namingPattern("event-queue-poll-scheduler-thread-%d") - .build(); - Executor executorService = - Executors.newFixedThreadPool( - properties.getEventQueueSchedulerPollThreadCount(), threadFactory); - - return Schedulers.from(executorService); - } - - @Bean(SWEEPER_EXECUTOR_NAME) - public Executor sweeperExecutor(ConductorProperties properties) { - if (properties.getSweeperThreadCount() <= 0) { - throw new IllegalStateException( - "conductor.app.sweeper-thread-count must be greater than 0."); - } - ThreadFactory threadFactory = - new BasicThreadFactory.Builder().namingPattern("sweeper-thread-%d").build(); - return Executors.newFixedThreadPool(properties.getSweeperThreadCount(), threadFactory); - } - - @Override - public void configureTasks(ScheduledTaskRegistrar taskRegistrar) { - ThreadPoolTaskScheduler threadPoolTaskScheduler = new ThreadPoolTaskScheduler(); - threadPoolTaskScheduler.setPoolSize(3); // equal to the number of scheduled jobs - threadPoolTaskScheduler.setThreadNamePrefix("scheduled-task-pool-"); - threadPoolTaskScheduler.initialize(); - taskRegistrar.setTaskScheduler(threadPoolTaskScheduler); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java b/core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java deleted file mode 100644 index 5b905a285..000000000 --- a/core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java +++ /dev/null @@ -1,719 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.dal; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import javax.annotation.PreDestroy; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.dao.*; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import 
com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE; - -/** - * Service that acts as a facade for accessing execution data from the {@link ExecutionDAO}, {@link - * RateLimitingDAO} and {@link IndexDAO} storage layers - */ -@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") -@Component -public class ExecutionDAOFacade { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionDAOFacade.class); - - private static final String ARCHIVED_FIELD = "archived"; - private static final String RAW_JSON_FIELD = "rawJSON"; - - private final ExecutionDAO executionDAO; - private final QueueDAO queueDAO; - private final IndexDAO indexDAO; - private final RateLimitingDAO rateLimitingDao; - private final ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO; - private final PollDataDAO pollDataDAO; - private final ObjectMapper objectMapper; - private final ConductorProperties properties; - private final ExternalPayloadStorageUtils externalPayloadStorageUtils; - - private final ScheduledThreadPoolExecutor scheduledThreadPoolExecutor; - - public ExecutionDAOFacade( - ExecutionDAO executionDAO, - QueueDAO queueDAO, - IndexDAO indexDAO, - RateLimitingDAO rateLimitingDao, - ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO, - PollDataDAO pollDataDAO, - ObjectMapper objectMapper, - ConductorProperties properties, - ExternalPayloadStorageUtils externalPayloadStorageUtils) { - this.executionDAO = executionDAO; - this.queueDAO = queueDAO; - this.indexDAO = indexDAO; - this.rateLimitingDao = rateLimitingDao; - this.concurrentExecutionLimitDAO = concurrentExecutionLimitDAO; - this.pollDataDAO = pollDataDAO; - this.objectMapper = objectMapper; - this.properties = properties; - this.externalPayloadStorageUtils = externalPayloadStorageUtils; - this.scheduledThreadPoolExecutor = - new ScheduledThreadPoolExecutor( - 4, - (runnable, 
executor) -> { - LOGGER.warn( - "Request {} to delay updating index dropped in executor {}", - runnable, - executor); - Monitors.recordDiscardedIndexingCount("delayQueue"); - }); - this.scheduledThreadPoolExecutor.setRemoveOnCancelPolicy(true); - } - - @PreDestroy - public void shutdownExecutorService() { - try { - LOGGER.info("Gracefully shutdown executor service"); - scheduledThreadPoolExecutor.shutdown(); - if (scheduledThreadPoolExecutor.awaitTermination( - properties.getAsyncUpdateDelay().getSeconds(), TimeUnit.SECONDS)) { - LOGGER.debug("tasks completed, shutting down"); - } else { - LOGGER.warn( - "Forcing shutdown after waiting for {} seconds", - properties.getAsyncUpdateDelay()); - scheduledThreadPoolExecutor.shutdownNow(); - } - } catch (InterruptedException ie) { - LOGGER.warn( - "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); - scheduledThreadPoolExecutor.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - public WorkflowModel getWorkflowModel(String workflowId, boolean includeTasks) { - WorkflowModel workflowModel = getWorkflowModelFromDataStore(workflowId, includeTasks); - populateWorkflowAndTaskPayloadData(workflowModel); - return workflowModel; - } - - /** - * Fetches the {@link Workflow} object from the data store given the id. Attempts to fetch from - * {@link ExecutionDAO} first, if not found, attempts to fetch from {@link IndexDAO}. - * - * @param workflowId the id of the workflow to be fetched - * @param includeTasks if true, fetches the {@link Task} data in the workflow. - * @return the {@link Workflow} object - * @throws ApplicationException if - *

    - *
  • no such {@link Workflow} is found - *
  • parsing the {@link Workflow} object fails - *
- */ - public Workflow getWorkflow(String workflowId, boolean includeTasks) { - return getWorkflowModelFromDataStore(workflowId, includeTasks).toWorkflow(); - } - - private WorkflowModel getWorkflowModelFromDataStore(String workflowId, boolean includeTasks) { - WorkflowModel workflow = executionDAO.getWorkflow(workflowId, includeTasks); - if (workflow == null) { - LOGGER.debug("Workflow {} not found in executionDAO, checking indexDAO", workflowId); - String json = indexDAO.get(workflowId, RAW_JSON_FIELD); - if (json == null) { - String errorMsg = String.format("No such workflow found by id: %s", workflowId); - LOGGER.error(errorMsg); - throw new ApplicationException(ApplicationException.Code.NOT_FOUND, errorMsg); - } - - try { - workflow = objectMapper.readValue(json, WorkflowModel.class); - if (!includeTasks) { - workflow.getTasks().clear(); - } - } catch (IOException e) { - String errorMsg = String.format("Error reading workflow: %s", workflowId); - LOGGER.error(errorMsg); - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - return workflow; - } - - /** - * Retrieve all workflow executions with the given correlationId and workflow type Uses the - * {@link IndexDAO} to search across workflows if the {@link ExecutionDAO} cannot perform - * searches across workflows. 
- * - * @param workflowName, workflow type to be queried - * @param correlationId the correlation id to be queried - * @param includeTasks if true, fetches the {@link Task} data within the workflows - * @return the list of {@link Workflow} executions matching the correlationId - */ - public List getWorkflowsByCorrelationId( - String workflowName, String correlationId, boolean includeTasks) { - if (!executionDAO.canSearchAcrossWorkflows()) { - String query = - "correlationId='" + correlationId + "' AND workflowType='" + workflowName + "'"; - SearchResult result = indexDAO.searchWorkflows(query, "*", 0, 1000, null); - return result.getResults().stream() - .parallel() - .map( - workflowId -> { - try { - return getWorkflow(workflowId, includeTasks); - } catch (ApplicationException e) { - // This might happen when the workflow archival failed and the - // workflow was removed from primary datastore - LOGGER.error( - "Error getting the workflow: {} for correlationId: {} from datastore/index", - workflowId, - correlationId, - e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } - return executionDAO - .getWorkflowsByCorrelationId(workflowName, correlationId, includeTasks) - .stream() - .map(WorkflowModel::toWorkflow) - .collect(Collectors.toList()); - } - - public List getWorkflowsByName(String workflowName, Long startTime, Long endTime) { - return executionDAO.getWorkflowsByType(workflowName, startTime, endTime).stream() - .map(WorkflowModel::toWorkflow) - .collect(Collectors.toList()); - } - - public List getPendingWorkflowsByName(String workflowName, int version) { - return executionDAO.getPendingWorkflowsByType(workflowName, version).stream() - .map(WorkflowModel::toWorkflow) - .collect(Collectors.toList()); - } - - public List getRunningWorkflowIds(String workflowName, int version) { - return executionDAO.getRunningWorkflowIds(workflowName, version); - } - - public long getPendingWorkflowCount(String workflowName) { - return 
executionDAO.getPendingWorkflowCount(workflowName); - } - - /** - * Creates a new workflow in the data store - * - * @param workflowModel the workflow to be created - * @return the id of the created workflow - */ - public String createWorkflow(WorkflowModel workflowModel) { - externalizeWorkflowData(workflowModel); - executionDAO.createWorkflow(workflowModel); - // Add to decider queue - queueDAO.push( - DECIDER_QUEUE, - workflowModel.getWorkflowId(), - workflowModel.getPriority(), - properties.getWorkflowOffsetTimeout().getSeconds()); - if (properties.isAsyncIndexingEnabled()) { - indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow())); - } else { - indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow())); - } - return workflowModel.getWorkflowId(); - } - - private void externalizeTaskData(TaskModel taskModel) { - externalPayloadStorageUtils.verifyAndUpload( - taskModel, ExternalPayloadStorage.PayloadType.TASK_INPUT); - externalPayloadStorageUtils.verifyAndUpload( - taskModel, ExternalPayloadStorage.PayloadType.TASK_OUTPUT); - } - - private void externalizeWorkflowData(WorkflowModel workflowModel) { - externalPayloadStorageUtils.verifyAndUpload( - workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT); - externalPayloadStorageUtils.verifyAndUpload( - workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT); - } - - /** - * Updates the given workflow in the data store - * - * @param workflowModel the workflow tp be updated - * @return the id of the updated workflow - */ - public String updateWorkflow(WorkflowModel workflowModel) { - workflowModel.setUpdatedTime(System.currentTimeMillis()); - if (workflowModel.getStatus().isTerminal()) { - workflowModel.setEndTime(System.currentTimeMillis()); - } - externalizeWorkflowData(workflowModel); - executionDAO.updateWorkflow(workflowModel); - if (properties.isAsyncIndexingEnabled()) { - if (workflowModel.getStatus().isTerminal() - && workflowModel.getEndTime() - 
workflowModel.getCreateTime() - < properties.getAsyncUpdateShortRunningWorkflowDuration().toMillis()) { - final String workflowId = workflowModel.getWorkflowId(); - DelayWorkflowUpdate delayWorkflowUpdate = new DelayWorkflowUpdate(workflowId); - LOGGER.debug( - "Delayed updating workflow: {} in the index by {} seconds", - workflowId, - properties.getAsyncUpdateDelay()); - scheduledThreadPoolExecutor.schedule( - delayWorkflowUpdate, - properties.getAsyncUpdateDelay().getSeconds(), - TimeUnit.SECONDS); - Monitors.recordWorkerQueueSize( - "delayQueue", scheduledThreadPoolExecutor.getQueue().size()); - } else { - indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow())); - } - if (workflowModel.getStatus().isTerminal()) { - workflowModel - .getTasks() - .forEach( - taskModel -> - indexDAO.asyncIndexTask( - new TaskSummary(taskModel.toTask()))); - } - } else { - indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow())); - } - return workflowModel.getWorkflowId(); - } - - public void removeFromPendingWorkflow(String workflowType, String workflowId) { - executionDAO.removeFromPendingWorkflow(workflowType, workflowId); - } - - /** - * Removes the workflow from the data store. 
- * - * @param workflowId the id of the workflow to be removed - * @param archiveWorkflow if true, the workflow will be archived in the {@link IndexDAO} after - * removal from {@link ExecutionDAO} - */ - public void removeWorkflow(String workflowId, boolean archiveWorkflow) { - try { - WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true); - - removeWorkflowIndex(workflow, archiveWorkflow); - // remove workflow from DAO - try { - executionDAO.removeWorkflow(workflowId); - } catch (Exception ex) { - Monitors.recordDaoError("executionDao", "removeWorkflow"); - throw ex; - } - } catch (ApplicationException ae) { - throw ae; - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, - "Error removing workflow: " + workflowId, - e); - } - try { - queueDAO.remove(DECIDER_QUEUE, workflowId); - } catch (Exception e) { - LOGGER.info("Error removing workflow: {} from decider queue", workflowId, e); - } - } - - private void removeWorkflowIndex(WorkflowModel workflow, boolean archiveWorkflow) - throws JsonProcessingException { - if (archiveWorkflow) { - if (workflow.getStatus().isTerminal()) { - // Only allow archival if workflow is in terminal state - // DO NOT archive async, since if archival errors out, workflow data will be lost - indexDAO.updateWorkflow( - workflow.getWorkflowId(), - new String[] {RAW_JSON_FIELD, ARCHIVED_FIELD}, - new Object[] {objectMapper.writeValueAsString(workflow), true}); - } else { - throw new ApplicationException( - Code.INVALID_INPUT, - String.format( - "Cannot archive workflow: %s with status: %s", - workflow.getWorkflowId(), workflow.getStatus())); - } - } else { - // Not archiving, also remove workflow from index - indexDAO.asyncRemoveWorkflow(workflow.getWorkflowId()); - } - } - - public void removeWorkflowWithExpiry( - String workflowId, boolean archiveWorkflow, int ttlSeconds) { - try { - WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true); - - 
removeWorkflowIndex(workflow, archiveWorkflow); - // remove workflow from DAO with TTL - try { - executionDAO.removeWorkflowWithExpiry(workflowId, ttlSeconds); - } catch (Exception ex) { - Monitors.recordDaoError("executionDao", "removeWorkflow"); - throw ex; - } - } catch (ApplicationException ae) { - throw ae; - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, - "Error removing workflow: " + workflowId, - e); - } - } - - /** - * Reset the workflow state by removing from the {@link ExecutionDAO} and removing this workflow - * from the {@link IndexDAO}. - * - * @param workflowId the workflow id to be reset - */ - public void resetWorkflow(String workflowId) { - try { - getWorkflowModelFromDataStore(workflowId, true); - executionDAO.removeWorkflow(workflowId); - if (properties.isAsyncIndexingEnabled()) { - indexDAO.asyncRemoveWorkflow(workflowId); - } else { - indexDAO.removeWorkflow(workflowId); - } - } catch (ApplicationException ae) { - throw ae; - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, - "Error resetting workflow state: " + workflowId, - e); - } - } - - public List createTasks(List tasks) { - tasks.forEach(this::externalizeTaskData); - return executionDAO.createTasks(tasks); - } - - public List getTasksForWorkflow(String workflowId) { - return executionDAO.getTasksForWorkflow(workflowId).stream() - .map(TaskModel::toTask) - .collect(Collectors.toList()); - } - - public TaskModel getTaskModel(String taskId) { - TaskModel taskModel = getTaskFromDatastore(taskId); - if (taskModel != null) { - populateTaskData(taskModel); - } - return taskModel; - } - - public Task getTask(String taskId) { - TaskModel taskModel = getTaskFromDatastore(taskId); - if (taskModel != null) { - return taskModel.toTask(); - } - return null; - } - - private TaskModel getTaskFromDatastore(String taskId) { - return executionDAO.getTask(taskId); - } - - public List 
getTasksByName(String taskName, String startKey, int count) { - return executionDAO.getTasks(taskName, startKey, count).stream() - .map(TaskModel::toTask) - .collect(Collectors.toList()); - } - - public List getPendingTasksForTaskType(String taskType) { - return executionDAO.getPendingTasksForTaskType(taskType).stream() - .map(TaskModel::toTask) - .collect(Collectors.toList()); - } - - public long getInProgressTaskCount(String taskDefName) { - return executionDAO.getInProgressTaskCount(taskDefName); - } - - /** - * Sets the update time for the task. Sets the end time for the task (if task is in terminal - * state and end time is not set). Updates the task in the {@link ExecutionDAO} first, then - * stores it in the {@link IndexDAO}. - * - * @param taskModel the task to be updated in the data store - * @throws ApplicationException if the dao operations fail - */ - public void updateTask(TaskModel taskModel) { - try { - if (taskModel.getStatus() != null) { - if (!taskModel.getStatus().isTerminal() - || (taskModel.getStatus().isTerminal() && taskModel.getUpdateTime() == 0)) { - taskModel.setUpdateTime(System.currentTimeMillis()); - } - if (taskModel.getStatus().isTerminal() && taskModel.getEndTime() == 0) { - taskModel.setEndTime(System.currentTimeMillis()); - } - } - externalizeTaskData(taskModel); - executionDAO.updateTask(taskModel); - /* - * Indexing a task for every update adds a lot of volume. That is ok but if async indexing - * is enabled and tasks are stored in memory until a block has completed, we would lose a lot - * of tasks on a system failure. So only index for each update if async indexing is not enabled. - * If it *is* enabled, tasks will be indexed only when a workflow is in terminal state. 
- */ - if (!properties.isAsyncIndexingEnabled()) { - indexDAO.indexTask(new TaskSummary(taskModel.toTask())); - } - } catch (Exception e) { - String errorMsg = - String.format( - "Error updating task: %s in workflow: %s", - taskModel.getTaskId(), taskModel.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - - public void updateTasks(List tasks) { - tasks.forEach(this::updateTask); - } - - public void removeTask(String taskId) { - executionDAO.removeTask(taskId); - } - - public List getTaskPollData(String taskName) { - return pollDataDAO.getPollData(taskName); - } - - public List getAllPollData() { - return pollDataDAO.getAllPollData(); - } - - public PollData getTaskPollDataByDomain(String taskName, String domain) { - try { - return pollDataDAO.getPollData(taskName, domain); - } catch (Exception e) { - LOGGER.error( - "Error fetching pollData for task: '{}', domain: '{}'", taskName, domain, e); - return null; - } - } - - public void updateTaskLastPoll(String taskName, String domain, String workerId) { - try { - pollDataDAO.updateLastPollData(taskName, domain, workerId); - } catch (Exception e) { - LOGGER.error( - "Error updating PollData for task: {} in domain: {} from worker: {}", - taskName, - domain, - workerId, - e); - Monitors.error(this.getClass().getCanonicalName(), "updateTaskLastPoll"); - } - } - - /** - * Save the {@link EventExecution} to the data store Saves to {@link ExecutionDAO} first, if - * this succeeds then saves to the {@link IndexDAO}. - * - * @param eventExecution the {@link EventExecution} to be saved - * @return true if save succeeds, false otherwise. 
- */ - public boolean addEventExecution(EventExecution eventExecution) { - boolean added = executionDAO.addEventExecution(eventExecution); - - if (added) { - indexEventExecution(eventExecution); - } - - return added; - } - - public void updateEventExecution(EventExecution eventExecution) { - executionDAO.updateEventExecution(eventExecution); - indexEventExecution(eventExecution); - } - - private void indexEventExecution(EventExecution eventExecution) { - if (properties.isEventExecutionIndexingEnabled()) { - if (properties.isAsyncIndexingEnabled()) { - indexDAO.asyncAddEventExecution(eventExecution); - } else { - indexDAO.addEventExecution(eventExecution); - } - } - } - - public void removeEventExecution(EventExecution eventExecution) { - executionDAO.removeEventExecution(eventExecution); - } - - public boolean exceedsInProgressLimit(TaskModel task) { - return concurrentExecutionLimitDAO.exceedsLimit(task); - } - - public boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef) { - return rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef); - } - - public void addTaskExecLog(List logs) { - if (properties.isTaskExecLogIndexingEnabled()) { - if (properties.isAsyncIndexingEnabled()) { - indexDAO.asyncAddTaskExecutionLogs(logs); - } else { - indexDAO.addTaskExecutionLogs(logs); - } - } - } - - public void addMessage(String queue, Message message) { - if (properties.isAsyncIndexingEnabled()) { - indexDAO.asyncAddMessage(queue, message); - } else { - indexDAO.addMessage(queue, message); - } - } - - public SearchResult searchWorkflows( - String query, String freeText, int start, int count, List sort) { - return indexDAO.searchWorkflows(query, freeText, start, count, sort); - } - - public SearchResult searchTasks( - String query, String freeText, int start, int count, List sort) { - return indexDAO.searchTasks(query, freeText, start, count, sort); - } - - public List getTaskExecutionLogs(String taskId) { - return 
properties.isTaskExecLogIndexingEnabled() - ? indexDAO.getTaskExecutionLogs(taskId) - : Collections.emptyList(); - } - - /** - * Populates the workflow input data and the tasks input/output data if stored in external - * payload storage. - * - * @param workflowModel the workflowModel for which the payload data needs to be populated from - * external storage (if applicable) - */ - private void populateWorkflowAndTaskPayloadData(WorkflowModel workflowModel) { - if (StringUtils.isNotBlank(workflowModel.getExternalInputPayloadStoragePath())) { - Map workflowInputParams = - externalPayloadStorageUtils.downloadPayload( - workflowModel.getExternalInputPayloadStoragePath()); - Monitors.recordExternalPayloadStorageUsage( - workflowModel.getWorkflowName(), - ExternalPayloadStorage.Operation.READ.toString(), - ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.toString()); - workflowModel.internalizeInput(workflowInputParams); - } - - if (StringUtils.isNotBlank(workflowModel.getExternalOutputPayloadStoragePath())) { - Map workflowOutputParams = - externalPayloadStorageUtils.downloadPayload( - workflowModel.getExternalOutputPayloadStoragePath()); - Monitors.recordExternalPayloadStorageUsage( - workflowModel.getWorkflowName(), - ExternalPayloadStorage.Operation.READ.toString(), - ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.toString()); - workflowModel.internalizeOutput(workflowOutputParams); - } - - workflowModel.getTasks().forEach(this::populateTaskData); - } - - private void populateTaskData(TaskModel taskModel) { - if (StringUtils.isNotBlank(taskModel.getExternalOutputPayloadStoragePath())) { - Map outputData = - externalPayloadStorageUtils.downloadPayload( - taskModel.getExternalOutputPayloadStoragePath()); - taskModel.internalizeOutput(outputData); - Monitors.recordExternalPayloadStorageUsage( - taskModel.getTaskDefName(), - ExternalPayloadStorage.Operation.READ.toString(), - ExternalPayloadStorage.PayloadType.TASK_OUTPUT.toString()); - } - - if 
(StringUtils.isNotBlank(taskModel.getExternalInputPayloadStoragePath())) { - Map inputData = - externalPayloadStorageUtils.downloadPayload( - taskModel.getExternalInputPayloadStoragePath()); - taskModel.internalizeInput(inputData); - Monitors.recordExternalPayloadStorageUsage( - taskModel.getTaskDefName(), - ExternalPayloadStorage.Operation.READ.toString(), - ExternalPayloadStorage.PayloadType.TASK_INPUT.toString()); - } - } - - class DelayWorkflowUpdate implements Runnable { - - private final String workflowId; - - DelayWorkflowUpdate(String workflowId) { - this.workflowId = workflowId; - } - - @Override - public void run() { - try { - WorkflowModel workflowModel = executionDAO.getWorkflow(workflowId, false); - indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow())); - } catch (Exception e) { - LOGGER.error("Unable to update workflow: {}", workflowId, e); - } - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java deleted file mode 100644 index 6b8139652..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.Map; - -import com.netflix.conductor.common.metadata.events.EventHandler; - -public interface ActionProcessor { - - Map execute( - EventHandler.Action action, Object payloadObject, String event, String messageId); -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/DefaultEventProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/DefaultEventProcessor.java deleted file mode 100644 index 19767c478..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/DefaultEventProcessor.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventExecution.Status; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.events.EventHandler.Action; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.utils.JsonUtils; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.MetadataService; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.spotify.futures.CompletableFutures; - -import static 
com.netflix.conductor.core.utils.Utils.isTransientException; - -/** - * Event Processor is used to dispatch actions configured in the event handlers, based on incoming - * events to the event queues. - * - *

Set conductor.default-event-processor.enabled=false to disable event processing. - */ -@Component -@ConditionalOnProperty( - name = "conductor.default-event-processor.enabled", - havingValue = "true", - matchIfMissing = true) -public class DefaultEventProcessor { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventProcessor.class); - - private final MetadataService metadataService; - private final ExecutionService executionService; - private final ActionProcessor actionProcessor; - - private final ExecutorService eventActionExecutorService; - private final ObjectMapper objectMapper; - private final JsonUtils jsonUtils; - private final boolean isEventMessageIndexingEnabled; - private final Map evaluators; - private final RetryTemplate retryTemplate; - - public DefaultEventProcessor( - ExecutionService executionService, - MetadataService metadataService, - ActionProcessor actionProcessor, - JsonUtils jsonUtils, - ConductorProperties properties, - ObjectMapper objectMapper, - Map evaluators, - @Qualifier("onTransientErrorRetryTemplate") RetryTemplate retryTemplate) { - this.executionService = executionService; - this.metadataService = metadataService; - this.actionProcessor = actionProcessor; - this.objectMapper = objectMapper; - this.jsonUtils = jsonUtils; - this.evaluators = evaluators; - - if (properties.getEventProcessorThreadCount() <= 0) { - throw new IllegalStateException( - "Cannot set event processor thread count to <=0. 
To disable event " - + "processing, set conductor.default-event-processor.enabled=false."); - } - ThreadFactory threadFactory = - new BasicThreadFactory.Builder() - .namingPattern("event-action-executor-thread-%d") - .build(); - eventActionExecutorService = - Executors.newFixedThreadPool( - properties.getEventProcessorThreadCount(), threadFactory); - - this.isEventMessageIndexingEnabled = properties.isEventMessageIndexingEnabled(); - this.retryTemplate = retryTemplate; - LOGGER.info("Event Processing is ENABLED"); - } - - public void handle(ObservableQueue queue, Message msg) { - List transientFailures = null; - Boolean executionFailed = false; - try { - if (isEventMessageIndexingEnabled) { - executionService.addMessage(queue.getName(), msg); - } - String event = queue.getType() + ":" + queue.getName(); - LOGGER.debug("Evaluating message: {} for event: {}", msg.getId(), event); - transientFailures = executeEvent(event, msg); - } catch (Exception e) { - executionFailed = true; - LOGGER.error("Error handling message: {} on queue:{}", msg, queue.getName(), e); - Monitors.recordEventQueueMessagesError(queue.getType(), queue.getName()); - } finally { - if (executionFailed || CollectionUtils.isEmpty(transientFailures)) { - queue.ack(Collections.singletonList(msg)); - LOGGER.debug("Message: {} acked on queue: {}", msg.getId(), queue.getName()); - } else if (queue.rePublishIfNoAck()) { - // re-submit this message to the queue, to be retried later - // This is needed for queues with no unack timeout, since messages are removed - // from the queue - queue.publish(Collections.singletonList(msg)); - LOGGER.debug("Message: {} published to queue: {}", msg.getId(), queue.getName()); - } - Monitors.recordEventQueueMessagesHandled(queue.getType(), queue.getName()); - } - } - - /** - * Executes all the actions configured on all the event handlers triggered by the {@link - * Message} on the queue If any of the actions on an event handler fails due to a transient - * failure, the 
execution is not persisted such that it can be retried - * - * @return a list of {@link EventExecution} that failed due to transient failures. - */ - protected List executeEvent(String event, Message msg) throws Exception { - List eventHandlerList = metadataService.getEventHandlersForEvent(event, true); - Object payloadObject = getPayloadObject(msg.getPayload()); - - List transientFailures = new ArrayList<>(); - for (EventHandler eventHandler : eventHandlerList) { - String condition = eventHandler.getCondition(); - String evaluatorType = eventHandler.getEvaluatorType(); - // Set default to true so that if condition is not specified, it falls through - // to process the event. - Boolean success = true; - if (StringUtils.isNotEmpty(condition) && evaluators.get(evaluatorType) != null) { - Object result = - evaluators - .get(evaluatorType) - .evaluate(condition, jsonUtils.expand(payloadObject)); - success = ScriptEvaluator.toBoolean(result); - } else if (StringUtils.isNotEmpty(condition)) { - LOGGER.debug("Checking condition: {} for event: {}", condition, event); - success = ScriptEvaluator.evalBool(condition, jsonUtils.expand(payloadObject)); - } - - if (!success) { - String id = msg.getId() + "_" + 0; - EventExecution eventExecution = new EventExecution(id, msg.getId()); - eventExecution.setCreated(System.currentTimeMillis()); - eventExecution.setEvent(eventHandler.getEvent()); - eventExecution.setName(eventHandler.getName()); - eventExecution.setStatus(Status.SKIPPED); - eventExecution.getOutput().put("msg", msg.getPayload()); - eventExecution.getOutput().put("condition", condition); - executionService.addEventExecution(eventExecution); - LOGGER.debug( - "Condition: {} not successful for event: {} with payload: {}", - condition, - eventHandler.getEvent(), - msg.getPayload()); - continue; - } - - CompletableFuture> future = - executeActionsForEventHandler(eventHandler, msg); - future.whenComplete( - (result, error) -> - result.forEach( - eventExecution -> { - if 
(error != null - || eventExecution.getStatus() - == Status.IN_PROGRESS) { - transientFailures.add(eventExecution); - } else { - executionService.updateEventExecution( - eventExecution); - } - })) - .get(); - } - return processTransientFailures(transientFailures); - } - - /** - * Remove the event executions which failed temporarily. - * - * @param eventExecutions The event executions which failed with a transient error. - * @return The event executions which failed with a transient error. - */ - protected List processTransientFailures(List eventExecutions) { - eventExecutions.forEach(executionService::removeEventExecution); - return eventExecutions; - } - - /** - * @param eventHandler the {@link EventHandler} for which the actions are to be executed - * @param msg the {@link Message} that triggered the event - * @return a {@link CompletableFuture} holding a list of {@link EventExecution}s for the {@link - * Action}s executed in the event handler - */ - protected CompletableFuture> executeActionsForEventHandler( - EventHandler eventHandler, Message msg) { - List> futuresList = new ArrayList<>(); - int i = 0; - for (Action action : eventHandler.getActions()) { - String id = msg.getId() + "_" + i++; - EventExecution eventExecution = new EventExecution(id, msg.getId()); - eventExecution.setCreated(System.currentTimeMillis()); - eventExecution.setEvent(eventHandler.getEvent()); - eventExecution.setName(eventHandler.getName()); - eventExecution.setAction(action.getAction()); - eventExecution.setStatus(Status.IN_PROGRESS); - if (executionService.addEventExecution(eventExecution)) { - futuresList.add( - CompletableFuture.supplyAsync( - () -> - execute( - eventExecution, - action, - getPayloadObject(msg.getPayload())), - eventActionExecutorService)); - } else { - LOGGER.warn("Duplicate delivery/execution of message: {}", msg.getId()); - } - } - return CompletableFutures.allAsList(futuresList); - } - - /** - * @param eventExecution the instance of {@link EventExecution} - * 
@param action the {@link Action} to be executed for the event - * @param payload the {@link Message#getPayload()} - * @return the event execution updated with execution output, if the execution is - * completed/failed with non-transient error the input event execution, if the execution - * failed due to transient error - */ - protected EventExecution execute(EventExecution eventExecution, Action action, Object payload) { - try { - LOGGER.debug( - "Executing action: {} for event: {} with messageId: {} with payload: {}", - action.getAction(), - eventExecution.getId(), - eventExecution.getMessageId(), - payload); - - Map output = - retryTemplate.execute( - context -> - actionProcessor.execute( - action, - payload, - eventExecution.getEvent(), - eventExecution.getMessageId())); - if (output != null) { - eventExecution.getOutput().putAll(output); - } - eventExecution.setStatus(Status.COMPLETED); - Monitors.recordEventExecutionSuccess( - eventExecution.getEvent(), - eventExecution.getName(), - eventExecution.getAction().name()); - } catch (RuntimeException e) { - LOGGER.error( - "Error executing action: {} for event: {} with messageId: {}", - action.getAction(), - eventExecution.getEvent(), - eventExecution.getMessageId(), - e); - if (!isTransientException(e)) { - // not a transient error, fail the event execution - eventExecution.setStatus(Status.FAILED); - eventExecution.getOutput().put("exception", e.getMessage()); - Monitors.recordEventExecutionError( - eventExecution.getEvent(), - eventExecution.getName(), - eventExecution.getAction().name(), - e.getClass().getSimpleName()); - } - } - return eventExecution; - } - - private Object getPayloadObject(String payload) { - Object payloadObject = null; - if (payload != null) { - try { - payloadObject = objectMapper.readValue(payload, Object.class); - } catch (Exception e) { - payloadObject = payload; - } - } - return payloadObject; - } -} diff --git 
a/core/src/main/java/com/netflix/conductor/core/events/DefaultEventQueueManager.java b/core/src/main/java/com/netflix/conductor/core/events/DefaultEventQueueManager.java deleted file mode 100644 index 2b5abac22..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/DefaultEventQueueManager.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.Lifecycle; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.core.LifecycleAwareComponent; -import com.netflix.conductor.core.events.queue.DefaultEventQueueProcessor; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel.Status; - -/** - * Manages the event queues registered in the system and sets up listeners for these. - * - *

Manages the lifecycle of - - * - *

    - *
  • Queues registered with event handlers - *
  • Default event queues that Conductor listens on - *
- * - * @see DefaultEventQueueProcessor - */ -@Component -@ConditionalOnProperty( - name = "conductor.default-event-processor.enabled", - havingValue = "true", - matchIfMissing = true) -public class DefaultEventQueueManager extends LifecycleAwareComponent implements EventQueueManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventQueueManager.class); - - private final EventHandlerDAO eventHandlerDAO; - private final EventQueues eventQueues; - private final DefaultEventProcessor defaultEventProcessor; - private final Map eventToQueueMap = new ConcurrentHashMap<>(); - private final Map defaultQueues; - - public DefaultEventQueueManager( - Map defaultQueues, - EventHandlerDAO eventHandlerDAO, - EventQueues eventQueues, - DefaultEventProcessor defaultEventProcessor) { - this.defaultQueues = defaultQueues; - this.eventHandlerDAO = eventHandlerDAO; - this.eventQueues = eventQueues; - this.defaultEventProcessor = defaultEventProcessor; - } - - /** - * @return Returns a map of queues which are active. 
Key is event name and value is queue URI - */ - @Override - public Map getQueues() { - Map queues = new HashMap<>(); - eventToQueueMap.forEach((key, value) -> queues.put(key, value.getName())); - return queues; - } - - @Override - public Map> getQueueSizes() { - Map> queues = new HashMap<>(); - eventToQueueMap.forEach( - (key, value) -> { - Map size = new HashMap<>(); - size.put(value.getName(), value.size()); - queues.put(key, size); - }); - return queues; - } - - @Override - public void doStart() { - eventToQueueMap.forEach( - (event, queue) -> { - LOGGER.info("Start listening for events: {}", event); - queue.start(); - }); - defaultQueues.forEach( - (status, queue) -> { - LOGGER.info( - "Start listening on default queue {} for status {}", - queue.getName(), - status); - queue.start(); - }); - } - - @Override - public void doStop() { - eventToQueueMap.forEach( - (event, queue) -> { - LOGGER.info("Stop listening for events: {}", event); - queue.stop(); - }); - defaultQueues.forEach( - (status, queue) -> { - LOGGER.info( - "Stop listening on default queue {} for status {}", - status, - queue.getName()); - queue.stop(); - }); - } - - @Scheduled(fixedDelay = 60_000) - public void refreshEventQueues() { - try { - Set events = - eventHandlerDAO.getAllEventHandlers().stream() - .map(EventHandler::getEvent) - .collect(Collectors.toSet()); - - List createdQueues = new LinkedList<>(); - events.forEach( - event -> - eventToQueueMap.computeIfAbsent( - event, - s -> { - ObservableQueue q = eventQueues.getQueue(event); - createdQueues.add(q); - return q; - })); - - // start listening on all of the created queues - createdQueues.stream() - .filter(Objects::nonNull) - .peek(Lifecycle::start) - .forEach(this::listen); - - } catch (Exception e) { - Monitors.error(getClass().getSimpleName(), "refresh"); - LOGGER.error("refresh event queues failed", e); - } - } - - private void listen(ObservableQueue queue) { - queue.observe().subscribe((Message msg) -> 
defaultEventProcessor.handle(queue, msg)); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/EventQueueManager.java b/core/src/main/java/com/netflix/conductor/core/events/EventQueueManager.java deleted file mode 100644 index fc6a568f6..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/EventQueueManager.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.Map; - -public interface EventQueueManager { - - Map getQueues(); - - Map> getQueueSizes(); -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/EventQueueProvider.java b/core/src/main/java/com/netflix/conductor/core/events/EventQueueProvider.java deleted file mode 100644 index 8bd11f929..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/EventQueueProvider.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import org.springframework.lang.NonNull; - -import com.netflix.conductor.core.events.queue.ObservableQueue; - -public interface EventQueueProvider { - - String getQueueType(); - - /** - * Creates or reads the {@link ObservableQueue} for the given queueURI. - * - * @param queueURI The URI of the queue. - * @return The {@link ObservableQueue} implementation for the queueURI. - * @throws IllegalArgumentException thrown when an {@link ObservableQueue} can not be created - * for the queueURI. - */ - @NonNull - ObservableQueue getQueue(String queueURI) throws IllegalArgumentException; -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java b/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java deleted file mode 100644 index b4ab2e388..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.lang.NonNull; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.utils.ParametersUtils; - -/** Holders for internal event queues */ -@Component -public class EventQueues { - - public static final String EVENT_QUEUE_PROVIDERS_QUALIFIER = "EventQueueProviders"; - - private static final Logger LOGGER = LoggerFactory.getLogger(EventQueues.class); - - private final ParametersUtils parametersUtils; - private final Map providers; - - @Autowired - public EventQueues( - @Qualifier(EVENT_QUEUE_PROVIDERS_QUALIFIER) Map providers, - ParametersUtils parametersUtils) { - this.providers = providers; - this.parametersUtils = parametersUtils; - } - - public List getProviders() { - return providers.values().stream() - .map(p -> p.getClass().getName()) - .collect(Collectors.toList()); - } - - @NonNull - public ObservableQueue getQueue(String eventType) { - String event = parametersUtils.replace(eventType).toString(); - int index = event.indexOf(':'); - if (index == -1) { - throw new IllegalArgumentException("Illegal event " + event); - } - - String type = event.substring(0, index); - String queueURI = event.substring(index + 1); - EventQueueProvider provider = providers.get(type); - if (provider != null) { - return 
provider.getQueue(queueURI); - } else { - throw new IllegalArgumentException("Unknown queue type " + type); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/ScriptEvaluator.java b/core/src/main/java/com/netflix/conductor/core/events/ScriptEvaluator.java deleted file mode 100644 index 506ded221..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/ScriptEvaluator.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import javax.script.Bindings; -import javax.script.ScriptEngine; -import javax.script.ScriptEngineManager; -import javax.script.ScriptException; - -public class ScriptEvaluator { - - private static final ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn"); - - private ScriptEvaluator() {} - - /** - * Evaluates the script with the help of input provided but converts the result to a boolean - * value. - * - * @param script Script to be evaluated. - * @param input Input parameters. - * @throws ScriptException - * @return True or False based on the result of the evaluated expression. - */ - public static Boolean evalBool(String script, Object input) throws ScriptException { - return toBoolean(eval(script, input)); - } - - /** - * Evaluates the script with the help of input provided. - * - * @param script Script to be evaluated. - * @param input Input parameters. - * @throws ScriptException - * @return Generic object, the result of the evaluated expression. - */ - public static Object eval(String script, Object input) throws ScriptException { - Bindings bindings = engine.createBindings(); - bindings.put("$", input); - return engine.eval(script, bindings); - } - - /** - * Converts a generic object into boolean value. Checks if the Object is of type Boolean and - * returns the value of the Boolean object. Checks if the Object is of type Number and returns - * True if the value is greater than 0. - * - * @param input Generic object that will be inspected to return a boolean value. - * @return True or False based on the input provided. 
- */ - public static Boolean toBoolean(Object input) { - if (input instanceof Boolean) { - return ((Boolean) input); - } else if (input instanceof Number) { - return ((Number) input).doubleValue() > 0; - } - return false; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/SimpleActionProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/SimpleActionProcessor.java deleted file mode 100644 index 21e8ac279..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/SimpleActionProcessor.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.events.EventHandler.Action; -import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow; -import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.utils.JsonUtils; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * Action Processor subscribes to the Event Actions queue and processes the actions (e.g. 
start - * workflow etc) - */ -@Component -public class SimpleActionProcessor implements ActionProcessor { - - private static final Logger LOGGER = LoggerFactory.getLogger(SimpleActionProcessor.class); - - private final WorkflowExecutor workflowExecutor; - private final ParametersUtils parametersUtils; - private final JsonUtils jsonUtils; - - public SimpleActionProcessor( - WorkflowExecutor workflowExecutor, - ParametersUtils parametersUtils, - JsonUtils jsonUtils) { - this.workflowExecutor = workflowExecutor; - this.parametersUtils = parametersUtils; - this.jsonUtils = jsonUtils; - } - - public Map execute( - Action action, Object payloadObject, String event, String messageId) { - - LOGGER.debug( - "Executing action: {} for event: {} with messageId:{}", - action.getAction(), - event, - messageId); - - Object jsonObject = payloadObject; - if (action.isExpandInlineJSON()) { - jsonObject = jsonUtils.expand(payloadObject); - } - - switch (action.getAction()) { - case start_workflow: - return startWorkflow(action, jsonObject, event, messageId); - case complete_task: - return completeTask( - action, - jsonObject, - action.getComplete_task(), - TaskModel.Status.COMPLETED, - event, - messageId); - case fail_task: - return completeTask( - action, - jsonObject, - action.getFail_task(), - TaskModel.Status.FAILED, - event, - messageId); - default: - break; - } - throw new UnsupportedOperationException( - "Action not supported " + action.getAction() + " for event " + event); - } - - private Map completeTask( - Action action, - Object payload, - TaskDetails taskDetails, - TaskModel.Status status, - String event, - String messageId) { - - Map input = new HashMap<>(); - input.put("workflowId", taskDetails.getWorkflowId()); - input.put("taskId", taskDetails.getTaskId()); - input.put("taskRefName", taskDetails.getTaskRefName()); - input.putAll(taskDetails.getOutput()); - - Map replaced = parametersUtils.replace(input, payload); - String workflowId = (String) 
replaced.get("workflowId"); - String taskId = (String) replaced.get("taskId"); - String taskRefName = (String) replaced.get("taskRefName"); - - TaskModel taskModel = null; - if (StringUtils.isNotEmpty(taskId)) { - taskModel = workflowExecutor.getTask(taskId); - } else if (StringUtils.isNotEmpty(workflowId) && StringUtils.isNotEmpty(taskRefName)) { - WorkflowModel workflow = workflowExecutor.getWorkflow(workflowId, true); - if (workflow == null) { - replaced.put("error", "No workflow found with ID: " + workflowId); - return replaced; - } - taskModel = workflow.getTaskByRefName(taskRefName); - // Task can be loopover task.In such case find corresponding task and update - List loopOverTaskList = - workflow.getTasks().stream() - .filter( - t -> - TaskUtils.removeIterationFromTaskRefName( - t.getReferenceTaskName()) - .equals(taskRefName)) - .collect(Collectors.toList()); - if (!loopOverTaskList.isEmpty()) { - // Find loopover task with the highest iteration value - taskModel = - loopOverTaskList.stream() - .sorted(Comparator.comparingInt(TaskModel::getIteration).reversed()) - .findFirst() - .get(); - } - } - - if (taskModel == null) { - replaced.put( - "error", - "No task found with taskId: " - + taskId - + ", reference name: " - + taskRefName - + ", workflowId: " - + workflowId); - return replaced; - } - - taskModel.setStatus(status); - taskModel.setOutputData(replaced); - taskModel.setOutputMessage(taskDetails.getOutputMessage()); - taskModel.getOutputData().put("conductor.event.messageId", messageId); - taskModel.getOutputData().put("conductor.event.name", event); - - try { - workflowExecutor.updateTask(new TaskResult(taskModel.toTask())); - LOGGER.debug( - "Updated task: {} in workflow:{} with status: {} for event: {} for message:{}", - taskId, - workflowId, - status, - event, - messageId); - } catch (RuntimeException e) { - Monitors.recordEventActionError( - action.getAction().name(), taskModel.getTaskType(), event); - LOGGER.error( - "Error updating task: {} in 
workflow: {} in action: {} for event: {} for message: {}", - taskDetails.getTaskRefName(), - taskDetails.getWorkflowId(), - action.getAction(), - event, - messageId, - e); - replaced.put("error", e.getMessage()); - throw e; - } - return replaced; - } - - private Map startWorkflow( - Action action, Object payload, String event, String messageId) { - StartWorkflow params = action.getStart_workflow(); - Map output = new HashMap<>(); - try { - Map inputParams = params.getInput(); - Map workflowInput = parametersUtils.replace(inputParams, payload); - - Map paramsMap = new HashMap<>(); - Optional.ofNullable(params.getCorrelationId()) - .ifPresent(value -> paramsMap.put("correlationId", value)); - Map replaced = parametersUtils.replace(paramsMap, payload); - - workflowInput.put("conductor.event.messageId", messageId); - workflowInput.put("conductor.event.name", event); - - String workflowId = - workflowExecutor.startWorkflow( - params.getName(), - params.getVersion(), - Optional.ofNullable(replaced.get("correlationId")) - .map(Object::toString) - .orElse(params.getCorrelationId()), - workflowInput, - null, - event, - params.getTaskToDomain()); - output.put("workflowId", workflowId); - LOGGER.debug( - "Started workflow: {}/{}/{} for event: {} for message:{}", - params.getName(), - params.getVersion(), - workflowId, - event, - messageId); - - } catch (RuntimeException e) { - Monitors.recordEventActionError(action.getAction().name(), params.getName(), event); - LOGGER.error( - "Error starting workflow: {}, version: {}, for event: {} for message: {}", - params.getName(), - params.getVersion(), - event, - messageId, - e); - output.put("error", e.getMessage()); - throw e; - } - return output; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorEventQueueProvider.java b/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorEventQueueProvider.java deleted file mode 100644 index 76e530101..000000000 --- 
a/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorEventQueueProvider.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events.queue; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.lang.NonNull; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.EventQueueProvider; -import com.netflix.conductor.dao.QueueDAO; - -import rx.Scheduler; - -/** - * Default provider for {@link com.netflix.conductor.core.events.queue.ObservableQueue} that listens - * on the conductor queue prefix. - * - *

Set conductor.event-queues.default.enabled=false to disable the default queue. - * - * @see ConductorObservableQueue - */ -@Component -@ConditionalOnProperty( - name = "conductor.event-queues.default.enabled", - havingValue = "true", - matchIfMissing = true) -public class ConductorEventQueueProvider implements EventQueueProvider { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConductorEventQueueProvider.class); - private final Map queues = new ConcurrentHashMap<>(); - private final QueueDAO queueDAO; - private final ConductorProperties properties; - private final Scheduler scheduler; - - public ConductorEventQueueProvider( - QueueDAO queueDAO, ConductorProperties properties, Scheduler scheduler) { - this.queueDAO = queueDAO; - this.properties = properties; - this.scheduler = scheduler; - } - - @Override - public String getQueueType() { - return "conductor"; - } - - @Override - @NonNull - public ObservableQueue getQueue(String queueURI) { - return queues.computeIfAbsent( - queueURI, - q -> new ConductorObservableQueue(queueURI, queueDAO, properties, scheduler)); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorObservableQueue.java b/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorObservableQueue.java deleted file mode 100644 index 649cc7b50..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorObservableQueue.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events.queue; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; - -import rx.Observable; -import rx.Observable.OnSubscribe; -import rx.Scheduler; - -/** - * An {@link ObservableQueue} implementation using the underlying {@link QueueDAO} implementation. - */ -public class ConductorObservableQueue implements ObservableQueue { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConductorObservableQueue.class); - - private static final String QUEUE_TYPE = "conductor"; - - private final String queueName; - private final QueueDAO queueDAO; - private final long pollTimeMS; - private final int longPollTimeout; - private final int pollCount; - private final Scheduler scheduler; - private volatile boolean running; - - ConductorObservableQueue( - String queueName, - QueueDAO queueDAO, - ConductorProperties properties, - Scheduler scheduler) { - this.queueName = queueName; - this.queueDAO = queueDAO; - this.pollTimeMS = properties.getEventQueuePollInterval().toMillis(); - this.pollCount = properties.getEventQueuePollCount(); - this.longPollTimeout = (int) properties.getEventQueueLongPollTimeout().toMillis(); - this.scheduler = scheduler; - } - - @Override - public Observable observe() { - OnSubscribe subscriber = getOnSubscribe(); - return Observable.create(subscriber); - } - - @Override 
- public List ack(List messages) { - for (Message msg : messages) { - queueDAO.ack(queueName, msg.getId()); - } - return messages.stream().map(Message::getId).collect(Collectors.toList()); - } - - public void setUnackTimeout(Message message, long unackTimeout) { - queueDAO.setUnackTimeout(queueName, message.getId(), unackTimeout); - } - - @Override - public void publish(List messages) { - queueDAO.push(queueName, messages); - } - - @Override - public long size() { - return queueDAO.getSize(queueName); - } - - @Override - public String getType() { - return QUEUE_TYPE; - } - - @Override - public String getName() { - return queueName; - } - - @Override - public String getURI() { - return queueName; - } - - private List receiveMessages() { - try { - List messages = queueDAO.pollMessages(queueName, pollCount, longPollTimeout); - Monitors.recordEventQueueMessagesProcessed(QUEUE_TYPE, queueName, messages.size()); - Monitors.recordEventQueuePollSize(queueName, messages.size()); - return messages; - } catch (Exception exception) { - LOGGER.error("Exception while getting messages from queueDAO", exception); - Monitors.recordObservableQMessageReceivedErrors(QUEUE_TYPE); - } - return new ArrayList<>(); - } - - private OnSubscribe getOnSubscribe() { - return subscriber -> { - Observable interval = - Observable.interval(pollTimeMS, TimeUnit.MILLISECONDS, scheduler); - interval.flatMap( - (Long x) -> { - if (!isRunning()) { - LOGGER.debug( - "Component stopped, skip listening for messages from Conductor Queue"); - return Observable.from(Collections.emptyList()); - } - List messages = receiveMessages(); - return Observable.from(messages); - }) - .subscribe(subscriber::onNext, subscriber::onError); - }; - } - - @Override - public void start() { - LOGGER.info("Started listening to {}:{}", getClass().getSimpleName(), queueName); - running = true; - } - - @Override - public void stop() { - LOGGER.info("Stopped listening to {}:{}", getClass().getSimpleName(), queueName); - running = 
false; - } - - @Override - public boolean isRunning() { - return running; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/queue/DefaultEventQueueProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/queue/DefaultEventQueueProcessor.java deleted file mode 100644 index 835d18c3a..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/queue/DefaultEventQueueProcessor.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events.queue; - -import java.util.*; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.TaskModel.Status; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; - -/** - * Monitors and processes messages on the default event queues that Conductor listens on. - * - *

The default event queue type is controlled using the property: - * conductor.default-event-queue.type - */ -@Component -@ConditionalOnProperty( - name = "conductor.default-event-queue-processor.enabled", - havingValue = "true", - matchIfMissing = true) -public class DefaultEventQueueProcessor { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventQueueProcessor.class); - private final Map queues; - private final WorkflowExecutor workflowExecutor; - private static final TypeReference> _mapType = new TypeReference<>() {}; - private final ObjectMapper objectMapper; - - public DefaultEventQueueProcessor( - Map queues, - WorkflowExecutor workflowExecutor, - ObjectMapper objectMapper) { - this.queues = queues; - this.workflowExecutor = workflowExecutor; - this.objectMapper = objectMapper; - queues.forEach(this::startMonitor); - LOGGER.info( - "DefaultEventQueueProcessor initialized with {} queues", queues.entrySet().size()); - } - - private void startMonitor(Status status, ObservableQueue queue) { - - queue.observe() - .subscribe( - (Message msg) -> { - try { - LOGGER.debug("Got message {}", msg.getPayload()); - String payload = msg.getPayload(); - JsonNode payloadJSON = objectMapper.readTree(payload); - String externalId = getValue("externalId", payloadJSON); - if (externalId == null || "".equals(externalId)) { - LOGGER.error("No external Id found in the payload {}", payload); - queue.ack(Collections.singletonList(msg)); - return; - } - - JsonNode json = objectMapper.readTree(externalId); - String workflowId = getValue("workflowId", json); - String taskRefName = getValue("taskRefName", json); - String taskId = getValue("taskId", json); - if (workflowId == null || "".equals(workflowId)) { - // This is a bad message, we cannot process it - LOGGER.error( - "No workflow id found in the message. 
{}", payload); - queue.ack(Collections.singletonList(msg)); - return; - } - WorkflowModel workflow = - workflowExecutor.getWorkflow(workflowId, true); - Optional optionalTaskModel; - if (StringUtils.isNotEmpty(taskId)) { - optionalTaskModel = - workflow.getTasks().stream() - .filter( - task -> - !task.getStatus().isTerminal() - && task.getTaskId() - .equals(taskId)) - .findFirst(); - } else if (StringUtils.isEmpty(taskRefName)) { - LOGGER.error( - "No taskRefName found in the message. If there is only one WAIT task, will mark it as completed. {}", - payload); - optionalTaskModel = - workflow.getTasks().stream() - .filter( - task -> - !task.getStatus().isTerminal() - && task.getTaskType() - .equals( - TASK_TYPE_WAIT)) - .findFirst(); - } else { - optionalTaskModel = - workflow.getTasks().stream() - .filter( - task -> - !task.getStatus().isTerminal() - && task.getReferenceTaskName() - .equals( - taskRefName)) - .findFirst(); - } - - if (optionalTaskModel.isEmpty()) { - LOGGER.error( - "No matching tasks found to be marked as completed for workflow {}, taskRefName {}, taskId {}", - workflowId, - taskRefName, - taskId); - queue.ack(Collections.singletonList(msg)); - return; - } - - Task task = optionalTaskModel.get().toTask(); - task.setStatus(TaskModel.mapToTaskStatus(status)); - task.getOutputData() - .putAll(objectMapper.convertValue(payloadJSON, _mapType)); - workflowExecutor.updateTask(new TaskResult(task)); - - List failures = queue.ack(Collections.singletonList(msg)); - if (!failures.isEmpty()) { - LOGGER.error("Not able to ack the messages {}", failures); - } - } catch (JsonParseException e) { - LOGGER.error("Bad message? 
: {} ", msg, e); - queue.ack(Collections.singletonList(msg)); - - } catch (ApplicationException e) { - if (e.getCode().equals(Code.NOT_FOUND)) { - LOGGER.error( - "Workflow ID specified is not valid for this environment"); - queue.ack(Collections.singletonList(msg)); - } - LOGGER.error("Error processing message: {}", msg, e); - } catch (Exception e) { - LOGGER.error("Error processing message: {}", msg, e); - } - }, - (Throwable t) -> LOGGER.error(t.getMessage(), t)); - LOGGER.info("QueueListener::STARTED...listening for " + queue.getName()); - } - - private String getValue(String fieldName, JsonNode json) { - JsonNode node = json.findValue(fieldName); - if (node == null) { - return null; - } - return node.textValue(); - } - - public Map size() { - Map size = new HashMap<>(); - queues.forEach((key, queue) -> size.put(queue.getName(), queue.size())); - return size; - } - - public Map queues() { - Map size = new HashMap<>(); - queues.forEach((key, queue) -> size.put(key, queue.getURI())); - return size; - } - - public void updateByTaskRefName( - String workflowId, String taskRefName, Map output, Status status) - throws Exception { - Map externalIdMap = new HashMap<>(); - externalIdMap.put("workflowId", workflowId); - externalIdMap.put("taskRefName", taskRefName); - - update(externalIdMap, output, status); - } - - public void updateByTaskId( - String workflowId, String taskId, Map output, Status status) - throws Exception { - Map externalIdMap = new HashMap<>(); - externalIdMap.put("workflowId", workflowId); - externalIdMap.put("taskId", taskId); - - update(externalIdMap, output, status); - } - - private void update( - Map externalIdMap, Map output, Status status) - throws Exception { - Map outputMap = new HashMap<>(); - - outputMap.put("externalId", objectMapper.writeValueAsString(externalIdMap)); - outputMap.putAll(output); - - Message msg = - new Message( - UUID.randomUUID().toString(), - objectMapper.writeValueAsString(outputMap), - null); - ObservableQueue queue = 
queues.get(status); - if (queue == null) { - throw new IllegalArgumentException( - "There is no queue for handling " + status.toString() + " status"); - } - queue.publish(Collections.singletonList(msg)); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/queue/Message.java b/core/src/main/java/com/netflix/conductor/core/events/queue/Message.java deleted file mode 100644 index b7d33961f..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/queue/Message.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events.queue; - -import java.util.Objects; - -public class Message { - - private String payload; - private String id; - private String receipt; - private int priority; - - public Message() {} - - public Message(String id, String payload, String receipt) { - this.payload = payload; - this.id = id; - this.receipt = receipt; - } - - public Message(String id, String payload, String receipt, int priority) { - this.payload = payload; - this.id = id; - this.receipt = receipt; - this.priority = priority; - } - - /** - * @return the payload - */ - public String getPayload() { - return payload; - } - - /** - * @param payload the payload to set - */ - public void setPayload(String payload) { - this.payload = payload; - } - - /** - * @return the id - */ - public String getId() { - return id; - } - - /** - * @param id the id to set - */ - public void setId(String id) { - this.id = id; - } - - /** - * @return Receipt attached to the message - */ - public String getReceipt() { - return receipt; - } - - /** - * @param receipt Receipt attached to the message - */ - public void setReceipt(String receipt) { - this.receipt = receipt; - } - - /** - * Gets the message priority - * - * @return priority of message. - */ - public int getPriority() { - return priority; - } - - /** - * Sets the message priority (between 0 and 99). Higher priority message is retrieved ahead of - * lower priority ones. 
- * - * @param priority the priority of message (between 0 and 99) - */ - public void setPriority(int priority) { - this.priority = priority; - } - - @Override - public String toString() { - return id; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Message message = (Message) o; - return Objects.equals(payload, message.payload) - && Objects.equals(id, message.id) - && Objects.equals(priority, message.priority) - && Objects.equals(receipt, message.receipt); - } - - @Override - public int hashCode() { - return Objects.hash(payload, id, receipt, priority); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/events/queue/ObservableQueue.java b/core/src/main/java/com/netflix/conductor/core/events/queue/ObservableQueue.java deleted file mode 100644 index 718cded76..000000000 --- a/core/src/main/java/com/netflix/conductor/core/events/queue/ObservableQueue.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events.queue; - -import java.util.List; - -import org.springframework.context.Lifecycle; - -import rx.Observable; - -public interface ObservableQueue extends Lifecycle { - - /** - * @return An observable for the given queue - */ - Observable observe(); - - /** - * @return Type of the queue - */ - String getType(); - - /** - * @return Name of the queue - */ - String getName(); - - /** - * @return URI identifier for the queue. - */ - String getURI(); - - /** - * @param messages to be ack'ed - * @return the id of the ones which could not be ack'ed - */ - List ack(List messages); - - /** - * @param messages Messages to be published - */ - void publish(List messages); - - /** - * Used to determine if the queue supports unack/visibility timeout such that the messages will - * re-appear on the queue after a specific period and are available to be picked up again and - * retried. - * - * @return - false if the queue message need not be re-published to the queue for retriability - - * true if the message must be re-published to the queue for retriability - */ - default boolean rePublishIfNoAck() { - return false; - } - - /** - * Extend the lease of the unacknowledged message for longer period. - * - * @param message Message for which the timeout has to be changed - * @param unackTimeout timeout in milliseconds for which the unack lease should be extended. - * (replaces the current value with this value) - */ - void setUnackTimeout(Message message, long unackTimeout); - - /** - * @return Size of the queue - no. messages pending. 
Note: Depending upon the implementation, - * this can be an approximation - */ - long size(); - - /** Used to close queue instance prior to remove from queues */ - default void close() {} -} diff --git a/core/src/main/java/com/netflix/conductor/core/exception/ApplicationException.java b/core/src/main/java/com/netflix/conductor/core/exception/ApplicationException.java deleted file mode 100644 index f1a086d40..000000000 --- a/core/src/main/java/com/netflix/conductor/core/exception/ApplicationException.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.exception; - -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -public class ApplicationException extends RuntimeException { - - public enum Code { - INVALID_INPUT(400), - INTERNAL_ERROR(500), - NOT_FOUND(404), - CONFLICT(409), - UNAUTHORIZED(403), - BACKEND_ERROR(500); - - private final int statusCode; - - Code(int statusCode) { - this.statusCode = statusCode; - } - - public int getStatusCode() { - return statusCode; - } - } - - private final Code code; - - public boolean isRetryable() { - return this.code == Code.BACKEND_ERROR; - } - - public ApplicationException(String msg, Throwable t) { - this(Code.INTERNAL_ERROR, msg, t); - } - - public ApplicationException(Code code, String msg, Throwable t) { - super(code + " - " + msg, t); - this.code = code; - } - - public ApplicationException(Code code, Throwable t) { - super(code.name(), t); - this.code = code; - } - - public ApplicationException(Code code, String message) { - super(message); - this.code = code; - } - - public int getHttpStatusCode() { - return this.code.getStatusCode(); - } - - public Code getCode() { - return this.code; - } - - public String getTrace() { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(baos); - this.printStackTrace(ps); - ps.flush(); - return baos.toString(); - } - - public Map toMap() { - HashMap map = new LinkedHashMap<>(); - map.put("code", code.name()); - map.put("message", super.getMessage()); - map.put("retryable", isRetryable()); - return map; - } -} diff --git 
a/core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java b/core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java deleted file mode 100644 index 366c4864f..000000000 --- a/core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.exception; - -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.model.WorkflowModel.Status.FAILED; - -public class TerminateWorkflowException extends RuntimeException { - - private final WorkflowModel.Status workflowStatus; - private final TaskModel task; - - public TerminateWorkflowException(String reason) { - this(reason, FAILED); - } - - public TerminateWorkflowException(String reason, WorkflowModel.Status workflowStatus) { - this(reason, workflowStatus, null); - } - - public TerminateWorkflowException( - String reason, WorkflowModel.Status workflowStatus, TaskModel task) { - super(reason); - this.workflowStatus = workflowStatus; - this.task = task; - } - - public WorkflowModel.Status getWorkflowStatus() { - return workflowStatus; - } - - public TaskModel getTask() { - return task; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java deleted file mode 100644 index cb5d737d3..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -@Component -public class AsyncSystemTaskExecutor { - - private final ExecutionDAOFacade executionDAOFacade; - private final QueueDAO queueDAO; - private final MetadataDAO metadataDAO; - private final long queueTaskMessagePostponeSecs; - private final long systemTaskCallbackTime; - private final WorkflowExecutor workflowExecutor; - - private static final Logger LOGGER = LoggerFactory.getLogger(AsyncSystemTaskExecutor.class); - - public AsyncSystemTaskExecutor( - ExecutionDAOFacade executionDAOFacade, - QueueDAO queueDAO, - MetadataDAO metadataDAO, - ConductorProperties conductorProperties, - WorkflowExecutor workflowExecutor) { - this.executionDAOFacade = executionDAOFacade; - this.queueDAO = queueDAO; - this.metadataDAO = metadataDAO; - this.workflowExecutor = workflowExecutor; - this.systemTaskCallbackTime = - conductorProperties.getSystemTaskWorkerCallbackDuration().getSeconds(); - this.queueTaskMessagePostponeSecs = - conductorProperties.getTaskExecutionPostponeDuration().getSeconds(); - } - - /** - * Executes 
and persists the results of an async {@link WorkflowSystemTask}. - * - * @param systemTask The {@link WorkflowSystemTask} to be executed. - * @param taskId The id of the {@link TaskModel} object. - */ - public void execute(WorkflowSystemTask systemTask, String taskId) { - TaskModel task = loadTaskQuietly(taskId); - if (task == null) { - LOGGER.error("TaskId: {} could not be found while executing {}", taskId, systemTask); - return; - } - - LOGGER.debug("Task: {} fetched from execution DAO for taskId: {}", task, taskId); - String queueName = QueueUtils.getQueueName(task); - if (task.getStatus().isTerminal()) { - // Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this - // can happen! - LOGGER.info("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId()); - queueDAO.remove(queueName, task.getTaskId()); - return; - } - - if (task.getStatus().equals(TaskModel.Status.SCHEDULED)) { - if (executionDAOFacade.exceedsInProgressLimit(task)) { - LOGGER.warn( - "Concurrent Execution limited for {}:{}", taskId, task.getTaskDefName()); - postponeQuietly(queueName, task); - return; - } - if (task.getRateLimitPerFrequency() > 0 - && executionDAOFacade.exceedsRateLimitPerFrequency( - task, metadataDAO.getTaskDef(task.getTaskDefName()))) { - LOGGER.warn( - "RateLimit Execution limited for {}:{}, limit:{}", - taskId, - task.getTaskDefName(), - task.getRateLimitPerFrequency()); - postponeQuietly(queueName, task); - return; - } - } - - boolean hasTaskExecutionCompleted = false; - String workflowId = task.getWorkflowInstanceId(); - // if we are here the Task object is updated and needs to be persisted regardless of an - // exception - try { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - - if (workflow.getStatus().isTerminal()) { - LOGGER.info( - "Workflow {} has been completed for {}/{}", - workflow.toShortString(), - systemTask, - task.getTaskId()); - if (!task.getStatus().isTerminal()) { - 
task.setStatus(TaskModel.Status.CANCELED); - task.setReasonForIncompletion( - String.format( - "Workflow is in %s state", workflow.getStatus().toString())); - } - queueDAO.remove(queueName, task.getTaskId()); - return; - } - - LOGGER.debug( - "Executing {}/{} in {} state", - task.getTaskType(), - task.getTaskId(), - task.getStatus()); - - boolean isTaskAsyncComplete = systemTask.isAsyncComplete(task); - if (task.getStatus() == TaskModel.Status.SCHEDULED || !isTaskAsyncComplete) { - task.incrementPollCount(); - } - - if (task.getStatus() == TaskModel.Status.SCHEDULED) { - task.setStartTime(System.currentTimeMillis()); - Monitors.recordQueueWaitTime(task.getTaskDefName(), task.getQueueWaitTime()); - systemTask.start(workflow, task, workflowExecutor); - } else if (task.getStatus() == TaskModel.Status.IN_PROGRESS) { - systemTask.execute(workflow, task, workflowExecutor); - } - - // Update message in Task queue based on Task status - // Remove asyncComplete system tasks from the queue that are not in SCHEDULED state - if (isTaskAsyncComplete && task.getStatus() != TaskModel.Status.SCHEDULED) { - queueDAO.remove(queueName, task.getTaskId()); - hasTaskExecutionCompleted = true; - } else if (task.getStatus().isTerminal()) { - task.setEndTime(System.currentTimeMillis()); - queueDAO.remove(queueName, task.getTaskId()); - hasTaskExecutionCompleted = true; - LOGGER.debug("{} removed from queue: {}", task, queueName); - } else { - task.setCallbackAfterSeconds(systemTaskCallbackTime); - queueDAO.postpone( - queueName, - task.getTaskId(), - task.getWorkflowPriority(), - systemTaskCallbackTime); - LOGGER.debug("{} postponed in queue: {}", task, queueName); - } - - LOGGER.debug( - "Finished execution of {}/{}-{}", - systemTask, - task.getTaskId(), - task.getStatus()); - } catch (Exception e) { - Monitors.error(AsyncSystemTaskExecutor.class.getSimpleName(), "executeSystemTask"); - LOGGER.error("Error executing system task - {}, with id: {}", systemTask, taskId, e); - } finally { - 
executionDAOFacade.updateTask(task); - // if the current task execution has completed, then the workflow needs to be evaluated - if (hasTaskExecutionCompleted) { - workflowExecutor.decide(workflowId); - } - } - } - - private void postponeQuietly(String queueName, TaskModel task) { - try { - queueDAO.postpone( - queueName, - task.getTaskId(), - task.getWorkflowPriority(), - queueTaskMessagePostponeSecs); - } catch (Exception e) { - LOGGER.error("Error postponing task: {} in queue: {}", task.getTaskId(), queueName); - } - } - - private TaskModel loadTaskQuietly(String taskId) { - try { - return executionDAOFacade.getTaskModel(taskId); - } catch (Exception e) { - return null; - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java deleted file mode 100644 index 18bda3918..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ /dev/null @@ -1,888 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.time.Duration; -import java.util.*; -import java.util.function.Predicate; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.mapper.TaskMapper; -import com.netflix.conductor.core.execution.mapper.TaskMapperContext; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static 
com.netflix.conductor.common.metadata.tasks.TaskType.TERMINATE; -import static com.netflix.conductor.model.TaskModel.Status.*; - -/** - * Decider evaluates the state of the workflow by inspecting the current state along with the - * blueprint. The result of the evaluation is either to schedule further tasks, complete/fail the - * workflow or do nothing. - */ -@Service -public class DeciderService { - - private static final Logger LOGGER = LoggerFactory.getLogger(DeciderService.class); - - private final IDGenerator idGenerator; - private final ParametersUtils parametersUtils; - private final ExternalPayloadStorageUtils externalPayloadStorageUtils; - private final MetadataDAO metadataDAO; - private final SystemTaskRegistry systemTaskRegistry; - private final long taskPendingTimeThresholdMins; - - private final Map taskMappers; - - private final Predicate isNonPendingTask = - task -> !task.isRetried() && !task.getStatus().equals(SKIPPED) && !task.isExecuted(); - - private final Predicate containsSuccessfulTerminateTask = - workflow -> - workflow.getTasks().stream() - .anyMatch( - task -> - TERMINATE.name().equals(task.getTaskType()) - && task.getStatus().isTerminal() - && task.getStatus().isSuccessful()); - - public DeciderService( - IDGenerator idGenerator, - ParametersUtils parametersUtils, - MetadataDAO metadataDAO, - ExternalPayloadStorageUtils externalPayloadStorageUtils, - SystemTaskRegistry systemTaskRegistry, - @Qualifier("taskMappersByTaskType") Map taskMappers, - @Value("${conductor.app.taskPendingTimeThreshold:60m}") - Duration taskPendingTimeThreshold) { - this.idGenerator = idGenerator; - this.metadataDAO = metadataDAO; - this.parametersUtils = parametersUtils; - this.taskMappers = taskMappers; - this.externalPayloadStorageUtils = externalPayloadStorageUtils; - this.taskPendingTimeThresholdMins = taskPendingTimeThreshold.toMinutes(); - this.systemTaskRegistry = systemTaskRegistry; - } - - public DeciderOutcome decide(WorkflowModel workflow) throws 
TerminateWorkflowException { - - // In case of a new workflow the list of tasks will be empty. - final List tasks = workflow.getTasks(); - // Filter the list of tasks and include only tasks that are not executed, - // not marked to be skipped and not ready for rerun. - // For a new workflow, the list of unprocessedTasks will be empty - List unprocessedTasks = - tasks.stream() - .filter(t -> !t.getStatus().equals(SKIPPED) && !t.isExecuted()) - .collect(Collectors.toList()); - - List tasksToBeScheduled = new LinkedList<>(); - if (unprocessedTasks.isEmpty()) { - // this is the flow that the new workflow will go through - tasksToBeScheduled = startWorkflow(workflow); - if (tasksToBeScheduled == null) { - tasksToBeScheduled = new LinkedList<>(); - } - } - return decide(workflow, tasksToBeScheduled); - } - - private DeciderOutcome decide(final WorkflowModel workflow, List preScheduledTasks) - throws TerminateWorkflowException { - - DeciderOutcome outcome = new DeciderOutcome(); - - if (workflow.getStatus().isTerminal()) { - // you cannot evaluate a terminal workflow - LOGGER.debug( - "Workflow {} is already finished. 
Reason: {}", - workflow, - workflow.getReasonForIncompletion()); - return outcome; - } - - checkWorkflowTimeout(workflow); - - if (workflow.getStatus().equals(WorkflowModel.Status.PAUSED)) { - LOGGER.debug("Workflow " + workflow.getWorkflowId() + " is paused"); - return outcome; - } - - // Filter the list of tasks and include only tasks that are not retried, not executed - // marked to be skipped and not part of System tasks that is DECISION, FORK, JOIN - // This list will be empty for a new workflow being started - List pendingTasks = - workflow.getTasks().stream().filter(isNonPendingTask).collect(Collectors.toList()); - - // Get all the tasks that have not completed their lifecycle yet - // This list will be empty for a new workflow - Set executedTaskRefNames = - workflow.getTasks().stream() - .filter(TaskModel::isExecuted) - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toSet()); - - Map tasksToBeScheduled = new LinkedHashMap<>(); - - preScheduledTasks.forEach( - preScheduledTask -> { - tasksToBeScheduled.put( - preScheduledTask.getReferenceTaskName(), preScheduledTask); - }); - - // A new workflow does not enter this code branch - for (TaskModel pendingTask : pendingTasks) { - - if (systemTaskRegistry.isSystemTask(pendingTask.getTaskType()) - && !pendingTask.getStatus().isTerminal()) { - tasksToBeScheduled.putIfAbsent(pendingTask.getReferenceTaskName(), pendingTask); - executedTaskRefNames.remove(pendingTask.getReferenceTaskName()); - } - - Optional taskDefinition = pendingTask.getTaskDefinition(); - if (taskDefinition.isEmpty()) { - taskDefinition = - Optional.ofNullable( - workflow.getWorkflowDefinition() - .getTaskByRefName( - pendingTask.getReferenceTaskName())) - .map(WorkflowTask::getTaskDefinition); - } - - if (taskDefinition.isPresent()) { - checkTaskTimeout(taskDefinition.get(), pendingTask); - checkTaskPollTimeout(taskDefinition.get(), pendingTask); - // If the task has not been updated for "responseTimeoutSeconds" then mark task as - // 
TIMED_OUT - if (isResponseTimedOut(taskDefinition.get(), pendingTask)) { - timeoutTask(taskDefinition.get(), pendingTask); - } - } - - if (!pendingTask.getStatus().isSuccessful()) { - WorkflowTask workflowTask = pendingTask.getWorkflowTask(); - if (workflowTask == null) { - workflowTask = - workflow.getWorkflowDefinition() - .getTaskByRefName(pendingTask.getReferenceTaskName()); - } - - Optional retryTask = - retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow); - if (retryTask.isPresent()) { - tasksToBeScheduled.put(retryTask.get().getReferenceTaskName(), retryTask.get()); - executedTaskRefNames.remove(retryTask.get().getReferenceTaskName()); - outcome.tasksToBeUpdated.add(pendingTask); - } else { - pendingTask.setStatus(COMPLETED_WITH_ERRORS); - } - } - - if (!pendingTask.isExecuted() - && !pendingTask.isRetried() - && pendingTask.getStatus().isTerminal()) { - pendingTask.setExecuted(true); - List nextTasks = getNextTask(workflow, pendingTask); - if (pendingTask.isLoopOverTask() - && !TaskType.DO_WHILE.name().equals(pendingTask.getTaskType()) - && !nextTasks.isEmpty()) { - nextTasks = filterNextLoopOverTasks(nextTasks, pendingTask, workflow); - } - nextTasks.forEach( - nextTask -> - tasksToBeScheduled.putIfAbsent( - nextTask.getReferenceTaskName(), nextTask)); - outcome.tasksToBeUpdated.add(pendingTask); - LOGGER.debug( - "Scheduling Tasks from {}, next = {} for workflowId: {}", - pendingTask.getTaskDefName(), - nextTasks.stream() - .map(TaskModel::getTaskDefName) - .collect(Collectors.toList()), - workflow.getWorkflowId()); - } - } - - // All the tasks that need to scheduled are added to the outcome, in case of - List unScheduledTasks = - tasksToBeScheduled.values().stream() - .filter(task -> !executedTaskRefNames.contains(task.getReferenceTaskName())) - .collect(Collectors.toList()); - if (!unScheduledTasks.isEmpty()) { - LOGGER.debug( - "Scheduling Tasks: {} for workflow: {}", - unScheduledTasks.stream() - .map(TaskModel::getTaskDefName) - 
.collect(Collectors.toList()), - workflow.getWorkflowId()); - outcome.tasksToBeScheduled.addAll(unScheduledTasks); - } - if (containsSuccessfulTerminateTask.test(workflow) - || (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow))) { - LOGGER.debug("Marking workflow: {} as complete.", workflow); - outcome.isComplete = true; - } - - return outcome; - } - - @VisibleForTesting - List filterNextLoopOverTasks( - List tasks, TaskModel pendingTask, WorkflowModel workflow) { - - // Update the task reference name and iteration - tasks.forEach( - nextTask -> { - nextTask.setReferenceTaskName( - TaskUtils.appendIteration( - nextTask.getReferenceTaskName(), pendingTask.getIteration())); - nextTask.setIteration(pendingTask.getIteration()); - }); - - List tasksInWorkflow = - workflow.getTasks().stream() - .filter( - runningTask -> - runningTask.getStatus().equals(TaskModel.Status.IN_PROGRESS) - || runningTask.getStatus().isTerminal()) - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toList()); - - return tasks.stream() - .filter( - runningTask -> - !tasksInWorkflow.contains(runningTask.getReferenceTaskName())) - .collect(Collectors.toList()); - } - - private List startWorkflow(WorkflowModel workflow) - throws TerminateWorkflowException { - final WorkflowDef workflowDef = workflow.getWorkflowDefinition(); - - LOGGER.debug("Starting workflow: {}", workflow); - - // The tasks will be empty in case of new workflow - List tasks = workflow.getTasks(); - // Check if the workflow is a re-run case or if it is a new workflow execution - if (workflow.getReRunFromWorkflowId() == null || tasks.isEmpty()) { - - if (workflowDef.getTasks().isEmpty()) { - throw new TerminateWorkflowException( - "No tasks found to be executed", WorkflowModel.Status.COMPLETED); - } - - WorkflowTask taskToSchedule = - workflowDef - .getTasks() - .get(0); // Nothing is running yet - so schedule the first task - // Loop until a non-skipped task is found - while 
(isTaskSkipped(taskToSchedule, workflow)) { - taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName()); - } - - // In case of a new workflow, the first non-skippable task will be scheduled - return getTasksToBeScheduled(workflow, taskToSchedule, 0); - } - - // Get the first task to schedule - TaskModel rerunFromTask = - tasks.stream() - .findFirst() - .map( - task -> { - task.setStatus(SCHEDULED); - task.setRetried(true); - task.setRetryCount(0); - return task; - }) - .orElseThrow( - () -> { - String reason = - String.format( - "The workflow %s is marked for re-run from %s but could not find the starting task", - workflow.getWorkflowId(), - workflow.getReRunFromWorkflowId()); - return new TerminateWorkflowException(reason); - }); - - return Collections.singletonList(rerunFromTask); - } - - /** - * Updates the workflow output. - * - * @param workflow the workflow instance - * @param task if not null, the output of this task will be copied to workflow output if no - * output parameters are specified in the workflow definition if null, the output of the - * last task in the workflow will be copied to workflow output of no output parameters are - * specified in the workflow definition - */ - void updateWorkflowOutput(final WorkflowModel workflow, TaskModel task) { - List allTasks = workflow.getTasks(); - if (allTasks.isEmpty()) { - return; - } - - Map output = new HashMap<>(); - Optional optionalTask = - allTasks.stream() - .filter( - t -> - TaskType.TERMINATE.name().equals(t.getTaskType()) - && t.getStatus().isTerminal() - && t.getStatus().isSuccessful()) - .findFirst(); - if (optionalTask.isPresent()) { - TaskModel terminateTask = optionalTask.get(); - if (StringUtils.isNotBlank(terminateTask.getExternalOutputPayloadStoragePath())) { - output = - externalPayloadStorageUtils.downloadPayload( - terminateTask.getExternalOutputPayloadStoragePath()); - Monitors.recordExternalPayloadStorageUsage( - terminateTask.getTaskDefName(), - 
Operation.READ.toString(), - PayloadType.TASK_OUTPUT.toString()); - } else if (!terminateTask.getOutputData().isEmpty()) { - output = terminateTask.getOutputData(); - } - } else { - TaskModel last = Optional.ofNullable(task).orElse(allTasks.get(allTasks.size() - 1)); - WorkflowDef workflowDef = workflow.getWorkflowDefinition(); - if (workflowDef.getOutputParameters() != null - && !workflowDef.getOutputParameters().isEmpty()) { - output = - parametersUtils.getTaskInput( - workflowDef.getOutputParameters(), workflow, null, null); - } else if (StringUtils.isNotBlank(last.getExternalOutputPayloadStoragePath())) { - output = - externalPayloadStorageUtils.downloadPayload( - last.getExternalOutputPayloadStoragePath()); - Monitors.recordExternalPayloadStorageUsage( - last.getTaskDefName(), - Operation.READ.toString(), - PayloadType.TASK_OUTPUT.toString()); - } else { - output = last.getOutputData(); - } - } - workflow.setOutput(output); - } - - public boolean checkForWorkflowCompletion(final WorkflowModel workflow) - throws TerminateWorkflowException { - List allTasks = workflow.getTasks(); - if (allTasks.isEmpty()) { - return false; - } - - if (containsSuccessfulTerminateTask.test(workflow)) { - return true; - } - - Map taskStatusMap = new HashMap<>(); - workflow.getTasks() - .forEach(task -> taskStatusMap.put(task.getReferenceTaskName(), task.getStatus())); - - List workflowTasks = workflow.getWorkflowDefinition().getTasks(); - boolean allCompletedSuccessfully = - workflowTasks.stream() - .parallel() - .allMatch( - wftask -> { - TaskModel.Status status = - taskStatusMap.get(wftask.getTaskReferenceName()); - return status != null - && status.isSuccessful() - && status.isTerminal(); - }); - - boolean noPendingTasks = - taskStatusMap.values().stream().allMatch(TaskModel.Status::isTerminal); - - boolean noPendingSchedule = - workflow.getTasks().stream() - .parallel() - .noneMatch( - wftask -> { - String next = getNextTasksToBeScheduled(workflow, wftask); - return next != 
null && !taskStatusMap.containsKey(next); - }); - - return allCompletedSuccessfully && noPendingTasks && noPendingSchedule; - } - - List getNextTask(WorkflowModel workflow, TaskModel task) { - final WorkflowDef workflowDef = workflow.getWorkflowDefinition(); - - // Get the following task after the last completed task - if (systemTaskRegistry.isSystemTask(task.getTaskType()) - && (TaskType.TASK_TYPE_DECISION.equals(task.getTaskType()) - || TaskType.TASK_TYPE_SWITCH.equals(task.getTaskType()))) { - if (task.getInputData().get("hasChildren") != null) { - return Collections.emptyList(); - } - } - - String taskReferenceName = - task.isLoopOverTask() - ? TaskUtils.removeIterationFromTaskRefName(task.getReferenceTaskName()) - : task.getReferenceTaskName(); - WorkflowTask taskToSchedule = workflowDef.getNextTask(taskReferenceName); - while (isTaskSkipped(taskToSchedule, workflow)) { - taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName()); - } - if (taskToSchedule != null && TaskType.DO_WHILE.name().equals(taskToSchedule.getType())) { - // check if already has this DO_WHILE task, ignore it if it already exists - String nextTaskReferenceName = taskToSchedule.getTaskReferenceName(); - if (workflow.getTasks().stream() - .anyMatch( - runningTask -> - runningTask - .getReferenceTaskName() - .equals(nextTaskReferenceName))) { - return Collections.emptyList(); - } - } - if (taskToSchedule != null) { - return getTasksToBeScheduled(workflow, taskToSchedule, 0); - } - - return Collections.emptyList(); - } - - private String getNextTasksToBeScheduled(WorkflowModel workflow, TaskModel task) { - final WorkflowDef def = workflow.getWorkflowDefinition(); - - String taskReferenceName = task.getReferenceTaskName(); - WorkflowTask taskToSchedule = def.getNextTask(taskReferenceName); - while (isTaskSkipped(taskToSchedule, workflow)) { - taskToSchedule = def.getNextTask(taskToSchedule.getTaskReferenceName()); - } - return taskToSchedule == null ? 
null : taskToSchedule.getTaskReferenceName(); - } - - @VisibleForTesting - Optional retry( - TaskDef taskDefinition, - WorkflowTask workflowTask, - TaskModel task, - WorkflowModel workflow) - throws TerminateWorkflowException { - - int retryCount = task.getRetryCount(); - - if (taskDefinition == null) { - taskDefinition = metadataDAO.getTaskDef(task.getTaskDefName()); - } - - final int expectedRetryCount = - taskDefinition == null - ? 0 - : Optional.ofNullable(workflowTask) - .map(WorkflowTask::getRetryCount) - .orElse(taskDefinition.getRetryCount()); - if (!task.getStatus().isRetriable() - || TaskType.isBuiltIn(task.getTaskType()) - || expectedRetryCount <= retryCount) { - if (workflowTask != null && workflowTask.isOptional()) { - return Optional.empty(); - } - WorkflowModel.Status status; - switch (task.getStatus()) { - case CANCELED: - status = WorkflowModel.Status.TERMINATED; - break; - case TIMED_OUT: - status = WorkflowModel.Status.TIMED_OUT; - break; - default: - status = WorkflowModel.Status.FAILED; - break; - } - updateWorkflowOutput(workflow, task); - throw new TerminateWorkflowException(task.getReasonForIncompletion(), status, task); - } - - // retry... - but not immediately - put a delay... - int startDelay = taskDefinition.getRetryDelaySeconds(); - switch (taskDefinition.getRetryLogic()) { - case FIXED: - startDelay = taskDefinition.getRetryDelaySeconds(); - break; - case LINEAR_BACKOFF: - int linearRetryDelaySeconds = - taskDefinition.getRetryDelaySeconds() - * taskDefinition.getBackoffScaleFactor() - * (task.getRetryCount() + 1); - // Reset integer overflow to max value - startDelay = - linearRetryDelaySeconds < 0 ? Integer.MAX_VALUE : linearRetryDelaySeconds; - break; - case EXPONENTIAL_BACKOFF: - int exponentialRetryDelaySeconds = - taskDefinition.getRetryDelaySeconds() - * (int) Math.pow(2, task.getRetryCount()); - // Reset integer overflow to max value - startDelay = - exponentialRetryDelaySeconds < 0 - ? 
Integer.MAX_VALUE - : exponentialRetryDelaySeconds; - break; - } - - task.setRetried(true); - - TaskModel rescheduled = task.copy(); - rescheduled.setStartDelayInSeconds(startDelay); - rescheduled.setCallbackAfterSeconds(startDelay); - rescheduled.setRetryCount(task.getRetryCount() + 1); - rescheduled.setRetried(false); - rescheduled.setTaskId(idGenerator.generate()); - rescheduled.setRetriedTaskId(task.getTaskId()); - rescheduled.setStatus(SCHEDULED); - rescheduled.setPollCount(0); - rescheduled.setInputData(new HashMap<>(task.getInputData())); - rescheduled.setReasonForIncompletion(null); - rescheduled.setSubWorkflowId(null); - rescheduled.setSeq(0); - rescheduled.setScheduledTime(0); - rescheduled.setStartTime(0); - rescheduled.setEndTime(0); - rescheduled.setWorkerId(null); - - if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) { - rescheduled.setExternalInputPayloadStoragePath( - task.getExternalInputPayloadStoragePath()); - } else { - rescheduled.addInput(task.getInputData()); - } - if (workflowTask != null && workflow.getWorkflowDefinition().getSchemaVersion() > 1) { - Map taskInput = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), - workflow, - rescheduled.getTaskId(), - taskDefinition); - rescheduled.addInput(taskInput); - } - // for the schema version 1, we do not have to recompute the inputs - return Optional.of(rescheduled); - } - - @VisibleForTesting - void checkWorkflowTimeout(WorkflowModel workflow) { - WorkflowDef workflowDef = workflow.getWorkflowDefinition(); - if (workflowDef == null) { - LOGGER.warn("Missing workflow definition : {}", workflow.getWorkflowId()); - return; - } - if (workflow.getStatus().isTerminal() || workflowDef.getTimeoutSeconds() <= 0) { - return; - } - - long timeout = 1000L * workflowDef.getTimeoutSeconds(); - long now = System.currentTimeMillis(); - long elapsedTime = - workflow.getLastRetriedTime() > 0 - ? 
now - workflow.getLastRetriedTime() - : now - workflow.getCreateTime(); - - if (elapsedTime < timeout) { - return; - } - - String reason = - String.format( - "Workflow timed out after %d seconds. Timeout configured as %d seconds. " - + "Timeout policy configured to %s", - elapsedTime / 1000L, - workflowDef.getTimeoutSeconds(), - workflowDef.getTimeoutPolicy().name()); - - switch (workflowDef.getTimeoutPolicy()) { - case ALERT_ONLY: - LOGGER.info("{} {}", workflow.getWorkflowId(), reason); - Monitors.recordWorkflowTermination( - workflow.getWorkflowName(), - WorkflowModel.Status.TIMED_OUT, - workflow.getOwnerApp()); - return; - case TIME_OUT_WF: - throw new TerminateWorkflowException(reason, WorkflowModel.Status.TIMED_OUT); - } - } - - @VisibleForTesting - void checkTaskTimeout(TaskDef taskDef, TaskModel task) { - - if (taskDef == null) { - LOGGER.warn( - "Missing task definition for task:{}/{} in workflow:{}", - task.getTaskId(), - task.getTaskDefName(), - task.getWorkflowInstanceId()); - return; - } - if (task.getStatus().isTerminal() - || taskDef.getTimeoutSeconds() <= 0 - || task.getStartTime() <= 0) { - return; - } - - long timeout = 1000L * taskDef.getTimeoutSeconds(); - long now = System.currentTimeMillis(); - long elapsedTime = - now - (task.getStartTime() + ((long) task.getStartDelayInSeconds() * 1000L)); - - if (elapsedTime < timeout) { - return; - } - - String reason = - String.format( - "Task timed out after %d seconds. Timeout configured as %d seconds. 
" - + "Timeout policy configured to %s", - elapsedTime / 1000L, - taskDef.getTimeoutSeconds(), - taskDef.getTimeoutPolicy().name()); - timeoutTaskWithTimeoutPolicy(reason, taskDef, task); - } - - @VisibleForTesting - void checkTaskPollTimeout(TaskDef taskDef, TaskModel task) { - if (taskDef == null) { - LOGGER.warn( - "Missing task definition for task:{}/{} in workflow:{}", - task.getTaskId(), - task.getTaskDefName(), - task.getWorkflowInstanceId()); - return; - } - if (taskDef.getPollTimeoutSeconds() == null - || taskDef.getPollTimeoutSeconds() <= 0 - || !task.getStatus().equals(SCHEDULED)) { - return; - } - - final long pollTimeout = 1000L * taskDef.getPollTimeoutSeconds(); - final long adjustedPollTimeout = pollTimeout + task.getCallbackAfterSeconds() * 1000L; - final long now = System.currentTimeMillis(); - final long pollElapsedTime = - now - (task.getScheduledTime() + ((long) task.getStartDelayInSeconds() * 1000L)); - - if (pollElapsedTime < adjustedPollTimeout) { - return; - } - - String reason = - String.format( - "Task poll timed out after %d seconds. Poll timeout configured as %d seconds. 
Timeout policy configured to %s", - pollElapsedTime / 1000L, - pollTimeout / 1000L, - taskDef.getTimeoutPolicy().name()); - timeoutTaskWithTimeoutPolicy(reason, taskDef, task); - } - - void timeoutTaskWithTimeoutPolicy(String reason, TaskDef taskDef, TaskModel task) { - Monitors.recordTaskTimeout(task.getTaskDefName()); - - switch (taskDef.getTimeoutPolicy()) { - case ALERT_ONLY: - LOGGER.info(reason); - return; - case RETRY: - task.setStatus(TIMED_OUT); - task.setReasonForIncompletion(reason); - return; - case TIME_OUT_WF: - task.setStatus(TIMED_OUT); - task.setReasonForIncompletion(reason); - throw new TerminateWorkflowException(reason, WorkflowModel.Status.TIMED_OUT, task); - } - } - - @VisibleForTesting - boolean isResponseTimedOut(TaskDef taskDefinition, TaskModel task) { - if (taskDefinition == null) { - LOGGER.warn( - "missing task type : {}, workflowId= {}", - task.getTaskDefName(), - task.getWorkflowInstanceId()); - return false; - } - - if (task.getStatus().isTerminal() || isAyncCompleteSystemTask(task)) { - return false; - } - - // calculate pendingTime - long now = System.currentTimeMillis(); - long callbackTime = 1000L * task.getCallbackAfterSeconds(); - long referenceTime = - task.getUpdateTime() > 0 ? 
task.getUpdateTime() : task.getScheduledTime(); - long pendingTime = now - (referenceTime + callbackTime); - Monitors.recordTaskPendingTime(task.getTaskType(), task.getWorkflowType(), pendingTime); - long thresholdMS = taskPendingTimeThresholdMins * 60 * 1000; - if (pendingTime > thresholdMS) { - LOGGER.warn( - "Task: {} of type: {} in workflow: {}/{} is in pending state for longer than {} ms", - task.getTaskId(), - task.getTaskType(), - task.getWorkflowInstanceId(), - task.getWorkflowType(), - thresholdMS); - } - - if (!task.getStatus().equals(IN_PROGRESS) - || taskDefinition.getResponseTimeoutSeconds() == 0) { - return false; - } - - LOGGER.debug( - "Evaluating responseTimeOut for Task: {}, with Task Definition: {}", - task, - taskDefinition); - long responseTimeout = 1000L * taskDefinition.getResponseTimeoutSeconds(); - long adjustedResponseTimeout = responseTimeout + callbackTime; - long noResponseTime = now - task.getUpdateTime(); - - if (noResponseTime < adjustedResponseTimeout) { - LOGGER.debug( - "Current responseTime: {} has not exceeded the configured responseTimeout of {} for the Task: {} with Task Definition: {}", - pendingTime, - responseTimeout, - task, - taskDefinition); - return false; - } - - Monitors.recordTaskResponseTimeout(task.getTaskDefName()); - return true; - } - - private void timeoutTask(TaskDef taskDef, TaskModel task) { - String reason = - "responseTimeout: " - + taskDef.getResponseTimeoutSeconds() - + " exceeded for the taskId: " - + task.getTaskId() - + " with Task Definition: " - + task.getTaskDefName(); - LOGGER.debug(reason); - task.setStatus(TIMED_OUT); - task.setReasonForIncompletion(reason); - } - - public List getTasksToBeScheduled( - WorkflowModel workflow, WorkflowTask taskToSchedule, int retryCount) { - return getTasksToBeScheduled(workflow, taskToSchedule, retryCount, null); - } - - public List getTasksToBeScheduled( - WorkflowModel workflow, - WorkflowTask taskToSchedule, - int retryCount, - String retriedTaskId) { - Map 
input = - parametersUtils.getTaskInput( - taskToSchedule.getInputParameters(), workflow, null, null); - - String type = taskToSchedule.getType(); - TaskType taskType = TaskType.of(type); - - // get tasks already scheduled (in progress/terminal) for this workflow instance - List tasksInWorkflow = - workflow.getTasks().stream() - .filter( - runningTask -> - runningTask.getStatus().equals(TaskModel.Status.IN_PROGRESS) - || runningTask.getStatus().isTerminal()) - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toList()); - - String taskId = idGenerator.generate(); - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(taskToSchedule.getTaskDefinition()) - .withWorkflowTask(taskToSchedule) - .withTaskInput(input) - .withRetryCount(retryCount) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .withDeciderService(this) - .build(); - - // For static forks, each branch of the fork creates a join task upon completion for - // dynamic forks, a join task is created with the fork and also with each branch of the - // fork. 
- // A new task must only be scheduled if a task, with the same reference name is not already - // in this workflow instance - return taskMappers.get(taskType).getMappedTasks(taskMapperContext).stream() - .filter(task -> !tasksInWorkflow.contains(task.getReferenceTaskName())) - .collect(Collectors.toList()); - } - - private boolean isTaskSkipped(WorkflowTask taskToSchedule, WorkflowModel workflow) { - try { - boolean isTaskSkipped = false; - if (taskToSchedule != null) { - TaskModel t = workflow.getTaskByRefName(taskToSchedule.getTaskReferenceName()); - if (t == null) { - isTaskSkipped = false; - } else if (t.getStatus().equals(SKIPPED)) { - isTaskSkipped = true; - } - } - return isTaskSkipped; - } catch (Exception e) { - throw new TerminateWorkflowException(e.getMessage()); - } - } - - private boolean isAyncCompleteSystemTask(TaskModel task) { - return systemTaskRegistry.isSystemTask(task.getTaskType()) - && systemTaskRegistry.get(task.getTaskType()).isAsyncComplete(task); - } - - public static class DeciderOutcome { - - List tasksToBeScheduled = new LinkedList<>(); - List tasksToBeUpdated = new LinkedList<>(); - boolean isComplete; - - private DeciderOutcome() {} - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java deleted file mode 100644 index d8aeb764b..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ /dev/null @@ -1,2017 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.util.*; -import java.util.function.Predicate; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.*; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.execution.tasks.Terminate; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.listener.WorkflowStatusListener; -import com.netflix.conductor.core.metadata.MetadataMapperService; -import com.netflix.conductor.core.utils.IDGenerator; -import 
com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.service.ExecutionLockService; - -import static com.netflix.conductor.core.exception.ApplicationException.Code.*; -import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE; -import static com.netflix.conductor.model.TaskModel.Status.*; - -/** Workflow services provider interface */ -@Trace -@Component -public class WorkflowExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutor.class); - private static final int PARENT_WF_PRIORITY = 10; - - private final MetadataDAO metadataDAO; - private final QueueDAO queueDAO; - private final DeciderService deciderService; - private final ConductorProperties properties; - private final MetadataMapperService metadataMapperService; - private final ExecutionDAOFacade executionDAOFacade; - private final ParametersUtils parametersUtils; - private final IDGenerator idGenerator; - private final WorkflowStatusListener workflowStatusListener; - private final SystemTaskRegistry systemTaskRegistry; - - private long activeWorkerLastPollMs; - private static final String CLASS_NAME = WorkflowExecutor.class.getSimpleName(); - private final ExecutionLockService executionLockService; - - private static final Predicate UNSUCCESSFUL_TERMINAL_TASK = - task -> !task.getStatus().isSuccessful() && task.getStatus().isTerminal(); - - private static final Predicate UNSUCCESSFUL_JOIN_TASK = - UNSUCCESSFUL_TERMINAL_TASK.and(t -> TaskType.TASK_TYPE_JOIN.equals(t.getTaskType())); - - private static final Predicate NON_TERMINAL_TASK = - task -> !task.getStatus().isTerminal(); - - private final Predicate 
validateLastPolledTime = - pollData -> - pollData.getLastPollTime() - > System.currentTimeMillis() - activeWorkerLastPollMs; - - public WorkflowExecutor( - DeciderService deciderService, - MetadataDAO metadataDAO, - QueueDAO queueDAO, - MetadataMapperService metadataMapperService, - WorkflowStatusListener workflowStatusListener, - ExecutionDAOFacade executionDAOFacade, - ConductorProperties properties, - ExecutionLockService executionLockService, - SystemTaskRegistry systemTaskRegistry, - ParametersUtils parametersUtils, - IDGenerator idGenerator) { - this.deciderService = deciderService; - this.metadataDAO = metadataDAO; - this.queueDAO = queueDAO; - this.properties = properties; - this.metadataMapperService = metadataMapperService; - this.executionDAOFacade = executionDAOFacade; - this.activeWorkerLastPollMs = properties.getActiveWorkerLastPollTimeout().toMillis(); - this.workflowStatusListener = workflowStatusListener; - this.executionLockService = executionLockService; - this.parametersUtils = parametersUtils; - this.idGenerator = idGenerator; - this.systemTaskRegistry = systemTaskRegistry; - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Map input, - String externalInputPayloadStoragePath) { - return startWorkflow( - name, version, correlationId, input, externalInputPayloadStoragePath, null); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input, - String externalInputPayloadStoragePath) { - return startWorkflow( - name, - version, - correlationId, - priority, - input, - externalInputPayloadStoragePath, - null); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Map input, - String externalInputPayloadStoragePath, - String event) { - return startWorkflow( - name, - 
version, - input, - externalInputPayloadStoragePath, - correlationId, - null, - null, - event); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input, - String externalInputPayloadStoragePath, - String event) { - return startWorkflow( - name, - version, - input, - externalInputPayloadStoragePath, - correlationId, - priority, - null, - null, - event, - null); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Map input, - String externalInputPayloadStoragePath, - String event, - Map taskToDomain) { - return startWorkflow( - name, - version, - correlationId, - 0, - input, - externalInputPayloadStoragePath, - event, - taskToDomain); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input, - String externalInputPayloadStoragePath, - String event, - Map taskToDomain) { - return startWorkflow( - name, - version, - input, - externalInputPayloadStoragePath, - correlationId, - priority, - null, - null, - event, - taskToDomain); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - Map input, - String externalInputPayloadStoragePath, - String correlationId, - String parentWorkflowId, - String parentWorkflowTaskId, - String event) { - return startWorkflow( - name, - version, - input, - externalInputPayloadStoragePath, - correlationId, - parentWorkflowId, - parentWorkflowTaskId, - event, - null); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - WorkflowDef workflowDefinition, - Map workflowInput, - String externalInputPayloadStoragePath, - String correlationId, - String event, - Map taskToDomain) { - return startWorkflow( - workflowDefinition, - workflowInput, - 
externalInputPayloadStoragePath, - correlationId, - 0, - event, - taskToDomain); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - WorkflowDef workflowDefinition, - Map workflowInput, - String externalInputPayloadStoragePath, - String correlationId, - Integer priority, - String event, - Map taskToDomain) { - return startWorkflow( - workflowDefinition, - workflowInput, - externalInputPayloadStoragePath, - correlationId, - priority, - null, - null, - event, - taskToDomain); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - Map workflowInput, - String externalInputPayloadStoragePath, - String correlationId, - String parentWorkflowId, - String parentWorkflowTaskId, - String event, - Map taskToDomain) { - return startWorkflow( - name, - version, - workflowInput, - externalInputPayloadStoragePath, - correlationId, - 0, - parentWorkflowId, - parentWorkflowTaskId, - event, - taskToDomain); - } - - /** - * @throws ApplicationException - */ - public String startWorkflow( - String name, - Integer version, - Map workflowInput, - String externalInputPayloadStoragePath, - String correlationId, - Integer priority, - String parentWorkflowId, - String parentWorkflowTaskId, - String event, - Map taskToDomain) { - WorkflowDef workflowDefinition = - metadataMapperService.lookupForWorkflowDefinition(name, version); - - return startWorkflow( - workflowDefinition, - workflowInput, - externalInputPayloadStoragePath, - correlationId, - priority, - parentWorkflowId, - parentWorkflowTaskId, - event, - taskToDomain); - } - - /** - * @throws ApplicationException if validation fails - */ - public String startWorkflow( - WorkflowDef workflowDefinition, - Map workflowInput, - String externalInputPayloadStoragePath, - String correlationId, - Integer priority, - String parentWorkflowId, - String parentWorkflowTaskId, - String event, - Map taskToDomain) { - - workflowDefinition = 
metadataMapperService.populateTaskDefinitions(workflowDefinition); - - // perform validations - validateWorkflow(workflowDefinition, workflowInput, externalInputPayloadStoragePath); - - // A random UUID is assigned to the work flow instance - String workflowId = idGenerator.generate(); - - // Persist the Workflow - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - workflow.setCorrelationId(correlationId); - workflow.setPriority(priority == null ? 0 : priority); - workflow.setWorkflowDefinition(workflowDefinition); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setParentWorkflowId(parentWorkflowId); - workflow.setParentWorkflowTaskId(parentWorkflowTaskId); - workflow.setOwnerApp(WorkflowContext.get().getClientApp()); - workflow.setCreateTime(System.currentTimeMillis()); - workflow.setUpdatedBy(null); - workflow.setUpdatedTime(null); - workflow.setEvent(event); - workflow.setTaskToDomain(taskToDomain); - workflow.setVariables(workflowDefinition.getVariables()); - - if (workflowInput != null && !workflowInput.isEmpty()) { - Map parsedInput = - parametersUtils.getWorkflowInput(workflowDefinition, workflowInput); - workflow.setInput(parsedInput); - } else { - workflow.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); - } - - try { - createWorkflow(workflow); - // then decide to see if anything needs to be done as part of the workflow - decide(workflowId); - Monitors.recordWorkflowStartSuccess( - workflow.getWorkflowName(), - String.valueOf(workflow.getWorkflowVersion()), - workflow.getOwnerApp()); - return workflowId; - } catch (Exception e) { - Monitors.recordWorkflowStartError( - workflowDefinition.getName(), WorkflowContext.get().getClientApp()); - LOGGER.error("Unable to start workflow: {}", workflowDefinition.getName(), e); - - // It's possible the remove workflow call hits an exception as well, in that case we - // want to log both errors to help diagnosis. 
- try { - executionDAOFacade.removeWorkflow(workflowId, false); - } catch (Exception rwe) { - LOGGER.error("Could not remove the workflowId: " + workflowId, rwe); - } - throw e; - } - } - - /* - * Acquire and hold the lock till the workflow creation action is completed (in primary and secondary datastores). - * This is to ensure that workflow creation action precedes any other action on a given workflow. - */ - private void createWorkflow(WorkflowModel workflow) { - if (!executionLockService.acquireLock(workflow.getWorkflowId())) { - throw new ApplicationException( - BACKEND_ERROR, "Error acquiring lock when creating workflow: {}"); - } - try { - executionDAOFacade.createWorkflow(workflow); - LOGGER.debug( - "A new instance of workflow: {} created with id: {}", - workflow.getWorkflowName(), - workflow.getWorkflowId()); - } finally { - executionLockService.releaseLock(workflow.getWorkflowId()); - } - } - - /** - * Performs validations for starting a workflow - * - * @throws ApplicationException if the validation fails - */ - private void validateWorkflow( - WorkflowDef workflowDef, - Map workflowInput, - String externalStoragePath) { - try { - // Check if the input to the workflow is not null - if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) { - LOGGER.error( - "The input for the workflow '{}' cannot be NULL", workflowDef.getName()); - throw new ApplicationException( - INVALID_INPUT, "NULL input passed when starting workflow"); - } - } catch (Exception e) { - Monitors.recordWorkflowStartError( - workflowDef.getName(), WorkflowContext.get().getClientApp()); - throw e; - } - } - - /** - * @param workflowId the id of the workflow for which task callbacks are to be reset - * @throws ApplicationException if the workflow is in terminal state - */ - public void resetCallbacksForWorkflow(String workflowId) { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - if (workflow.getStatus().isTerminal()) { - throw new 
ApplicationException( - CONFLICT, "Workflow is in terminal state. Status =" + workflow.getStatus()); - } - - // Get SIMPLE tasks in SCHEDULED state that have callbackAfterSeconds > 0 and set the - // callbackAfterSeconds to 0 - workflow.getTasks().stream() - .filter( - task -> - !systemTaskRegistry.isSystemTask(task.getTaskType()) - && SCHEDULED == task.getStatus() - && task.getCallbackAfterSeconds() > 0) - .forEach( - task -> { - if (queueDAO.resetOffsetTime( - QueueUtils.getQueueName(task), task.getTaskId())) { - task.setCallbackAfterSeconds(0); - executionDAOFacade.updateTask(task); - } - }); - } - - public String rerun(RerunWorkflowRequest request) { - Utils.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing"); - if (!rerunWF( - request.getReRunFromWorkflowId(), - request.getReRunFromTaskId(), - request.getTaskInput(), - request.getWorkflowInput(), - request.getCorrelationId())) { - throw new ApplicationException( - INVALID_INPUT, "Task " + request.getReRunFromTaskId() + " not found"); - } - return request.getReRunFromWorkflowId(); - } - - /** - * @param workflowId the id of the workflow to be restarted - * @param useLatestDefinitions if true, use the latest workflow and task definitions upon - * restart - * @throws ApplicationException in the following cases: - *

    - *
  • Workflow is not in a terminal state - *
  • Workflow definition is not found - *
  • Workflow is deemed non-restartable as per workflow definition - *
- */ - public void restart(String workflowId, boolean useLatestDefinitions) { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - if (!workflow.getStatus().isTerminal()) { - String errorMsg = - String.format( - "Workflow: %s is not in terminal state, unable to restart.", workflow); - LOGGER.error(errorMsg); - throw new ApplicationException(CONFLICT, errorMsg); - } - - WorkflowDef workflowDef; - if (useLatestDefinitions) { - workflowDef = - metadataDAO - .getLatestWorkflowDef(workflow.getWorkflowName()) - .orElseThrow( - () -> - new ApplicationException( - NOT_FOUND, - String.format( - "Unable to find latest definition for %s", - workflowId))); - workflow.setWorkflowDefinition(workflowDef); - } else { - workflowDef = - Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElseGet( - () -> - metadataDAO - .getWorkflowDef( - workflow.getWorkflowName(), - workflow.getWorkflowVersion()) - .orElseThrow( - () -> - new ApplicationException( - NOT_FOUND, - String.format( - "Unable to find definition for %s", - workflowId)))); - } - - if (!workflowDef.isRestartable() - && workflow.getStatus() - .equals( - WorkflowModel.Status - .COMPLETED)) { // Can only restart non-completed workflows - // when the configuration is set to false - throw new ApplicationException( - CONFLICT, String.format("Workflow: %s is non-restartable", workflow)); - } - - // Reset the workflow in the primary datastore and remove from indexer; then re-create it - executionDAOFacade.resetWorkflow(workflowId); - - workflow.getTasks().clear(); - workflow.setReasonForIncompletion(null); - workflow.setFailedTaskId(null); - workflow.setCreateTime(System.currentTimeMillis()); - workflow.setEndTime(0); - workflow.setLastRetriedTime(0); - // Change the status to running - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOutput(null); - workflow.setExternalOutputPayloadStoragePath(null); - - try { - executionDAOFacade.createWorkflow(workflow); - } catch (Exception 
e) { - Monitors.recordWorkflowStartError( - workflowDef.getName(), WorkflowContext.get().getClientApp()); - LOGGER.error("Unable to restart workflow: {}", workflowDef.getName(), e); - terminateWorkflow(workflowId, "Error when restarting the workflow"); - throw e; - } - - decide(workflowId); - - updateAndPushParents(workflow, "restarted"); - } - - /** - * Gets the last instance of each failed task and reschedule each Gets all cancelled tasks and - * schedule all of them except JOIN (join should change status to INPROGRESS) Switch workflow - * back to RUNNING status and call decider. - * - * @param workflowId the id of the workflow to be retried - */ - public void retry(String workflowId, boolean resumeSubworkflowTasks) { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - if (!workflow.getStatus().isTerminal()) { - throw new ApplicationException( - CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); - } - if (workflow.getTasks().isEmpty()) { - throw new ApplicationException(CONFLICT, "Workflow has not started yet"); - } - - if (resumeSubworkflowTasks) { - Optional taskToRetry = - workflow.getTasks().stream().filter(UNSUCCESSFUL_TERMINAL_TASK).findFirst(); - if (taskToRetry.isPresent()) { - workflow = findLastFailedSubWorkflowIfAny(taskToRetry.get(), workflow); - retry(workflow); - updateAndPushParents(workflow, "retried"); - } - } else { - retry(workflow); - updateAndPushParents(workflow, "retried"); - } - } - - private void updateAndPushParents(WorkflowModel workflow, String operation) { - String workflowIdentifier = ""; - while (workflow.hasParent()) { - // update parent's sub workflow task - TaskModel subWorkflowTask = - executionDAOFacade.getTaskModel(workflow.getParentWorkflowTaskId()); - if (subWorkflowTask.getWorkflowTask().isOptional()) { - // break out - LOGGER.info( - "Sub workflow task {} is optional, skip updating parents", subWorkflowTask); - break; - } - subWorkflowTask.setSubworkflowChanged(true); - 
subWorkflowTask.setStatus(IN_PROGRESS); - executionDAOFacade.updateTask(subWorkflowTask); - - // add an execution log - String currentWorkflowIdentifier = workflow.toShortString(); - workflowIdentifier = - !workflowIdentifier.equals("") - ? String.format( - "%s -> %s", currentWorkflowIdentifier, workflowIdentifier) - : currentWorkflowIdentifier; - TaskExecLog log = - new TaskExecLog( - String.format("Sub workflow %s %s.", workflowIdentifier, operation)); - log.setTaskId(subWorkflowTask.getTaskId()); - executionDAOFacade.addTaskExecLog(Collections.singletonList(log)); - LOGGER.info("Task {} updated. {}", log.getTaskId(), log.getLog()); - - // push the parent workflow to decider queue for asynchronous 'decide' - String parentWorkflowId = workflow.getParentWorkflowId(); - WorkflowModel parentWorkflow = - executionDAOFacade.getWorkflowModel(parentWorkflowId, true); - parentWorkflow.setStatus(WorkflowModel.Status.RUNNING); - parentWorkflow.setLastRetriedTime(System.currentTimeMillis()); - executionDAOFacade.updateWorkflow(parentWorkflow); - pushParentWorkflow(parentWorkflowId); - - workflow = parentWorkflow; - } - } - - private void retry(WorkflowModel workflow) { - // Get all FAILED or CANCELED tasks that are not COMPLETED (or reach other terminal states) - // on further executions. - // // Eg: for Seq of tasks task1.CANCELED, task1.COMPLETED, task1 shouldn't be retried. - // Throw an exception if there are no FAILED tasks. - // Handle JOIN task CANCELED status as special case. - Map retriableMap = new HashMap<>(); - for (TaskModel task : workflow.getTasks()) { - switch (task.getStatus()) { - case FAILED: - case FAILED_WITH_TERMINAL_ERROR: - case TIMED_OUT: - retriableMap.put(task.getReferenceTaskName(), task); - break; - case CANCELED: - if (task.getTaskType().equalsIgnoreCase(TaskType.JOIN.toString()) - || task.getTaskType().equalsIgnoreCase(TaskType.DO_WHILE.toString())) { - task.setStatus(IN_PROGRESS); - // Task doesn't have to be updated yet. 
Will be updated along with other - // Workflow tasks downstream. - } else { - retriableMap.put(task.getReferenceTaskName(), task); - } - break; - default: - retriableMap.remove(task.getReferenceTaskName()); - break; - } - } - - // if workflow TIMED_OUT due to timeoutSeconds configured in the workflow definition, - // it may not have any unsuccessful tasks that can be retried - if (retriableMap.values().size() == 0 - && workflow.getStatus() != WorkflowModel.Status.TIMED_OUT) { - throw new ApplicationException( - CONFLICT, - "There are no retryable tasks! Use restart if you want to attempt entire workflow execution again."); - } - - // Update Workflow with new status. - // This should load Workflow from archive, if archived. - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setLastRetriedTime(System.currentTimeMillis()); - String lastReasonForIncompletion = workflow.getReasonForIncompletion(); - workflow.setReasonForIncompletion(null); - // Add to decider queue - queueDAO.push( - DECIDER_QUEUE, - workflow.getWorkflowId(), - workflow.getPriority(), - properties.getWorkflowOffsetTimeout().getSeconds()); - executionDAOFacade.updateWorkflow(workflow); - LOGGER.info( - "Workflow {} that failed due to '{}' was retried", - workflow.toShortString(), - lastReasonForIncompletion); - - // taskToBeRescheduled would set task `retried` to true, and hence it's important to - // updateTasks after obtaining task copy from taskToBeRescheduled. - final WorkflowModel finalWorkflow = workflow; - List retriableTasks = - retriableMap.values().stream() - .sorted(Comparator.comparingInt(TaskModel::getSeq)) - .map(task -> taskToBeRescheduled(finalWorkflow, task)) - .collect(Collectors.toList()); - - dedupAndAddTasks(workflow, retriableTasks); - // Note: updateTasks before updateWorkflow might fail when Workflow is archived and doesn't - // exist in primary store. 
- executionDAOFacade.updateTasks(workflow.getTasks()); - scheduleTask(workflow, retriableTasks); - } - - private WorkflowModel findLastFailedSubWorkflowIfAny( - TaskModel task, WorkflowModel parentWorkflow) { - if (TaskType.TASK_TYPE_SUB_WORKFLOW.equals(task.getTaskType()) - && UNSUCCESSFUL_TERMINAL_TASK.test(task)) { - WorkflowModel subWorkflow = - executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true); - Optional taskToRetry = - subWorkflow.getTasks().stream().filter(UNSUCCESSFUL_TERMINAL_TASK).findFirst(); - if (taskToRetry.isPresent()) { - return findLastFailedSubWorkflowIfAny(taskToRetry.get(), subWorkflow); - } - } - return parentWorkflow; - } - - /** - * Reschedule a task - * - * @param task failed or cancelled task - * @return new instance of a task with "SCHEDULED" status - */ - private TaskModel taskToBeRescheduled(WorkflowModel workflow, TaskModel task) { - TaskModel taskToBeRetried = task.copy(); - taskToBeRetried.setTaskId(idGenerator.generate()); - taskToBeRetried.setRetriedTaskId(task.getTaskId()); - taskToBeRetried.setStatus(SCHEDULED); - taskToBeRetried.setRetryCount(task.getRetryCount() + 1); - taskToBeRetried.setRetried(false); - taskToBeRetried.setPollCount(0); - taskToBeRetried.setCallbackAfterSeconds(0); - taskToBeRetried.setSubWorkflowId(null); - taskToBeRetried.setScheduledTime(0); - taskToBeRetried.setStartTime(0); - taskToBeRetried.setEndTime(0); - taskToBeRetried.setWorkerId(null); - taskToBeRetried.setReasonForIncompletion(null); - taskToBeRetried.setSeq(0); - - // perform parameter replacement for retried task - Map taskInput = - parametersUtils.getTaskInput( - taskToBeRetried.getWorkflowTask().getInputParameters(), - workflow, - taskToBeRetried.getWorkflowTask().getTaskDefinition(), - taskToBeRetried.getTaskId()); - taskToBeRetried.getInputData().putAll(taskInput); - - task.setRetried(true); - // since this task is being retried and a retry has been computed, task lifecycle is - // complete - task.setExecuted(true); - 
return taskToBeRetried; - } - - private void endExecution(WorkflowModel workflow) { - Optional terminateTask = - workflow.getTasks().stream() - .filter( - t -> - TaskType.TERMINATE.name().equals(t.getTaskType()) - && t.getStatus().isTerminal() - && t.getStatus().isSuccessful()) - .findFirst(); - if (terminateTask.isPresent()) { - String terminationStatus = - (String) - terminateTask - .get() - .getWorkflowTask() - .getInputParameters() - .get(Terminate.getTerminationStatusParameter()); - String reason = - (String) - terminateTask - .get() - .getWorkflowTask() - .getInputParameters() - .get(Terminate.getTerminationReasonParameter()); - if (StringUtils.isBlank(reason)) { - reason = - String.format( - "Workflow is %s by TERMINATE task: %s", - terminationStatus, terminateTask.get().getTaskId()); - } - if (WorkflowModel.Status.FAILED.name().equals(terminationStatus)) { - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow = - terminate( - workflow, - new TerminateWorkflowException( - reason, workflow.getStatus(), terminateTask.get())); - } else { - workflow.setReasonForIncompletion(reason); - workflow = completeWorkflow(workflow); - } - } else { - workflow = completeWorkflow(workflow); - } - cancelNonTerminalTasks(workflow); - } - - /** - * @param workflow the workflow to be completed - * @throws ApplicationException if workflow is not in terminal state - */ - @VisibleForTesting - WorkflowModel completeWorkflow(WorkflowModel workflow) { - LOGGER.debug("Completing workflow execution for {}", workflow.getWorkflowId()); - - if (workflow.getStatus().equals(WorkflowModel.Status.COMPLETED)) { - queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); // remove from the sweep queue - executionDAOFacade.removeFromPendingWorkflow( - workflow.getWorkflowName(), workflow.getWorkflowId()); - LOGGER.debug("Workflow: {} has already been completed.", workflow.getWorkflowId()); - return workflow; - } - - if (workflow.getStatus().isTerminal()) { - String msg = - "Workflow is 
already in terminal state. Current status: " - + workflow.getStatus(); - throw new ApplicationException(CONFLICT, msg); - } - - // FIXME Backwards compatibility for legacy workflows already running. - // This code will be removed in a future version. - if (workflow.getWorkflowDefinition() == null) { - workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); - } - deciderService.updateWorkflowOutput(workflow, null); - - workflow.setStatus(WorkflowModel.Status.COMPLETED); - - // update the failed reference task names - workflow.getFailedReferenceTaskNames() - .addAll( - workflow.getTasks().stream() - .filter( - t -> - FAILED.equals(t.getStatus()) - || FAILED_WITH_TERMINAL_ERROR.equals( - t.getStatus())) - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toSet())); - - executionDAOFacade.updateWorkflow(workflow); - LOGGER.debug("Completed workflow execution for {}", workflow.getWorkflowId()); - workflowStatusListener.onWorkflowCompletedIfEnabled(workflow); - Monitors.recordWorkflowCompletion( - workflow.getWorkflowName(), - workflow.getEndTime() - workflow.getCreateTime(), - workflow.getOwnerApp()); - - if (workflow.hasParent()) { - updateParentWorkflowTask(workflow); - LOGGER.info( - "{} updated parent {} task {}", - workflow.toShortString(), - workflow.getParentWorkflowId(), - workflow.getParentWorkflowTaskId()); - pushParentWorkflow(workflow.getParentWorkflowId()); - } - - executionLockService.releaseLock(workflow.getWorkflowId()); - executionLockService.deleteLock(workflow.getWorkflowId()); - return workflow; - } - - public void terminateWorkflow(String workflowId, String reason) { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - if (WorkflowModel.Status.COMPLETED.equals(workflow.getStatus())) { - throw new ApplicationException(CONFLICT, "Cannot terminate a COMPLETED workflow."); - } - workflow.setStatus(WorkflowModel.Status.TERMINATED); - terminateWorkflow(workflow, reason, null); - } - - /** - * 
@param workflow the workflow to be terminated - * @param reason the reason for termination - * @param failureWorkflow the failure workflow (if any) to be triggered as a result of this - * termination - */ - public WorkflowModel terminateWorkflow( - WorkflowModel workflow, String reason, String failureWorkflow) { - try { - executionLockService.acquireLock(workflow.getWorkflowId(), 60000); - - if (!workflow.getStatus().isTerminal()) { - workflow.setStatus(WorkflowModel.Status.TERMINATED); - } - - // FIXME Backwards compatibility for legacy workflows already running. - // This code will be removed in a future version. - if (workflow.getWorkflowDefinition() == null) { - workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); - } - - try { - deciderService.updateWorkflowOutput(workflow, null); - } catch (Exception e) { - // catch any failure in this step and continue the execution of terminating workflow - LOGGER.error( - "Failed to update output data for workflow: {}", - workflow.getWorkflowId(), - e); - Monitors.error(CLASS_NAME, "terminateWorkflow"); - } - - // update the failed reference task names - workflow.getFailedReferenceTaskNames() - .addAll( - workflow.getTasks().stream() - .filter( - t -> - FAILED.equals(t.getStatus()) - || FAILED_WITH_TERMINAL_ERROR.equals( - t.getStatus())) - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toSet())); - - String workflowId = workflow.getWorkflowId(); - workflow.setReasonForIncompletion(reason); - executionDAOFacade.updateWorkflow(workflow); - workflowStatusListener.onWorkflowTerminatedIfEnabled(workflow); - Monitors.recordWorkflowTermination( - workflow.getWorkflowName(), workflow.getStatus(), workflow.getOwnerApp()); - LOGGER.info("Workflow {} is terminated because of {}", workflowId, reason); - List tasks = workflow.getTasks(); - try { - // Remove from the task queue if they were there - tasks.forEach( - task -> queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId())); - } catch 
(Exception e) { - LOGGER.warn( - "Error removing task(s) from queue during workflow termination : {}", - workflowId, - e); - } - - if (workflow.hasParent()) { - updateParentWorkflowTask(workflow); - LOGGER.info( - "{} updated parent {} task {}", - workflow.toShortString(), - workflow.getParentWorkflowId(), - workflow.getParentWorkflowTaskId()); - pushParentWorkflow(workflow.getParentWorkflowId()); - } - - if (!StringUtils.isBlank(failureWorkflow)) { - Map input = new HashMap<>(workflow.getInput()); - input.put("workflowId", workflowId); - input.put("reason", reason); - input.put("failureStatus", workflow.getStatus().toString()); - if (workflow.getFailedTaskId() != null) { - input.put("failureTaskId", workflow.getFailedTaskId()); - } - - try { - WorkflowDef latestFailureWorkflow = - metadataDAO - .getLatestWorkflowDef(failureWorkflow) - .orElseThrow( - () -> - new RuntimeException( - "Failure Workflow Definition not found for: " - + failureWorkflow)); - - String failureWFId = - startWorkflow( - latestFailureWorkflow, - input, - null, - workflow.getCorrelationId(), - null, - workflow.getTaskToDomain()); - - workflow.getOutput().put("conductor.failure_workflow", failureWFId); - } catch (Exception e) { - LOGGER.error("Failed to start error workflow", e); - workflow.getOutput() - .put( - "conductor.failure_workflow", - "Error workflow " - + failureWorkflow - + " failed to start. 
reason: " - + e.getMessage()); - Monitors.recordWorkflowStartError( - failureWorkflow, WorkflowContext.get().getClientApp()); - } - executionDAOFacade.updateWorkflow(workflow); - } - executionDAOFacade.removeFromPendingWorkflow( - workflow.getWorkflowName(), workflow.getWorkflowId()); - - List erroredTasks = cancelNonTerminalTasks(workflow); - if (!erroredTasks.isEmpty()) { - throw new ApplicationException( - Code.INTERNAL_ERROR, - String.format( - "Error canceling system tasks: %s", - String.join(",", erroredTasks))); - } - return workflow; - } finally { - executionLockService.releaseLock(workflow.getWorkflowId()); - executionLockService.deleteLock(workflow.getWorkflowId()); - } - } - - /** - * @param taskResult the task result to be updated - * @throws ApplicationException - */ - public void updateTask(TaskResult taskResult) { - if (taskResult == null) { - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, "Task object is null"); - } - - String workflowId = taskResult.getWorkflowInstanceId(); - WorkflowModel workflowInstance = executionDAOFacade.getWorkflowModel(workflowId, true); - - // FIXME Backwards compatibility for legacy workflows already running. - // This code will be removed in a future version. - if (workflowInstance.getWorkflowDefinition() == null) { - workflowInstance = - metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); - } - - TaskModel task = - Optional.ofNullable(executionDAOFacade.getTaskModel(taskResult.getTaskId())) - .orElseThrow( - () -> - new ApplicationException( - ApplicationException.Code.NOT_FOUND, - "No such task found by id: " - + taskResult.getTaskId())); - - LOGGER.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance); - - String taskQueueName = QueueUtils.getQueueName(task); - - if (task.getStatus().isTerminal()) { - // Task was already updated.... 
- queueDAO.remove(taskQueueName, taskResult.getTaskId()); - LOGGER.info( - "Task: {} has already finished execution with status: {} within workflow: {}. Removed task from queue: {}", - task.getTaskId(), - task.getStatus(), - task.getWorkflowInstanceId(), - taskQueueName); - Monitors.recordUpdateConflict( - task.getTaskType(), workflowInstance.getWorkflowName(), task.getStatus()); - return; - } - - if (workflowInstance.getStatus().isTerminal()) { - // Workflow is in terminal state - queueDAO.remove(taskQueueName, taskResult.getTaskId()); - LOGGER.info( - "Workflow: {} has already finished execution. Task update for: {} ignored and removed from Queue: {}.", - workflowInstance, - taskResult.getTaskId(), - taskQueueName); - Monitors.recordUpdateConflict( - task.getTaskType(), - workflowInstance.getWorkflowName(), - workflowInstance.getStatus()); - return; - } - - // for system tasks, setting to SCHEDULED would mean restarting the task which is - // undesirable - // for worker tasks, set status to SCHEDULED and push to the queue - if (!systemTaskRegistry.isSystemTask(task.getTaskType()) - && taskResult.getStatus() == TaskResult.Status.IN_PROGRESS) { - task.setStatus(SCHEDULED); - } else { - task.setStatus(TaskModel.Status.valueOf(taskResult.getStatus().name())); - } - task.setOutputMessage(taskResult.getOutputMessage()); - task.setReasonForIncompletion(taskResult.getReasonForIncompletion()); - task.setWorkerId(taskResult.getWorkerId()); - task.setCallbackAfterSeconds(taskResult.getCallbackAfterSeconds()); - task.setOutputData(taskResult.getOutputData()); - task.setSubWorkflowId(taskResult.getSubWorkflowId()); - - if (StringUtils.isNotBlank(taskResult.getExternalOutputPayloadStoragePath())) { - task.setExternalOutputPayloadStoragePath( - taskResult.getExternalOutputPayloadStoragePath()); - } - - if (task.getStatus().isTerminal()) { - task.setEndTime(System.currentTimeMillis()); - } - - // Update message in Task queue based on Task status - switch (task.getStatus()) { - 
case COMPLETED: - case CANCELED: - case FAILED: - case FAILED_WITH_TERMINAL_ERROR: - case TIMED_OUT: - try { - queueDAO.remove(taskQueueName, taskResult.getTaskId()); - LOGGER.debug( - "Task: {} removed from taskQueue: {} since the task status is {}", - task, - taskQueueName, - task.getStatus().name()); - } catch (Exception e) { - // Ignore exceptions on queue remove as it wouldn't impact task and workflow - // execution, and will be cleaned up eventually - String errorMsg = - String.format( - "Error removing the message in queue for task: %s for workflow: %s", - task.getTaskId(), workflowId); - LOGGER.warn(errorMsg, e); - Monitors.recordTaskQueueOpError( - task.getTaskType(), workflowInstance.getWorkflowName()); - } - break; - case IN_PROGRESS: - case SCHEDULED: - try { - long callBack = taskResult.getCallbackAfterSeconds(); - queueDAO.postpone( - taskQueueName, task.getTaskId(), task.getWorkflowPriority(), callBack); - LOGGER.debug( - "Task: {} postponed in taskQueue: {} since the task status is {} with callbackAfterSeconds: {}", - task, - taskQueueName, - task.getStatus().name(), - callBack); - } catch (Exception e) { - // Throw exceptions on queue postpone, this would impact task execution - String errorMsg = - String.format( - "Error postponing the message in queue for task: %s for workflow: %s", - task.getTaskId(), workflowId); - LOGGER.error(errorMsg, e); - Monitors.recordTaskQueueOpError( - task.getTaskType(), workflowInstance.getWorkflowName()); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, e); - } - break; - default: - break; - } - - // Throw an ApplicationException if below operations fail to avoid workflow inconsistencies. 
- try { - executionDAOFacade.updateTask(task); - } catch (Exception e) { - String errorMsg = - String.format( - "Error updating task: %s for workflow: %s", - task.getTaskId(), workflowId); - LOGGER.error(errorMsg, e); - Monitors.recordTaskUpdateError(task.getTaskType(), workflowInstance.getWorkflowName()); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, e); - } - - taskResult.getLogs().forEach(taskExecLog -> taskExecLog.setTaskId(task.getTaskId())); - executionDAOFacade.addTaskExecLog(taskResult.getLogs()); - - if (task.getStatus().isTerminal()) { - long duration = getTaskDuration(0, task); - long lastDuration = task.getEndTime() - task.getStartTime(); - Monitors.recordTaskExecutionTime( - task.getTaskDefName(), duration, true, task.getStatus()); - Monitors.recordTaskExecutionTime( - task.getTaskDefName(), lastDuration, false, task.getStatus()); - } - - decide(workflowId); - } - - public TaskModel getTask(String taskId) { - return Optional.ofNullable(executionDAOFacade.getTaskModel(taskId)) - .map( - task -> { - if (task.getWorkflowTask() != null) { - return metadataMapperService.populateTaskWithDefinition(task); - } - return task; - }) - .orElse(null); - } - - public List getRunningWorkflows(String workflowName, int version) { - return executionDAOFacade.getPendingWorkflowsByName(workflowName, version); - } - - public List getWorkflows(String name, Integer version, Long startTime, Long endTime) { - return executionDAOFacade.getWorkflowsByName(name, startTime, endTime).stream() - .filter(workflow -> workflow.getWorkflowVersion() == version) - .map(Workflow::getWorkflowId) - .collect(Collectors.toList()); - } - - public List getRunningWorkflowIds(String workflowName, int version) { - return executionDAOFacade.getRunningWorkflowIds(workflowName, version); - } - - /** - * @param workflowId ID of the workflow to evaluate the state for - * @return true if the workflow has completed (success or failed), false otherwise. 
- * @throws ApplicationException If there was an error - caller should retry in this case. - */ - public boolean decide(String workflowId) { - if (!executionLockService.acquireLock(workflowId)) { - return false; - } - - // If it is a new workflow, the tasks will be still empty even though include tasks is true - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - - // FIXME Backwards compatibility for legacy workflows already running. - // This code will be removed in a future version. - workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); - - if (workflow.getStatus().isTerminal()) { - if (!workflow.getStatus().isSuccessful()) { - cancelNonTerminalTasks(workflow); - } - return true; - } - - // we find any sub workflow tasks that have changed - // and change the workflow/task state accordingly - adjustStateIfSubWorkflowChanged(workflow); - - try { - DeciderService.DeciderOutcome outcome = deciderService.decide(workflow); - if (outcome.isComplete) { - endExecution(workflow); - return true; - } - - List tasksToBeScheduled = outcome.tasksToBeScheduled; - setTaskDomains(tasksToBeScheduled, workflow); - List tasksToBeUpdated = outcome.tasksToBeUpdated; - - tasksToBeScheduled = dedupAndAddTasks(workflow, tasksToBeScheduled); - - boolean stateChanged = scheduleTask(workflow, tasksToBeScheduled); // start - - for (TaskModel task : outcome.tasksToBeScheduled) { - if (systemTaskRegistry.isSystemTask(task.getTaskType()) - && NON_TERMINAL_TASK.test(task)) { - WorkflowSystemTask workflowSystemTask = - systemTaskRegistry.get(task.getTaskType()); - if (!workflowSystemTask.isAsync() - && workflowSystemTask.execute(workflow, task, this)) { - tasksToBeUpdated.add(task); - stateChanged = true; - } - } - } - - if (!outcome.tasksToBeUpdated.isEmpty() || !tasksToBeScheduled.isEmpty()) { - executionDAOFacade.updateTasks(tasksToBeUpdated); - executionDAOFacade.updateWorkflow(workflow); - } - - if (stateChanged) { - decide(workflowId); - 
} - } catch (TerminateWorkflowException twe) { - LOGGER.info("Execution terminated of workflow: {}", workflowId, twe); - terminate(workflow, twe); - return true; - } catch (RuntimeException e) { - LOGGER.error("Error deciding workflow: {}", workflowId, e); - throw e; - } finally { - executionLockService.releaseLock(workflowId); - } - return false; - } - - private void adjustStateIfSubWorkflowChanged(WorkflowModel workflow) { - Optional changedSubWorkflowTask = findChangedSubWorkflowTask(workflow); - if (changedSubWorkflowTask.isPresent()) { - // reset the flag - TaskModel subWorkflowTask = changedSubWorkflowTask.get(); - subWorkflowTask.setSubworkflowChanged(false); - executionDAOFacade.updateTask(subWorkflowTask); - - LOGGER.info( - "{} reset subworkflowChanged flag for {}", - workflow.toShortString(), - subWorkflowTask.getTaskId()); - - // find all terminal and unsuccessful JOIN tasks and set them to IN_PROGRESS - if (workflow.getWorkflowDefinition().containsType(TaskType.TASK_TYPE_JOIN) - || workflow.getWorkflowDefinition() - .containsType(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC)) { - // if we are here, then the SUB_WORKFLOW task could be part of a FORK_JOIN or - // FORK_JOIN_DYNAMIC - // and the JOIN task(s) needs to be evaluated again, set them to IN_PROGRESS - workflow.getTasks().stream() - .filter(UNSUCCESSFUL_JOIN_TASK) - .peek(t -> t.setStatus(TaskModel.Status.IN_PROGRESS)) - .forEach(executionDAOFacade::updateTask); - } - } - } - - private Optional findChangedSubWorkflowTask(WorkflowModel workflow) { - WorkflowDef workflowDef = - Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElseGet( - () -> - metadataDAO - .getWorkflowDef( - workflow.getWorkflowName(), - workflow.getWorkflowVersion()) - .orElseThrow( - () -> - new ApplicationException( - BACKEND_ERROR, - "Workflow Definition is not found"))); - if (workflowDef.containsType(TaskType.TASK_TYPE_SUB_WORKFLOW) - || workflow.getWorkflowDefinition() - 
.containsType(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC)) { - return workflow.getTasks().stream() - .filter( - t -> - t.getTaskType().equals(TaskType.TASK_TYPE_SUB_WORKFLOW) - && t.isSubworkflowChanged() - && !t.isRetried()) - .findFirst(); - } - return Optional.empty(); - } - - @VisibleForTesting - List cancelNonTerminalTasks(WorkflowModel workflow) { - List erroredTasks = new ArrayList<>(); - // Update non-terminal tasks' status to CANCELED - for (TaskModel task : workflow.getTasks()) { - if (!task.getStatus().isTerminal()) { - // Cancel the ones which are not completed yet.... - task.setStatus(CANCELED); - if (systemTaskRegistry.isSystemTask(task.getTaskType())) { - WorkflowSystemTask workflowSystemTask = - systemTaskRegistry.get(task.getTaskType()); - try { - workflowSystemTask.cancel(workflow, task, this); - } catch (Exception e) { - erroredTasks.add(task.getReferenceTaskName()); - LOGGER.error( - "Error canceling system task:{}/{} in workflow: {}", - workflowSystemTask.getTaskType(), - task.getTaskId(), - workflow.getWorkflowId(), - e); - } - } - executionDAOFacade.updateTask(task); - } - } - if (erroredTasks.isEmpty()) { - try { - workflowStatusListener.onWorkflowFinalizedIfEnabled(workflow); - queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); - } catch (Exception e) { - LOGGER.error( - "Error removing workflow: {} from decider queue", - workflow.getWorkflowId(), - e); - } - } - return erroredTasks; - } - - @VisibleForTesting - List dedupAndAddTasks(WorkflowModel workflow, List tasks) { - List tasksInWorkflow = - workflow.getTasks().stream() - .map(task -> task.getReferenceTaskName() + "_" + task.getRetryCount()) - .collect(Collectors.toList()); - - List dedupedTasks = - tasks.stream() - .filter( - task -> - !tasksInWorkflow.contains( - task.getReferenceTaskName() - + "_" - + task.getRetryCount())) - .collect(Collectors.toList()); - - workflow.getTasks().addAll(dedupedTasks); - return dedupedTasks; - } - - /** - * @throws ApplicationException if the 
workflow cannot be paused - */ - public void pauseWorkflow(String workflowId) { - try { - executionLockService.acquireLock(workflowId, 60000); - WorkflowModel.Status status = WorkflowModel.Status.PAUSED; - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, false); - if (workflow.getStatus().isTerminal()) { - throw new ApplicationException( - CONFLICT, - "Workflow id " + workflowId + " has ended, status cannot be updated."); - } - if (workflow.getStatus().equals(status)) { - return; // Already paused! - } - workflow.setStatus(status); - executionDAOFacade.updateWorkflow(workflow); - } finally { - executionLockService.releaseLock(workflowId); - } - - // remove from the sweep queue - // any exceptions can be ignored, as this is not critical to the pause operation - try { - queueDAO.remove(DECIDER_QUEUE, workflowId); - } catch (Exception e) { - LOGGER.info( - "[pauseWorkflow] Error removing workflow: {} from decider queue", - workflowId, - e); - } - } - - /** - * @param workflowId the workflow to be resumed - * @throws IllegalStateException if the workflow is not in PAUSED state - */ - public void resumeWorkflow(String workflowId) { - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, false); - if (!workflow.getStatus().equals(WorkflowModel.Status.PAUSED)) { - throw new IllegalStateException( - "The workflow " - + workflowId - + " is not PAUSED so cannot resume. 
" - + "Current status is " - + workflow.getStatus().name()); - } - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setLastRetriedTime(System.currentTimeMillis()); - // Add to decider queue - queueDAO.push( - DECIDER_QUEUE, - workflow.getWorkflowId(), - workflow.getPriority(), - properties.getWorkflowOffsetTimeout().getSeconds()); - executionDAOFacade.updateWorkflow(workflow); - decide(workflowId); - } - - /** - * @param workflowId the id of the workflow - * @param taskReferenceName the referenceName of the task to be skipped - * @param skipTaskRequest the {@link SkipTaskRequest} object - * @throws IllegalStateException - */ - public void skipTaskFromWorkflow( - String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { - - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - - // FIXME Backwards compatibility for legacy workflows already running. - // This code will be removed in a future version. - workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); - - // If the workflow is not running then cannot skip any task - if (!workflow.getStatus().equals(WorkflowModel.Status.RUNNING)) { - String errorMsg = - String.format( - "The workflow %s is not running so the task referenced by %s cannot be skipped", - workflowId, taskReferenceName); - throw new IllegalStateException(errorMsg); - } - - // Check if the reference name is as per the workflowdef - WorkflowTask workflowTask = - workflow.getWorkflowDefinition().getTaskByRefName(taskReferenceName); - if (workflowTask == null) { - String errorMsg = - String.format( - "The task referenced by %s does not exist in the WorkflowDefinition %s", - taskReferenceName, workflow.getWorkflowName()); - throw new IllegalStateException(errorMsg); - } - - // If the task is already started the again it cannot be skipped - workflow.getTasks() - .forEach( - task -> { - if (task.getReferenceTaskName().equals(taskReferenceName)) { - String errorMsg = - 
String.format( - "The task referenced %s has already been processed, cannot be skipped", - taskReferenceName); - throw new IllegalStateException(errorMsg); - } - }); - - // Now create a "SKIPPED" task for this workflow - TaskModel taskToBeSkipped = new TaskModel(); - taskToBeSkipped.setTaskId(idGenerator.generate()); - taskToBeSkipped.setReferenceTaskName(taskReferenceName); - taskToBeSkipped.setWorkflowInstanceId(workflowId); - taskToBeSkipped.setWorkflowPriority(workflow.getPriority()); - taskToBeSkipped.setStatus(SKIPPED); - taskToBeSkipped.setTaskType(workflowTask.getName()); - taskToBeSkipped.setCorrelationId(workflow.getCorrelationId()); - if (skipTaskRequest != null) { - taskToBeSkipped.setInputData(skipTaskRequest.getTaskInput()); - taskToBeSkipped.setOutputData(skipTaskRequest.getTaskOutput()); - taskToBeSkipped.setInputMessage(skipTaskRequest.getTaskInputMessage()); - taskToBeSkipped.setOutputMessage(skipTaskRequest.getTaskOutputMessage()); - } - executionDAOFacade.createTasks(Collections.singletonList(taskToBeSkipped)); - decide(workflowId); - } - - public WorkflowModel getWorkflow(String workflowId, boolean includeTasks) { - return executionDAOFacade.getWorkflowModel(workflowId, includeTasks); - } - - public void addTaskToQueue(TaskModel task) { - // put in queue - String taskQueueName = QueueUtils.getQueueName(task); - if (task.getCallbackAfterSeconds() > 0) { - queueDAO.push( - taskQueueName, - task.getTaskId(), - task.getWorkflowPriority(), - task.getCallbackAfterSeconds()); - } else { - queueDAO.push(taskQueueName, task.getTaskId(), task.getWorkflowPriority(), 0); - } - LOGGER.debug( - "Added task {} with priority {} to queue {} with call back seconds {}", - task, - task.getWorkflowPriority(), - taskQueueName, - task.getCallbackAfterSeconds()); - } - - @VisibleForTesting - void setTaskDomains(List tasks, WorkflowModel workflow) { - Map taskToDomain = workflow.getTaskToDomain(); - if (taskToDomain != null) { - // Step 1: Apply * mapping to all tasks, 
if present. - String domainstr = taskToDomain.get("*"); - if (StringUtils.isNotBlank(domainstr)) { - String[] domains = domainstr.split(","); - tasks.forEach( - task -> { - // Filter out SystemTask - if (!systemTaskRegistry.isSystemTask(task.getTaskType())) { - // Check which domain worker is polling - // Set the task domain - task.setDomain(getActiveDomain(task.getTaskType(), domains)); - } - }); - } - // Step 2: Override additional mappings. - tasks.forEach( - task -> { - if (!systemTaskRegistry.isSystemTask(task.getTaskType())) { - String taskDomainstr = taskToDomain.get(task.getTaskType()); - if (taskDomainstr != null) { - task.setDomain( - getActiveDomain( - task.getTaskType(), taskDomainstr.split(","))); - } - } - }); - } - } - - /** - * Gets the active domain from the list of domains where the task is to be queued. The domain - * list must be ordered. In sequence, check if any worker has polled for last - * `activeWorkerLastPollMs`, if so that is the Active domain. When no active domains are found: - *
  • If NO_DOMAIN token is provided, return null. - *
  • Else, return last domain from list. - * - * @param taskType the taskType of the task for which active domain is to be found - * @param domains the array of domains for the task. (Must contain atleast one element). - * @return the active domain where the task will be queued - */ - @VisibleForTesting - String getActiveDomain(String taskType, String[] domains) { - if (domains == null || domains.length == 0) { - return null; - } - - return Arrays.stream(domains) - .filter(domain -> !domain.equalsIgnoreCase("NO_DOMAIN")) - .map(domain -> executionDAOFacade.getTaskPollDataByDomain(taskType, domain.trim())) - .filter(Objects::nonNull) - .filter(validateLastPolledTime) - .findFirst() - .map(PollData::getDomain) - .orElse( - domains[domains.length - 1].trim().equalsIgnoreCase("NO_DOMAIN") - ? null - : domains[domains.length - 1].trim()); - } - - private long getTaskDuration(long s, TaskModel task) { - long duration = task.getEndTime() - task.getStartTime(); - s += duration; - if (task.getRetriedTaskId() == null) { - return s; - } - return s + getTaskDuration(s, executionDAOFacade.getTaskModel(task.getRetriedTaskId())); - } - - @VisibleForTesting - boolean scheduleTask(WorkflowModel workflow, List tasks) { - List tasksToBeQueued; - boolean startedSystemTasks = false; - - try { - if (tasks == null || tasks.isEmpty()) { - return false; - } - - // Get the highest seq number - int count = workflow.getTasks().stream().mapToInt(TaskModel::getSeq).max().orElse(0); - - for (TaskModel task : tasks) { - if (task.getSeq() == 0) { // Set only if the seq was not set - task.setSeq(++count); - } - } - - // metric to track the distribution of number of tasks within a workflow - Monitors.recordNumTasksInWorkflow( - workflow.getTasks().size() + tasks.size(), - workflow.getWorkflowName(), - String.valueOf(workflow.getWorkflowVersion())); - - // Save the tasks in the DAO - executionDAOFacade.createTasks(tasks); - - List systemTasks = - tasks.stream() - .filter(task -> 
systemTaskRegistry.isSystemTask(task.getTaskType())) - .collect(Collectors.toList()); - - tasksToBeQueued = - tasks.stream() - .filter(task -> !systemTaskRegistry.isSystemTask(task.getTaskType())) - .collect(Collectors.toList()); - - // Traverse through all the system tasks, start the sync tasks, in case of async queue - // the tasks - for (TaskModel task : systemTasks) { - WorkflowSystemTask workflowSystemTask = systemTaskRegistry.get(task.getTaskType()); - if (workflowSystemTask == null) { - throw new ApplicationException( - NOT_FOUND, "No system task found by name " + task.getTaskType()); - } - if (task.getStatus() != null - && !task.getStatus().isTerminal() - && task.getStartTime() == 0) { - task.setStartTime(System.currentTimeMillis()); - } - if (!workflowSystemTask.isAsync()) { - try { - workflowSystemTask.start(workflow, task, this); - } catch (Exception e) { - String errorMsg = - String.format( - "Unable to start system task: %s, {id: %s, name: %s}", - task.getTaskType(), - task.getTaskId(), - task.getTaskDefName()); - throw new ApplicationException( - ApplicationException.Code.INTERNAL_ERROR, errorMsg, e); - } - startedSystemTasks = true; - executionDAOFacade.updateTask(task); - } else { - tasksToBeQueued.add(task); - } - } - } catch (Exception e) { - List taskIds = - tasks.stream().map(TaskModel::getTaskId).collect(Collectors.toList()); - String errorMsg = - String.format( - "Error scheduling tasks: %s, for workflow: %s", - taskIds, workflow.getWorkflowId()); - LOGGER.error(errorMsg, e); - Monitors.error(CLASS_NAME, "scheduleTask"); - throw new TerminateWorkflowException(errorMsg); - } - - // On addTaskToQueue failures, ignore the exceptions and let WorkflowRepairService take care - // of republishing the messages to the queue. 
- try { - addTaskToQueue(tasksToBeQueued); - } catch (Exception e) { - List taskIds = - tasksToBeQueued.stream().map(TaskModel::getTaskId).collect(Collectors.toList()); - String errorMsg = - String.format( - "Error pushing tasks to the queue: %s, for workflow: %s", - taskIds, workflow.getWorkflowId()); - LOGGER.warn(errorMsg, e); - Monitors.error(CLASS_NAME, "scheduleTask"); - } - return startedSystemTasks; - } - - private void addTaskToQueue(final List tasks) { - for (TaskModel task : tasks) { - addTaskToQueue(task); - } - } - - private WorkflowModel terminate( - final WorkflowModel workflow, TerminateWorkflowException terminateWorkflowException) { - if (!workflow.getStatus().isTerminal()) { - workflow.setStatus(terminateWorkflowException.getWorkflowStatus()); - } - - if (terminateWorkflowException.getTask() != null && workflow.getFailedTaskId() == null) { - workflow.setFailedTaskId(terminateWorkflowException.getTask().getTaskId()); - } - - String failureWorkflow = workflow.getWorkflowDefinition().getFailureWorkflow(); - if (failureWorkflow != null) { - if (failureWorkflow.startsWith("$")) { - String[] paramPathComponents = failureWorkflow.split("\\."); - String name = paramPathComponents[2]; // name of the input parameter - failureWorkflow = (String) workflow.getInput().get(name); - } - } - if (terminateWorkflowException.getTask() != null) { - executionDAOFacade.updateTask(terminateWorkflowException.getTask()); - } - return terminateWorkflow( - workflow, terminateWorkflowException.getMessage(), failureWorkflow); - } - - private boolean rerunWF( - String workflowId, - String taskId, - Map taskInput, - Map workflowInput, - String correlationId) { - - // Get the workflow - WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true); - if (!workflow.getStatus().isTerminal()) { - String errorMsg = - String.format( - "Workflow: %s is not in terminal state, unable to rerun.", workflow); - LOGGER.error(errorMsg); - throw new 
ApplicationException(CONFLICT, errorMsg); - } - updateAndPushParents(workflow, "reran"); - - // If the task Id is null it implies that the entire workflow has to be rerun - if (taskId == null) { - // remove all tasks - workflow.getTasks().forEach(task -> executionDAOFacade.removeTask(task.getTaskId())); - // Set workflow as RUNNING - workflow.setStatus(WorkflowModel.Status.RUNNING); - // Reset failure reason from previous run to default - workflow.setReasonForIncompletion(null); - workflow.setFailedTaskId(null); - workflow.setFailedReferenceTaskNames(new HashSet<>()); - if (correlationId != null) { - workflow.setCorrelationId(correlationId); - } - if (workflowInput != null) { - workflow.setInput(workflowInput); - } - - queueDAO.push( - DECIDER_QUEUE, - workflow.getWorkflowId(), - workflow.getPriority(), - properties.getWorkflowOffsetTimeout().getSeconds()); - executionDAOFacade.updateWorkflow(workflow); - - decide(workflowId); - return true; - } - - // Now iterate through the tasks and find the "specific" task - TaskModel rerunFromTask = null; - for (TaskModel task : workflow.getTasks()) { - if (task.getTaskId().equals(taskId)) { - rerunFromTask = task; - break; - } - } - - // If not found look into sub workflows - if (rerunFromTask == null) { - for (TaskModel task : workflow.getTasks()) { - if (task.getTaskType().equalsIgnoreCase(TaskType.TASK_TYPE_SUB_WORKFLOW)) { - String subWorkflowId = task.getSubWorkflowId(); - if (rerunWF(subWorkflowId, taskId, taskInput, null, null)) { - rerunFromTask = task; - break; - } - } - } - } - - if (rerunFromTask != null) { - // set workflow as RUNNING - workflow.setStatus(WorkflowModel.Status.RUNNING); - // Reset failure reason from previous run to default - workflow.setReasonForIncompletion(null); - workflow.setFailedTaskId(null); - workflow.setFailedReferenceTaskNames(new HashSet<>()); - if (correlationId != null) { - workflow.setCorrelationId(correlationId); - } - if (workflowInput != null) { - workflow.setInput(workflowInput); 
- } - // Add to decider queue - queueDAO.push( - DECIDER_QUEUE, - workflow.getWorkflowId(), - workflow.getPriority(), - properties.getWorkflowOffsetTimeout().getSeconds()); - executionDAOFacade.updateWorkflow(workflow); - // update tasks in datastore to update workflow-tasks relationship for archived - // workflows - executionDAOFacade.updateTasks(workflow.getTasks()); - // Remove all tasks after the "rerunFromTask" - for (TaskModel task : workflow.getTasks()) { - if (task.getSeq() > rerunFromTask.getSeq()) { - executionDAOFacade.removeTask(task.getTaskId()); - } - } - // reset fields before restarting the task - rerunFromTask.setScheduledTime(System.currentTimeMillis()); - rerunFromTask.setStartTime(0); - rerunFromTask.setUpdateTime(0); - rerunFromTask.setEndTime(0); - rerunFromTask.getOutputData().clear(); - rerunFromTask.setRetried(false); - rerunFromTask.setExecuted(false); - rerunFromTask.setExternalOutputPayloadStoragePath(null); - if (rerunFromTask.getTaskType().equalsIgnoreCase(TaskType.TASK_TYPE_SUB_WORKFLOW)) { - // if task is sub workflow set task as IN_PROGRESS and reset start time - rerunFromTask.setStatus(IN_PROGRESS); - rerunFromTask.setStartTime(System.currentTimeMillis()); - } else { - if (taskInput != null) { - rerunFromTask.setInputData(taskInput); - } - if (systemTaskRegistry.isSystemTask(rerunFromTask.getTaskType()) - && !systemTaskRegistry.get(rerunFromTask.getTaskType()).isAsync()) { - // Start the synchronous system task directly - systemTaskRegistry - .get(rerunFromTask.getTaskType()) - .start(workflow, rerunFromTask, this); - } else { - // Set the task to rerun as SCHEDULED - rerunFromTask.setStatus(SCHEDULED); - addTaskToQueue(rerunFromTask); - } - } - executionDAOFacade.updateTask(rerunFromTask); - - decide(workflowId); - return true; - } - return false; - } - - public void scheduleNextIteration(TaskModel loopTask, WorkflowModel workflow) { - // Schedule only first loop over task. 
Rest will be taken care in Decider Service when this - // task will get completed. - List scheduledLoopOverTasks = - deciderService.getTasksToBeScheduled( - workflow, - loopTask.getWorkflowTask().getLoopOver().get(0), - loopTask.getRetryCount(), - null); - setTaskDomains(scheduledLoopOverTasks, workflow); - scheduledLoopOverTasks.forEach( - t -> { - t.setReferenceTaskName( - TaskUtils.appendIteration( - t.getReferenceTaskName(), loopTask.getIteration())); - t.setIteration(loopTask.getIteration()); - }); - scheduleTask(workflow, scheduledLoopOverTasks); - } - - public TaskDef getTaskDefinition(TaskModel task) { - return task.getTaskDefinition() - .orElseGet( - () -> - Optional.ofNullable( - metadataDAO.getTaskDef( - task.getWorkflowTask().getName())) - .orElseThrow( - () -> { - String reason = - String.format( - "Invalid task specified. Cannot find task by name %s in the task definitions", - task.getWorkflowTask() - .getName()); - return new TerminateWorkflowException(reason); - })); - } - - @VisibleForTesting - void updateParentWorkflowTask(WorkflowModel subWorkflow) { - TaskModel subWorkflowTask = - executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()); - executeSubworkflowTaskAndSyncData(subWorkflow, subWorkflowTask); - executionDAOFacade.updateTask(subWorkflowTask); - } - - private void executeSubworkflowTaskAndSyncData( - WorkflowModel subWorkflow, TaskModel subWorkflowTask) { - WorkflowSystemTask subWorkflowSystemTask = - systemTaskRegistry.get(TaskType.TASK_TYPE_SUB_WORKFLOW); - subWorkflowSystemTask.execute(subWorkflow, subWorkflowTask, this); - } - - /** Pushes parent workflow id into the decider queue with a priority. 
*/ - private void pushParentWorkflow(String parentWorkflowId) { - if (queueDAO.containsMessage(DECIDER_QUEUE, parentWorkflowId)) { - queueDAO.postpone(DECIDER_QUEUE, parentWorkflowId, PARENT_WF_PRIORITY, 0); - } else { - queueDAO.push(DECIDER_QUEUE, parentWorkflowId, PARENT_WF_PRIORITY, 0); - } - - LOGGER.info("Pushed parent workflow {} to {}", parentWorkflowId, DECIDER_QUEUE); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/Evaluator.java b/core/src/main/java/com/netflix/conductor/core/execution/evaluators/Evaluator.java deleted file mode 100644 index 88d01ef7f..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/Evaluator.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.evaluators; - -public interface Evaluator { - /** - * Evaluate the expression using the inputs provided, if required. Evaluation of the expression - * depends on the type of the evaluator. - * - * @param expression Expression to be evaluated. - * @param input Input object to the evaluator to help evaluate the expression. - * @return Return the evaluation result. - */ - Object evaluate(String expression, Object input); -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluator.java b/core/src/main/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluator.java deleted file mode 100644 index 2dd6963d1..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.evaluators; - -import javax.script.ScriptException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.events.ScriptEvaluator; -import com.netflix.conductor.core.exception.TerminateWorkflowException; - -@Component(JavascriptEvaluator.NAME) -public class JavascriptEvaluator implements Evaluator { - - public static final String NAME = "javascript"; - private static final Logger LOGGER = LoggerFactory.getLogger(JavascriptEvaluator.class); - - @Override - public Object evaluate(String expression, Object input) { - LOGGER.debug("Javascript evaluator -- expression: {}", expression); - try { - // Evaluate the expression by using the Javascript evaluation engine. - Object result = ScriptEvaluator.eval(expression, input); - LOGGER.debug("Javascript evaluator -- result: {}", result); - return result; - } catch (ScriptException e) { - String errorMsg = String.format("Error while evaluating script: %s", expression); - LOGGER.error(errorMsg, e); - throw new TerminateWorkflowException(errorMsg); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/ValueParamEvaluator.java b/core/src/main/java/com/netflix/conductor/core/execution/evaluators/ValueParamEvaluator.java deleted file mode 100644 index f1fda6178..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/evaluators/ValueParamEvaluator.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.evaluators; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.exception.TerminateWorkflowException; - -@Component(ValueParamEvaluator.NAME) -public class ValueParamEvaluator implements Evaluator { - - public static final String NAME = "value-param"; - private static final Logger LOGGER = LoggerFactory.getLogger(ValueParamEvaluator.class); - - @SuppressWarnings("unchecked") - @Override - public Object evaluate(String expression, Object input) { - LOGGER.debug("ValueParam evaluator -- evaluating: {}", expression); - if (input instanceof Map) { - Object result = ((Map) input).get(expression); - LOGGER.debug("ValueParam evaluator -- result: {}", result); - return result; - } else { - String errorMsg = String.format("Input has to be a JSON object: %s", input.getClass()); - LOGGER.error(errorMsg); - throw new TerminateWorkflowException(errorMsg); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java deleted file mode 100644 index 45b2add89..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import javax.script.ScriptException; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.events.ScriptEvaluator; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#DECISION} to a List {@link TaskModel} starting with Task of type {@link - * TaskType#DECISION} which is marked as IN_PROGRESS, followed by the list of {@link TaskModel} - * based on the case expression evaluation in the Decision task. - * - * @deprecated {@link com.netflix.conductor.core.execution.tasks.Decision} is also deprecated. Use - * {@link com.netflix.conductor.core.execution.tasks.Switch} and so ${@link SwitchTaskMapper} - * will be used as a result. 
- */ -@Deprecated -@Component -public class DecisionTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(DecisionTaskMapper.class); - - @Override - public TaskType getTaskType() { - return TaskType.DECISION; - } - - /** - * This method gets the list of tasks that need to scheduled when the task to scheduled is of - * type {@link TaskType#DECISION}. - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return List of tasks in the following order: - *

      - *
    • {@link TaskType#DECISION} with {@link TaskModel.Status#IN_PROGRESS} - *
    • List of task based on the evaluation of {@link WorkflowTask#getCaseExpression()} - * are scheduled. - *
    • In case of no matching result after the evaluation of the {@link - * WorkflowTask#getCaseExpression()}, the {@link WorkflowTask#getDefaultCase()} Tasks - * are scheduled. - *
    - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - LOGGER.debug("TaskMapperContext {} in DecisionTaskMapper", taskMapperContext); - List tasksToBeScheduled = new LinkedList<>(); - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - Map taskInput = taskMapperContext.getTaskInput(); - int retryCount = taskMapperContext.getRetryCount(); - - // get the expression to be evaluated - String caseValue = getEvaluatedCaseValue(workflowTask, taskInput); - - // QQ why is the case value and the caseValue passed and caseOutput passes as the same ?? - TaskModel decisionTask = taskMapperContext.createTaskModel(); - decisionTask.setTaskType(TaskType.TASK_TYPE_DECISION); - decisionTask.setTaskDefName(TaskType.TASK_TYPE_DECISION); - decisionTask.addInput("case", caseValue); - decisionTask.addOutput("caseOutput", Collections.singletonList(caseValue)); - decisionTask.setStartTime(System.currentTimeMillis()); - decisionTask.setStatus(TaskModel.Status.IN_PROGRESS); - tasksToBeScheduled.add(decisionTask); - - // get the list of tasks based on the decision - List selectedTasks = workflowTask.getDecisionCases().get(caseValue); - // if the tasks returned are empty based on evaluated case value, then get the default case - // if there is one - if (selectedTasks == null || selectedTasks.isEmpty()) { - selectedTasks = workflowTask.getDefaultCase(); - } - // once there are selected tasks that need to proceeded as part of the decision, get the - // next task to be scheduled by using the decider service - if (selectedTasks != null && !selectedTasks.isEmpty()) { - WorkflowTask selectedTask = - selectedTasks.get(0); // Schedule the first task to be executed... 
- // TODO break out this recursive call using function composition of what needs to be - // done and then walk back the condition tree - List caseTasks = - taskMapperContext - .getDeciderService() - .getTasksToBeScheduled( - workflowModel, - selectedTask, - retryCount, - taskMapperContext.getRetryTaskId()); - tasksToBeScheduled.addAll(caseTasks); - decisionTask.addInput("hasChildren", "true"); - } - return tasksToBeScheduled; - } - - /** - * This method evaluates the case expression of a decision task and returns a string - * representation of the evaluated result. - * - * @param workflowTask: The decision task that has the case expression to be evaluated. - * @param taskInput: the input which has the values that will be used in evaluating the case - * expression. - * @return A String representation of the evaluated result - */ - @VisibleForTesting - String getEvaluatedCaseValue(WorkflowTask workflowTask, Map taskInput) { - String expression = workflowTask.getCaseExpression(); - String caseValue; - if (StringUtils.isNotBlank(expression)) { - LOGGER.debug("Case being evaluated using decision expression: {}", expression); - try { - // Evaluate the expression by using the Nashhorn based script evaluator - Object returnValue = ScriptEvaluator.eval(expression, taskInput); - caseValue = (returnValue == null) ? 
"null" : returnValue.toString(); - } catch (ScriptException e) { - String errorMsg = String.format("Error while evaluating script: %s", expression); - LOGGER.error(errorMsg, e); - throw new TerminateWorkflowException(errorMsg); - } - - } else { // In case of no case expression, get the caseValueParam and treat it as a string - // representation of caseValue - LOGGER.debug( - "No Expression available on the decision task, case value being assigned as param name"); - String paramName = workflowTask.getCaseValueParam(); - caseValue = "" + taskInput.get(paramName); - } - return caseValue; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapper.java deleted file mode 100644 index 51609b919..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapper.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#DO_WHILE} to a {@link TaskModel} of type {@link TaskType#DO_WHILE} - */ -@Component -public class DoWhileTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(DoWhileTaskMapper.class); - - private final MetadataDAO metadataDAO; - - @Autowired - public DoWhileTaskMapper(MetadataDAO metadataDAO) { - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.DO_WHILE; - } - - /** - * This method maps {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#DO_WHILE} to a {@link TaskModel} of type {@link TaskType#DO_WHILE} with a status of - * {@link TaskModel.Status#IN_PROGRESS} - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * 
@return: A {@link TaskModel} of type {@link TaskType#DO_WHILE} in a List - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - LOGGER.debug("TaskMapperContext {} in DoWhileTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - - TaskModel task = workflowModel.getTaskByRefName(workflowTask.getTaskReferenceName()); - if (task != null && task.getStatus().isTerminal()) { - // Since loopTask is already completed no need to schedule task again. - return List.of(); - } - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet( - () -> - Optional.ofNullable( - metadataDAO.getTaskDef( - workflowTask.getName())) - .orElseGet(TaskDef::new)); - - TaskModel doWhileTask = taskMapperContext.createTaskModel(); - doWhileTask.setTaskType(TaskType.TASK_TYPE_DO_WHILE); - doWhileTask.setStatus(TaskModel.Status.IN_PROGRESS); - doWhileTask.setStartTime(System.currentTimeMillis()); - doWhileTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); - doWhileTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds()); - doWhileTask.setRetryCount(taskMapperContext.getRetryCount()); - - return List.of(doWhileTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java deleted file mode 100644 index 506fbbdc6..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#DYNAMIC} to a {@link TaskModel} based on definition derived from the dynamic task name - * defined in {@link WorkflowTask#getInputParameters()} - */ -@Component -public class DynamicTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(DynamicTaskMapper.class); - - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - @Autowired - public DynamicTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() 
{ - return TaskType.DYNAMIC; - } - - /** - * This method maps a dynamic task to a {@link TaskModel} based on the input params - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return A {@link List} that contains a single {@link TaskModel} with a {@link - * TaskModel.Status#SCHEDULED} - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - LOGGER.debug("TaskMapperContext {} in DynamicTaskMapper", taskMapperContext); - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - Map taskInput = taskMapperContext.getTaskInput(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - int retryCount = taskMapperContext.getRetryCount(); - String retriedTaskId = taskMapperContext.getRetryTaskId(); - - String taskNameParam = workflowTask.getDynamicTaskNameParam(); - String taskName = getDynamicTaskName(taskInput, taskNameParam); - workflowTask.setName(taskName); - TaskDef taskDefinition = getDynamicTaskDefinition(workflowTask); - workflowTask.setTaskDefinition(taskDefinition); - - Map input = - parametersUtils.getTaskInput( - workflowTask.getInputParameters(), - workflowModel, - taskDefinition, - taskMapperContext.getTaskId()); - - // IMPORTANT: The WorkflowTask that is inside TaskMapperContext is changed above - // createTaskModel() must be called here so the changes are reflected in the created - // TaskModel - TaskModel dynamicTask = taskMapperContext.createTaskModel(); - dynamicTask.setStartDelayInSeconds(workflowTask.getStartDelay()); - dynamicTask.setInputData(input); - dynamicTask.setStatus(TaskModel.Status.SCHEDULED); - dynamicTask.setRetryCount(retryCount); - dynamicTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - dynamicTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds()); - dynamicTask.setTaskType(taskName); - 
dynamicTask.setRetriedTaskId(retriedTaskId); - dynamicTask.setWorkflowPriority(workflowModel.getPriority()); - return Collections.singletonList(dynamicTask); - } - - /** - * Helper method that looks into the input params and returns the dynamic task name - * - * @param taskInput: a map which contains different input parameters and also contains the - * mapping between the dynamic task name param and the actual name representing the dynamic - * task - * @param taskNameParam: the key that is used to look up the dynamic task name. - * @return The name of the dynamic task - * @throws TerminateWorkflowException : In case is there is no value dynamic task name in the - * input parameters. - */ - @VisibleForTesting - String getDynamicTaskName(Map taskInput, String taskNameParam) - throws TerminateWorkflowException { - return Optional.ofNullable(taskInput.get(taskNameParam)) - .map(String::valueOf) - .orElseThrow( - () -> { - String reason = - String.format( - "Cannot map a dynamic task based on the parameter and input. " - + "Parameter= %s, input= %s", - taskNameParam, taskInput); - return new TerminateWorkflowException(reason); - }); - } - - /** - * This method gets the TaskDefinition for a specific {@link WorkflowTask} - * - * @param workflowTask: An instance of {@link WorkflowTask} which has the name of the using - * which the {@link TaskDef} can be retrieved. - * @return An instance of TaskDefinition - * @throws TerminateWorkflowException : in case of no workflow definition available - */ - @VisibleForTesting - TaskDef getDynamicTaskDefinition(WorkflowTask workflowTask) - throws TerminateWorkflowException { // TODO this is a common pattern in code base can - // be moved to DAO - return Optional.ofNullable(workflowTask.getTaskDefinition()) - .orElseGet( - () -> - Optional.ofNullable(metadataDAO.getTaskDef(workflowTask.getName())) - .orElseThrow( - () -> { - String reason = - String.format( - "Invalid task specified. 
Cannot find task by name %s in the task definitions", - workflowTask.getName()); - return new TerminateWorkflowException(reason); - })); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java deleted file mode 100644 index a581ec17e..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EVENT; - -@Component -public class EventTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(EventTaskMapper.class); - - private final ParametersUtils parametersUtils; - - @Autowired - public EventTaskMapper(ParametersUtils parametersUtils) { - this.parametersUtils = parametersUtils; - } - - @Override - public TaskType getTaskType() { - return TaskType.EVENT; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in EventTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - workflowTask.getInputParameters().put("sink", workflowTask.getSink()); - workflowTask.getInputParameters().put("asyncComplete", workflowTask.isAsyncComplete()); - Map eventTaskInput = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), 
workflowModel, taskId, null); - String sink = (String) eventTaskInput.get("sink"); - Boolean asynComplete = (Boolean) eventTaskInput.get("asyncComplete"); - - TaskModel eventTask = taskMapperContext.createTaskModel(); - eventTask.setTaskType(TASK_TYPE_EVENT); - eventTask.setStatus(TaskModel.Status.SCHEDULED); - - eventTask.setInputData(eventTaskInput); - eventTask.getInputData().put("sink", sink); - eventTask.getInputData().put("asyncComplete", asynComplete); - - return List.of(eventTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ExclusiveJoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ExclusiveJoinTaskMapper.java deleted file mode 100644 index 5fdb86ea2..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ExclusiveJoinTaskMapper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.model.TaskModel; - -@Component -public class ExclusiveJoinTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(ExclusiveJoinTaskMapper.class); - - @Override - public TaskType getTaskType() { - return TaskType.EXCLUSIVE_JOIN; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in ExclusiveJoinTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - - Map joinInput = new HashMap<>(); - joinInput.put("joinOn", workflowTask.getJoinOn()); - - if (workflowTask.getDefaultExclusiveJoinTask() != null) { - joinInput.put("defaultExclusiveJoinTask", workflowTask.getDefaultExclusiveJoinTask()); - } - - TaskModel joinTask = taskMapperContext.createTaskModel(); - joinTask.setTaskType(TaskType.TASK_TYPE_EXCLUSIVE_JOIN); - joinTask.setTaskDefName(TaskType.TASK_TYPE_EXCLUSIVE_JOIN); - joinTask.setStartTime(System.currentTimeMillis()); - joinTask.setInputData(joinInput); - joinTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(joinTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java 
b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java deleted file mode 100644 index 0a013b3b9..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#FORK_JOIN_DYNAMIC} to a LinkedList of {@link TaskModel} beginning with a {@link - * TaskType#TASK_TYPE_FORK}, followed by the user defined dynamic tasks and a {@link TaskType#JOIN} - * at the end - */ 
-@Component -public class ForkJoinDynamicTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(ForkJoinDynamicTaskMapper.class); - - private final IDGenerator idGenerator; - private final ParametersUtils parametersUtils; - private final ObjectMapper objectMapper; - private final MetadataDAO metadataDAO; - private static final TypeReference> ListOfWorkflowTasks = - new TypeReference<>() {}; - - @Autowired - public ForkJoinDynamicTaskMapper( - IDGenerator idGenerator, - ParametersUtils parametersUtils, - ObjectMapper objectMapper, - MetadataDAO metadataDAO) { - this.idGenerator = idGenerator; - this.parametersUtils = parametersUtils; - this.objectMapper = objectMapper; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.FORK_JOIN_DYNAMIC; - } - - /** - * This method gets the list of tasks that need to scheduled when the task to scheduled is of - * type {@link TaskType#FORK_JOIN_DYNAMIC}. Creates a Fork Task, followed by the Dynamic tasks - * and a final JOIN task. - * - *

    The definitions of the dynamic forks that need to be scheduled are available in the {@link - * WorkflowTask#getInputParameters()} which are accessed using the {@link - * TaskMapperContext#getWorkflowTask()}. The dynamic fork task definitions are referred by a key - * value either by {@link WorkflowTask#getDynamicForkTasksParam()} or by {@link - * WorkflowTask#getDynamicForkJoinTasksParam()} When creating the list of tasks to be scheduled - * a set of preconditions are validated: - * - *

      - *
    • If the input parameter representing the Dynamic fork tasks is available as part of - * {@link WorkflowTask#getDynamicForkTasksParam()} then the input for the dynamic task is - * validated to be a map by using {@link WorkflowTask#getDynamicForkTasksInputParamName()} - *
    • If the input parameter representing the Dynamic fork tasks is available as part of - * {@link WorkflowTask#getDynamicForkJoinTasksParam()} then the input for the dynamic - * tasks is available in the payload of the tasks definition. - *
    • A check is performed that the next following task in the {@link WorkflowDef} is a - * {@link TaskType#JOIN} - *
    - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return List of tasks in the following order: - *
      - *
    • {@link TaskType#TASK_TYPE_FORK} with {@link TaskModel.Status#COMPLETED} - *
    • Might be any kind of task, but this is most cases is a UserDefinedTask with {@link - * TaskModel.Status#SCHEDULED} - *
    • {@link TaskType#JOIN} with {@link TaskModel.Status#IN_PROGRESS} - *
    - * - * @throws TerminateWorkflowException In case of: - *
      - *
    • When the task after {@link TaskType#FORK_JOIN_DYNAMIC} is not a {@link - * TaskType#JOIN} - *
    • When the input parameters for the dynamic tasks are not of type {@link Map} - *
    - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - LOGGER.debug("TaskMapperContext {} in ForkJoinDynamicTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - int retryCount = taskMapperContext.getRetryCount(); - - List mappedTasks = new LinkedList<>(); - // Get the list of dynamic tasks and the input for the tasks - Pair, Map>> workflowTasksAndInputPair = - Optional.ofNullable(workflowTask.getDynamicForkTasksParam()) - .map( - dynamicForkTaskParam -> - getDynamicForkTasksAndInput( - workflowTask, workflowModel, dynamicForkTaskParam)) - .orElseGet( - () -> getDynamicForkJoinTasksAndInput(workflowTask, workflowModel)); - - List dynForkTasks = workflowTasksAndInputPair.getLeft(); - Map> tasksInput = workflowTasksAndInputPair.getRight(); - - // Create Fork Task which needs to be followed by the dynamic tasks - TaskModel forkDynamicTask = createDynamicForkTask(taskMapperContext, dynForkTasks); - - mappedTasks.add(forkDynamicTask); - - List joinOnTaskRefs = new LinkedList<>(); - // Add each dynamic task to the mapped tasks and also get the last dynamic task in the list, - // which indicates that the following task after that needs to be a join task - for (WorkflowTask dynForkTask : - dynForkTasks) { // TODO this is a cyclic dependency, break it out using function - // composition - List forkedTasks = - taskMapperContext - .getDeciderService() - .getTasksToBeScheduled(workflowModel, dynForkTask, retryCount); - - // It's an error state if no forkedTasks can be decided upon. In the cases where we've - // seen - // this happen is when a dynamic task is attempting to be created here, but a task with - // the - // same reference name has already been created in the Workflow. 
- if (forkedTasks == null || forkedTasks.isEmpty()) { - Optional existingTaskRefName = - workflowModel.getTasks().stream() - .filter( - runningTask -> - runningTask - .getStatus() - .equals( - TaskModel.Status - .IN_PROGRESS) - || runningTask.getStatus().isTerminal()) - .map(TaskModel::getReferenceTaskName) - .filter( - refTaskName -> - refTaskName.equals( - dynForkTask.getTaskReferenceName())) - .findAny(); - - // Construct an informative error message - String terminateMessage = - "No dynamic tasks could be created for the Workflow: " - + workflowModel.toShortString() - + ", Dynamic Fork Task: " - + dynForkTask; - if (existingTaskRefName.isPresent()) { - terminateMessage += - "Attempted to create a duplicate task reference name: " - + existingTaskRefName.get(); - } - throw new TerminateWorkflowException(terminateMessage); - } - - for (TaskModel forkedTask : forkedTasks) { - Map forkedTaskInput = - tasksInput.get(forkedTask.getReferenceTaskName()); - forkedTask.getInputData().putAll(forkedTaskInput); - } - mappedTasks.addAll(forkedTasks); - // Get the last of the dynamic tasks so that the join can be performed once this task is - // done - TaskModel last = forkedTasks.get(forkedTasks.size() - 1); - joinOnTaskRefs.add(last.getReferenceTaskName()); - } - - // From the workflow definition get the next task and make sure that it is a JOIN task. - // The dynamic fork tasks need to be followed by a join task - WorkflowTask joinWorkflowTask = - workflowModel - .getWorkflowDefinition() - .getNextTask(workflowTask.getTaskReferenceName()); - - if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) { - throw new TerminateWorkflowException( - "Dynamic join definition is not followed by a join task. 
Check the workflow definition."); - } - - // Create Join task - HashMap joinInput = new HashMap<>(); - joinInput.put("joinOn", joinOnTaskRefs); - TaskModel joinTask = createJoinTask(workflowModel, joinWorkflowTask, joinInput); - mappedTasks.add(joinTask); - - return mappedTasks; - } - - /** - * This method creates a FORK task and adds the list of dynamic fork tasks keyed by - * "forkedTaskDefs" and their names keyed by "forkedTasks" into {@link TaskModel#getInputData()} - * - * @param taskMapperContext: The {@link TaskMapperContext} which wraps workflowTask, workflowDef - * and workflowModel - * @param dynForkTasks: The list of dynamic forked tasks, the reference names of these tasks - * will be added to the forkDynamicTask - * @return A new instance of {@link TaskModel} representing a {@link TaskType#TASK_TYPE_FORK} - */ - @VisibleForTesting - TaskModel createDynamicForkTask( - TaskMapperContext taskMapperContext, List dynForkTasks) { - TaskModel forkDynamicTask = taskMapperContext.createTaskModel(); - forkDynamicTask.setTaskType(TaskType.TASK_TYPE_FORK); - forkDynamicTask.setTaskDefName(TaskType.TASK_TYPE_FORK); - forkDynamicTask.setStartTime(System.currentTimeMillis()); - forkDynamicTask.setEndTime(System.currentTimeMillis()); - List forkedTaskNames = - dynForkTasks.stream() - .map(WorkflowTask::getTaskReferenceName) - .collect(Collectors.toList()); - forkDynamicTask.getInputData().put("forkedTasks", forkedTaskNames); - forkDynamicTask - .getInputData() - .put( - "forkedTaskDefs", - dynForkTasks); // TODO: Remove this parameter in the later releases - forkDynamicTask.setStatus(TaskModel.Status.COMPLETED); - return forkDynamicTask; - } - - /** - * This method creates a JOIN task that is used in the {@link - * this#getMappedTasks(TaskMapperContext)} at the end to add a join task to be scheduled after - * all the fork tasks - * - * @param workflowModel: A instance of the {@link WorkflowModel} which represents the workflow - * being executed. 
- * @param joinWorkflowTask: A instance of {@link WorkflowTask} which is of type {@link - * TaskType#JOIN} - * @param joinInput: The input which is set in the {@link TaskModel#setInputData(Map)} - * @return a new instance of {@link TaskModel} representing a {@link TaskType#JOIN} - */ - @VisibleForTesting - TaskModel createJoinTask( - WorkflowModel workflowModel, - WorkflowTask joinWorkflowTask, - HashMap joinInput) { - TaskModel joinTask = new TaskModel(); - joinTask.setTaskType(TaskType.TASK_TYPE_JOIN); - joinTask.setTaskDefName(TaskType.TASK_TYPE_JOIN); - joinTask.setReferenceTaskName(joinWorkflowTask.getTaskReferenceName()); - joinTask.setWorkflowInstanceId(workflowModel.getWorkflowId()); - joinTask.setWorkflowType(workflowModel.getWorkflowName()); - joinTask.setCorrelationId(workflowModel.getCorrelationId()); - joinTask.setScheduledTime(System.currentTimeMillis()); - joinTask.setStartTime(System.currentTimeMillis()); - joinTask.setInputData(joinInput); - joinTask.setTaskId(idGenerator.generate()); - joinTask.setStatus(TaskModel.Status.IN_PROGRESS); - joinTask.setWorkflowTask(joinWorkflowTask); - joinTask.setWorkflowPriority(workflowModel.getPriority()); - return joinTask; - } - - /** - * This method is used to get the List of dynamic workflow tasks and their input based on the - * {@link WorkflowTask#getDynamicForkTasksParam()} - * - * @param workflowTask: The Task of type FORK_JOIN_DYNAMIC that needs to scheduled, which has - * the input parameters - * @param workflowModel: The instance of the {@link WorkflowModel} which represents the workflow - * being executed. 
- * @param dynamicForkTaskParam: The key representing the dynamic fork join json payload which is - * available in {@link WorkflowTask#getInputParameters()} - * @return a {@link Pair} representing the list of dynamic fork tasks in {@link Pair#getLeft()} - * and the input for the dynamic fork tasks in {@link Pair#getRight()} - * @throws TerminateWorkflowException : In case of input parameters of the dynamic fork tasks - * not represented as {@link Map} - */ - @SuppressWarnings("unchecked") - @VisibleForTesting - Pair, Map>> getDynamicForkTasksAndInput( - WorkflowTask workflowTask, WorkflowModel workflowModel, String dynamicForkTaskParam) - throws TerminateWorkflowException { - - Map input = - parametersUtils.getTaskInput( - workflowTask.getInputParameters(), workflowModel, null, null); - Object dynamicForkTasksJson = input.get(dynamicForkTaskParam); - List dynamicForkWorkflowTasks = - objectMapper.convertValue(dynamicForkTasksJson, ListOfWorkflowTasks); - if (dynamicForkWorkflowTasks == null) { - dynamicForkWorkflowTasks = new ArrayList<>(); - } - for (WorkflowTask dynamicForkWorkflowTask : dynamicForkWorkflowTasks) { - if ((dynamicForkWorkflowTask.getTaskDefinition() == null) - && StringUtils.isNotBlank(dynamicForkWorkflowTask.getName())) { - dynamicForkWorkflowTask.setTaskDefinition( - metadataDAO.getTaskDef(dynamicForkWorkflowTask.getName())); - } - } - Object dynamicForkTasksInput = input.get(workflowTask.getDynamicForkTasksInputParamName()); - if (!(dynamicForkTasksInput instanceof Map)) { - throw new TerminateWorkflowException( - "Input to the dynamically forked tasks is not a map -> expecting a map of K,V but found " - + dynamicForkTasksInput); - } - return new ImmutablePair<>( - dynamicForkWorkflowTasks, (Map>) dynamicForkTasksInput); - } - - /** - * This method is used to get the List of dynamic workflow tasks and their input based on the - * {@link WorkflowTask#getDynamicForkJoinTasksParam()} - * - *

    NOTE: This method is kept for legacy reasons, new workflows should use the {@link - * #getDynamicForkTasksAndInput} - * - * @param workflowTask: The Task of type FORK_JOIN_DYNAMIC that needs to scheduled, which has - * the input parameters - * @param workflowModel: The instance of the {@link WorkflowModel} which represents the workflow - * being executed. - * @return {@link Pair} representing the list of dynamic fork tasks in {@link Pair#getLeft()} - * and the input for the dynamic fork tasks in {@link Pair#getRight()} - * @throws TerminateWorkflowException : In case of the {@link WorkflowTask#getInputParameters()} - * does not have a payload that contains the list of the dynamic tasks - */ - @VisibleForTesting - Pair, Map>> getDynamicForkJoinTasksAndInput( - WorkflowTask workflowTask, WorkflowModel workflowModel) - throws TerminateWorkflowException { - String dynamicForkJoinTaskParam = workflowTask.getDynamicForkJoinTasksParam(); - Map input = - parametersUtils.getTaskInput( - workflowTask.getInputParameters(), workflowModel, null, null); - Object paramValue = input.get(dynamicForkJoinTaskParam); - DynamicForkJoinTaskList dynamicForkJoinTaskList = - objectMapper.convertValue(paramValue, DynamicForkJoinTaskList.class); - - if (dynamicForkJoinTaskList == null) { - String reason = - String.format( - "Dynamic tasks could not be created. 
The value of %s from task's input %s has no dynamic tasks to be scheduled", - dynamicForkJoinTaskParam, input); - LOGGER.error(reason); - throw new TerminateWorkflowException(reason); - } - - Map> dynamicForkJoinTasksInput = new HashMap<>(); - - List dynamicForkJoinWorkflowTasks = - dynamicForkJoinTaskList.getDynamicTasks().stream() - .peek( - dynamicForkJoinTask -> - dynamicForkJoinTasksInput.put( - dynamicForkJoinTask.getReferenceName(), - dynamicForkJoinTask - .getInput())) // TODO create a custom pair - // collector - .map( - dynamicForkJoinTask -> { - WorkflowTask dynamicForkJoinWorkflowTask = new WorkflowTask(); - dynamicForkJoinWorkflowTask.setTaskReferenceName( - dynamicForkJoinTask.getReferenceName()); - dynamicForkJoinWorkflowTask.setName( - dynamicForkJoinTask.getTaskName()); - dynamicForkJoinWorkflowTask.setType( - dynamicForkJoinTask.getType()); - if (dynamicForkJoinWorkflowTask.getTaskDefinition() == null - && StringUtils.isNotBlank( - dynamicForkJoinWorkflowTask.getName())) { - dynamicForkJoinWorkflowTask.setTaskDefinition( - metadataDAO.getTaskDef( - dynamicForkJoinTask.getTaskName())); - } - return dynamicForkJoinWorkflowTask; - }) - .collect(Collectors.toCollection(LinkedList::new)); - - return new ImmutablePair<>(dynamicForkJoinWorkflowTasks, dynamicForkJoinTasksInput); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java deleted file mode 100644 index 755d54a96..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#FORK_JOIN} to a LinkedList of {@link TaskModel} beginning with a completed {@link - * TaskType#TASK_TYPE_FORK}, followed by the user defined fork tasks - */ -@Component -public class ForkJoinTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(ForkJoinTaskMapper.class); - - @Override - public TaskType getTaskType() { - return TaskType.FORK_JOIN; - } - - /** - * This method gets the list of tasks that need to scheduled when the task to scheduled is of - * type {@link TaskType#FORK_JOIN}. - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return List of tasks in the following order: * - *

      - *
    • {@link TaskType#TASK_TYPE_FORK} with {@link TaskModel.Status#COMPLETED} - *
    • Might be any kind of task, but in most cases is a UserDefinedTask with {@link - * TaskModel.Status#SCHEDULED} - *
    - * - * @throws TerminateWorkflowException When the task after {@link TaskType#FORK_JOIN} is not a - * {@link TaskType#JOIN} - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - - LOGGER.debug("TaskMapperContext {} in ForkJoinTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - Map taskInput = taskMapperContext.getTaskInput(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - int retryCount = taskMapperContext.getRetryCount(); - - List tasksToBeScheduled = new LinkedList<>(); - TaskModel forkTask = taskMapperContext.createTaskModel(); - forkTask.setTaskType(TaskType.TASK_TYPE_FORK); - forkTask.setTaskDefName(TaskType.TASK_TYPE_FORK); - long epochMillis = System.currentTimeMillis(); - forkTask.setStartTime(epochMillis); - forkTask.setEndTime(epochMillis); - forkTask.setInputData(taskInput); - forkTask.setStatus(TaskModel.Status.COMPLETED); - - tasksToBeScheduled.add(forkTask); - List> forkTasks = workflowTask.getForkTasks(); - for (List wfts : forkTasks) { - WorkflowTask wft = wfts.get(0); - List tasks2 = - taskMapperContext - .getDeciderService() - .getTasksToBeScheduled(workflowModel, wft, retryCount); - tasksToBeScheduled.addAll(tasks2); - } - - WorkflowTask joinWorkflowTask = - workflowModel - .getWorkflowDefinition() - .getNextTask(workflowTask.getTaskReferenceName()); - - if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) { - throw new TerminateWorkflowException( - "Fork task definition is not followed by a join task. 
Check the blueprint"); - } - return tasksToBeScheduled; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapper.java deleted file mode 100644 index cdedc5c2e..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapper.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#HTTP} to a {@link TaskModel} of type {@link TaskType#HTTP} with {@link - * TaskModel.Status#SCHEDULED} - */ -@Component -public class HTTPTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(HTTPTaskMapper.class); - - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - @Autowired - public HTTPTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.HTTP; - } - - /** - * This method maps a {@link WorkflowTask} of type {@link TaskType#HTTP} to a {@link TaskModel} - * in a {@link TaskModel.Status#SCHEDULED} state - * - * 
@param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return a List with just one HTTP task - * @throws TerminateWorkflowException In case if the task definition does not exist - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - - LOGGER.debug("TaskMapperContext {} in HTTPTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - workflowTask.getInputParameters().put("asyncComplete", workflowTask.isAsyncComplete()); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - int retryCount = taskMapperContext.getRetryCount(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName())); - - Map input = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition); - Boolean asynComplete = (Boolean) input.get("asyncComplete"); - - TaskModel httpTask = taskMapperContext.createTaskModel(); - httpTask.setInputData(input); - httpTask.getInputData().put("asyncComplete", asynComplete); - httpTask.setStatus(TaskModel.Status.SCHEDULED); - httpTask.setRetryCount(retryCount); - httpTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - if (Objects.nonNull(taskDefinition)) { - httpTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); - httpTask.setRateLimitFrequencyInSeconds( - taskDefinition.getRateLimitFrequencyInSeconds()); - httpTask.setIsolationGroupId(taskDefinition.getIsolationGroupId()); - httpTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace()); - } - return List.of(httpTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapper.java 
b/core/src/main/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapper.java deleted file mode 100644 index 9a18bfd2f..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapper.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.tasks.Human; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#HUMAN} to a {@link TaskModel} of type {@link Human} with {@link - * TaskModel.Status#IN_PROGRESS} - */ -@Component -public class HumanTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(HumanTaskMapper.class); - - private final ParametersUtils parametersUtils; - - public HumanTaskMapper(ParametersUtils parametersUtils) { - this.parametersUtils = parametersUtils; - } - - @Override - public TaskType getTaskType() { - return TaskType.HUMAN; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - Map humanTaskInput = - parametersUtils.getTaskInputV2( - taskMapperContext.getWorkflowTask().getInputParameters(), - workflowModel, - taskId, - null); - - TaskModel humanTask = taskMapperContext.createTaskModel(); - 
humanTask.setTaskType(TASK_TYPE_HUMAN); - humanTask.setInputData(humanTaskInput); - humanTask.setStartTime(System.currentTimeMillis()); - humanTask.setStatus(TaskModel.Status.IN_PROGRESS); - return List.of(humanTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapper.java deleted file mode 100644 index 9125f0139..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapper.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#INLINE} to a List {@link TaskModel} starting with Task of type {@link TaskType#INLINE} - * which is marked as IN_PROGRESS, followed by the list of {@link TaskModel} based on the case - * expression evaluation in the Inline task. 
- */ -@Component -public class InlineTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(InlineTaskMapper.class); - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - public InlineTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.INLINE; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in InlineTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName())); - - Map taskInput = - parametersUtils.getTaskInputV2( - taskMapperContext.getWorkflowTask().getInputParameters(), - workflowModel, - taskId, - taskDefinition); - - TaskModel inlineTask = taskMapperContext.createTaskModel(); - inlineTask.setTaskType(TaskType.TASK_TYPE_INLINE); - inlineTask.setStartTime(System.currentTimeMillis()); - inlineTask.setInputData(taskInput); - inlineTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(inlineTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java deleted file mode 100644 index 02f2bb866..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#JOIN} to a {@link TaskModel} of type {@link TaskType#JOIN} - */ -@Component -public class JoinTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(JoinTaskMapper.class); - - @Override - public TaskType getTaskType() { - return TaskType.JOIN; - } - - /** - * This method maps {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#JOIN} to a {@link TaskModel} of type {@link TaskType#JOIN} with a status of {@link - * TaskModel.Status#IN_PROGRESS} - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return A {@link TaskModel} of type {@link TaskType#JOIN} in a List - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in JoinTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - - Map joinInput = new 
HashMap<>(); - joinInput.put("joinOn", workflowTask.getJoinOn()); - - TaskModel joinTask = taskMapperContext.createTaskModel(); - joinTask.setTaskType(TaskType.TASK_TYPE_JOIN); - joinTask.setTaskDefName(TaskType.TASK_TYPE_JOIN); - joinTask.setStartTime(System.currentTimeMillis()); - joinTask.setInputData(joinInput); - joinTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(joinTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapper.java deleted file mode 100644 index 7aeabd81c..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapper.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -@Component -public class JsonJQTransformTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(JsonJQTransformTaskMapper.class); - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - public JsonJQTransformTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.JSON_JQ_TRANSFORM; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in JsonJQTransformTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet(() -> 
metadataDAO.getTaskDef(workflowTask.getName())); - - Map taskInput = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition); - - TaskModel jsonJQTransformTask = taskMapperContext.createTaskModel(); - jsonJQTransformTask.setStartTime(System.currentTimeMillis()); - jsonJQTransformTask.setInputData(taskInput); - jsonJQTransformTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(jsonJQTransformTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapper.java deleted file mode 100644 index de852c4f5..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapper.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -@Component -public class KafkaPublishTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(KafkaPublishTaskMapper.class); - - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - @Autowired - public KafkaPublishTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.KAFKA_PUBLISH; - } - - /** - * This method maps a {@link WorkflowTask} of type {@link TaskType#KAFKA_PUBLISH} to a {@link - * TaskModel} in a {@link TaskModel.Status#SCHEDULED} state - * - * @param taskMapperContext: A wrapper class containing the {@link 
WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return a List with just one Kafka task - * @throws TerminateWorkflowException In case if the task definition does not exist - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - - LOGGER.debug("TaskMapperContext {} in KafkaPublishTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - int retryCount = taskMapperContext.getRetryCount(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName())); - - Map input = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition); - - TaskModel kafkaPublishTask = taskMapperContext.createTaskModel(); - kafkaPublishTask.setInputData(input); - kafkaPublishTask.setStatus(TaskModel.Status.SCHEDULED); - kafkaPublishTask.setRetryCount(retryCount); - kafkaPublishTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - if (Objects.nonNull(taskDefinition)) { - kafkaPublishTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace()); - kafkaPublishTask.setIsolationGroupId(taskDefinition.getIsolationGroupId()); - kafkaPublishTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); - kafkaPublishTask.setRateLimitFrequencyInSeconds( - taskDefinition.getRateLimitFrequencyInSeconds()); - } - return Collections.singletonList(kafkaPublishTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapper.java deleted file mode 100644 index 52e08be77..000000000 --- 
a/core/src/main/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapper.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * @author x-ultra - * @deprecated {@link com.netflix.conductor.core.execution.tasks.Lambda} is also deprecated. Use - * {@link com.netflix.conductor.core.execution.tasks.Inline} and so ${@link InlineTaskMapper} - * will be used as a result. 
- */ -@Deprecated -@Component -public class LambdaTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(LambdaTaskMapper.class); - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - public LambdaTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.LAMBDA; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in LambdaTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName())); - - Map taskInput = - parametersUtils.getTaskInputV2( - taskMapperContext.getWorkflowTask().getInputParameters(), - workflowModel, - taskId, - taskDefinition); - - TaskModel lambdaTask = taskMapperContext.createTaskModel(); - lambdaTask.setTaskType(TaskType.TASK_TYPE_LAMBDA); - lambdaTask.setStartTime(System.currentTimeMillis()); - lambdaTask.setInputData(taskInput); - lambdaTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(lambdaTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapper.java deleted file mode 100644 index a5ba3a443..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapper.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; - -@Component -public class SetVariableTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(SetVariableTaskMapper.class); - - @Override - public TaskType getTaskType() { - return TaskType.SET_VARIABLE; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - LOGGER.debug("TaskMapperContext {} in SetVariableMapper", taskMapperContext); - - TaskModel varTask = taskMapperContext.createTaskModel(); - varTask.setStartTime(System.currentTimeMillis()); - varTask.setInputData(taskMapperContext.getTaskInput()); - varTask.setStatus(TaskModel.Status.IN_PROGRESS); - - return List.of(varTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java deleted file mode 100644 index 6f5a1f991..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#SIMPLE} to a {@link TaskModel} with status {@link TaskModel.Status#SCHEDULED}. - * NOTE: There is not type defined for simples task. 
- */ -@Component -public class SimpleTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(SimpleTaskMapper.class); - private final ParametersUtils parametersUtils; - - public SimpleTaskMapper(ParametersUtils parametersUtils) { - this.parametersUtils = parametersUtils; - } - - @Override - public TaskType getTaskType() { - return TaskType.SIMPLE; - } - - /** - * This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE} to a {@link - * TaskModel} - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @throws TerminateWorkflowException In case if the task definition does not exist - * @return a List with just one simple task - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - - LOGGER.debug("TaskMapperContext {} in SimpleTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - int retryCount = taskMapperContext.getRetryCount(); - String retriedTaskId = taskMapperContext.getRetryTaskId(); - - TaskDef taskDefinition = - Optional.ofNullable(workflowTask.getTaskDefinition()) - .orElseThrow( - () -> { - String reason = - String.format( - "Invalid task. 
Task %s does not have a definition", - workflowTask.getName()); - return new TerminateWorkflowException(reason); - }); - - Map input = - parametersUtils.getTaskInput( - workflowTask.getInputParameters(), - workflowModel, - taskDefinition, - taskMapperContext.getTaskId()); - TaskModel simpleTask = taskMapperContext.createTaskModel(); - simpleTask.setTaskType(workflowTask.getName()); - simpleTask.setStartDelayInSeconds(workflowTask.getStartDelay()); - simpleTask.setInputData(input); - simpleTask.setStatus(TaskModel.Status.SCHEDULED); - simpleTask.setRetryCount(retryCount); - simpleTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - simpleTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds()); - simpleTask.setRetriedTaskId(retriedTaskId); - simpleTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); - simpleTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds()); - return List.of(simpleTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/StartWorkflowTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/StartWorkflowTaskMapper.java deleted file mode 100644 index 30f04b9be..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/StartWorkflowTaskMapper.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.START_WORKFLOW; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_START_WORKFLOW; - -@Component -public class StartWorkflowTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(StartWorkflowTaskMapper.class); - - @Override - public TaskType getTaskType() { - return START_WORKFLOW; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - - TaskModel startWorkflowTask = taskMapperContext.createTaskModel(); - startWorkflowTask.setTaskType(TASK_TYPE_START_WORKFLOW); - startWorkflowTask.addInput(taskMapperContext.getTaskInput()); - startWorkflowTask.setStatus(TaskModel.Status.SCHEDULED); - startWorkflowTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - LOGGER.debug("{} created", startWorkflowTask); - return List.of(startWorkflowTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java 
b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java deleted file mode 100644 index 167cd3aef..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; - -@Component -public class SubWorkflowTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(SubWorkflowTaskMapper.class); - - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - public SubWorkflowTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.SUB_WORKFLOW; - } - - @SuppressWarnings("rawtypes") - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - LOGGER.debug("TaskMapperContext {} in SubWorkflowTaskMapper", taskMapperContext); - WorkflowTask workflowTask = 
taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - // Check if there are sub workflow parameters, if not throw an exception, cannot initiate a - // sub-workflow without workflow params - SubWorkflowParams subWorkflowParams = getSubWorkflowParams(workflowTask); - - Map resolvedParams = - getSubWorkflowInputParameters(workflowModel, subWorkflowParams); - - String subWorkflowName = resolvedParams.get("name").toString(); - Integer subWorkflowVersion = getSubWorkflowVersion(resolvedParams, subWorkflowName); - - Object subWorkflowDefinition = resolvedParams.get("workflowDefinition"); - - Map subWorkflowTaskToDomain = null; - Object uncheckedTaskToDomain = resolvedParams.get("taskToDomain"); - if (uncheckedTaskToDomain instanceof Map) { - subWorkflowTaskToDomain = (Map) uncheckedTaskToDomain; - } - - TaskModel subWorkflowTask = taskMapperContext.createTaskModel(); - subWorkflowTask.setTaskType(TASK_TYPE_SUB_WORKFLOW); - subWorkflowTask.addInput("subWorkflowName", subWorkflowName); - subWorkflowTask.addInput("subWorkflowVersion", subWorkflowVersion); - subWorkflowTask.addInput("subWorkflowTaskToDomain", subWorkflowTaskToDomain); - subWorkflowTask.addInput("subWorkflowDefinition", subWorkflowDefinition); - subWorkflowTask.addInput("workflowInput", taskMapperContext.getTaskInput()); - subWorkflowTask.setStatus(TaskModel.Status.SCHEDULED); - subWorkflowTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - LOGGER.debug("SubWorkflowTask {} created to be Scheduled", subWorkflowTask); - return List.of(subWorkflowTask); - } - - @VisibleForTesting - SubWorkflowParams getSubWorkflowParams(WorkflowTask workflowTask) { - return Optional.ofNullable(workflowTask.getSubWorkflowParam()) - .orElseThrow( - () -> { - String reason = - String.format( - "Task %s is defined as sub-workflow and is missing subWorkflowParams. 
" - + "Please check the workflow definition", - workflowTask.getName()); - LOGGER.error(reason); - return new TerminateWorkflowException(reason); - }); - } - - private Map getSubWorkflowInputParameters( - WorkflowModel workflowModel, SubWorkflowParams subWorkflowParams) { - Map params = new HashMap<>(); - params.put("name", subWorkflowParams.getName()); - - Integer version = subWorkflowParams.getVersion(); - if (version != null) { - params.put("version", version); - } - Map taskToDomain = subWorkflowParams.getTaskToDomain(); - if (taskToDomain != null) { - params.put("taskToDomain", taskToDomain); - } - - params = parametersUtils.getTaskInputV2(params, workflowModel, null, null); - - // do not resolve params inside subworkflow definition - Object subWorkflowDefinition = subWorkflowParams.getWorkflowDefinition(); - if (subWorkflowDefinition != null) { - params.put("workflowDefinition", subWorkflowDefinition); - } - - return params; - } - - private Integer getSubWorkflowVersion( - Map resolvedParams, String subWorkflowName) { - return Optional.ofNullable(resolvedParams.get("version")) - .map(Object::toString) - .map(Integer::parseInt) - .orElseGet( - () -> - metadataDAO - .getLatestWorkflowDef(subWorkflowName) - .map(WorkflowDef::getVersion) - .orElseThrow( - () -> { - String reason = - String.format( - "The Task %s defined as a sub-workflow has no workflow definition available ", - subWorkflowName); - LOGGER.error(reason); - return new TerminateWorkflowException(reason); - })); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapper.java deleted file mode 100644 index 6f860fd86..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapper.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#SWITCH} to a List {@link TaskModel} starting with Task of type {@link TaskType#SWITCH} - * which is marked as IN_PROGRESS, followed by the list of {@link TaskModel} based on the case - * expression evaluation in the Switch task. - */ -@Component -public class SwitchTaskMapper implements TaskMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(SwitchTaskMapper.class); - - private final Map evaluators; - - @Autowired - public SwitchTaskMapper(Map evaluators) { - this.evaluators = evaluators; - } - - @Override - public TaskType getTaskType() { - return TaskType.SWITCH; - } - - /** - * This method gets the list of tasks that need to scheduled when the task to scheduled is of - * type {@link TaskType#SWITCH}. 
- * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return List of tasks in the following order: - *

      - *
    • {@link TaskType#SWITCH} with {@link TaskModel.Status#IN_PROGRESS} - *
    • List of tasks based on the evaluation of {@link WorkflowTask#getEvaluatorType()} - * and {@link WorkflowTask#getExpression()} are scheduled. - *
    • In the case of no matching {@link WorkflowTask#getEvaluatorType()}, workflow will - * be terminated with error message. In case of no matching result after the - * evaluation of the {@link WorkflowTask#getExpression()}, the {@link - * WorkflowTask#getDefaultCase()} Tasks are scheduled. - *
    - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - LOGGER.debug("TaskMapperContext {} in SwitchTaskMapper", taskMapperContext); - List tasksToBeScheduled = new LinkedList<>(); - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - Map taskInput = taskMapperContext.getTaskInput(); - int retryCount = taskMapperContext.getRetryCount(); - - // get the expression to be evaluated - String evaluatorType = workflowTask.getEvaluatorType(); - Evaluator evaluator = evaluators.get(evaluatorType); - if (evaluator == null) { - String errorMsg = String.format("No evaluator registered for type: %s", evaluatorType); - LOGGER.error(errorMsg); - throw new TerminateWorkflowException(errorMsg); - } - String evalResult = "" + evaluator.evaluate(workflowTask.getExpression(), taskInput); - - // QQ why is the case value and the caseValue passed and caseOutput passes as the same ?? - TaskModel switchTask = taskMapperContext.createTaskModel(); - switchTask.setTaskType(TaskType.TASK_TYPE_SWITCH); - switchTask.setTaskDefName(TaskType.TASK_TYPE_SWITCH); - switchTask.getInputData().put("case", evalResult); - switchTask.getOutputData().put("evaluationResult", Collections.singletonList(evalResult)); - switchTask.setStartTime(System.currentTimeMillis()); - switchTask.setStatus(TaskModel.Status.IN_PROGRESS); - tasksToBeScheduled.add(switchTask); - - // get the list of tasks based on the evaluated expression - List selectedTasks = workflowTask.getDecisionCases().get(evalResult); - // if the tasks returned are empty based on evaluated result, then get the default case if - // there is one - if (selectedTasks == null || selectedTasks.isEmpty()) { - selectedTasks = workflowTask.getDefaultCase(); - } - // once there are selected tasks that need to proceeded as part of the switch, get the next - // task to be scheduled by using the decider service - if (selectedTasks != null && 
!selectedTasks.isEmpty()) { - WorkflowTask selectedTask = - selectedTasks.get(0); // Schedule the first task to be executed... - // TODO break out this recursive call using function composition of what needs to be - // done and then walk back the condition tree - List caseTasks = - taskMapperContext - .getDeciderService() - .getTasksToBeScheduled( - workflowModel, - selectedTask, - retryCount, - taskMapperContext.getRetryTaskId()); - tasksToBeScheduled.addAll(caseTasks); - switchTask.getInputData().put("hasChildren", "true"); - } - return tasksToBeScheduled; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapper.java deleted file mode 100644 index 016fe231b..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapper.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; - -public interface TaskMapper { - - TaskType getTaskType(); - - List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException; -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java deleted file mode 100644 index a34c4a0e4..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Map; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** Business Object class used for interaction between the DeciderService and Different Mappers */ -public class TaskMapperContext { - - private final WorkflowModel workflowModel; - private final TaskDef taskDefinition; - private final WorkflowTask workflowTask; - private final Map taskInput; - private final int retryCount; - private final String retryTaskId; - private final String taskId; - private final DeciderService deciderService; - - private TaskMapperContext(Builder builder) { - workflowModel = builder.workflowModel; - taskDefinition = builder.taskDefinition; - workflowTask = builder.workflowTask; - taskInput = builder.taskInput; - retryCount = builder.retryCount; - retryTaskId = builder.retryTaskId; - taskId = builder.taskId; - deciderService = builder.deciderService; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public static Builder newBuilder(TaskMapperContext copy) { - Builder builder = new Builder(); - builder.workflowModel = copy.getWorkflowModel(); - builder.taskDefinition = copy.getTaskDefinition(); - builder.workflowTask = copy.getWorkflowTask(); - builder.taskInput = copy.getTaskInput(); - builder.retryCount = copy.getRetryCount(); - builder.retryTaskId = 
copy.getRetryTaskId(); - builder.taskId = copy.getTaskId(); - builder.deciderService = copy.getDeciderService(); - return builder; - } - - public WorkflowDef getWorkflowDefinition() { - return workflowModel.getWorkflowDefinition(); - } - - public WorkflowModel getWorkflowModel() { - return workflowModel; - } - - public TaskDef getTaskDefinition() { - return taskDefinition; - } - - public WorkflowTask getWorkflowTask() { - return workflowTask; - } - - public int getRetryCount() { - return retryCount; - } - - public String getRetryTaskId() { - return retryTaskId; - } - - public String getTaskId() { - return taskId; - } - - public Map getTaskInput() { - return taskInput; - } - - public DeciderService getDeciderService() { - return deciderService; - } - - public TaskModel createTaskModel() { - TaskModel taskModel = new TaskModel(); - taskModel.setReferenceTaskName(workflowTask.getTaskReferenceName()); - taskModel.setWorkflowInstanceId(workflowModel.getWorkflowId()); - taskModel.setWorkflowType(workflowModel.getWorkflowName()); - taskModel.setCorrelationId(workflowModel.getCorrelationId()); - taskModel.setScheduledTime(System.currentTimeMillis()); - - taskModel.setTaskId(taskId); - taskModel.setWorkflowTask(workflowTask); - taskModel.setWorkflowPriority(workflowModel.getPriority()); - - // the following properties are overridden by some TaskMapper implementations - taskModel.setTaskType(workflowTask.getType()); - taskModel.setTaskDefName(workflowTask.getName()); - return taskModel; - } - - @Override - public String toString() { - return "TaskMapperContext{" - + "workflowDefinition=" - + getWorkflowDefinition() - + ", workflowModel=" - + workflowModel - + ", workflowTask=" - + workflowTask - + ", taskInput=" - + taskInput - + ", retryCount=" - + retryCount - + ", retryTaskId='" - + retryTaskId - + '\'' - + ", taskId='" - + taskId - + '\'' - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof 
TaskMapperContext)) { - return false; - } - - TaskMapperContext that = (TaskMapperContext) o; - - if (getRetryCount() != that.getRetryCount()) { - return false; - } - if (!getWorkflowDefinition().equals(that.getWorkflowDefinition())) { - return false; - } - if (!getWorkflowModel().equals(that.getWorkflowModel())) { - return false; - } - if (!getWorkflowTask().equals(that.getWorkflowTask())) { - return false; - } - if (!getTaskInput().equals(that.getTaskInput())) { - return false; - } - if (getRetryTaskId() != null - ? !getRetryTaskId().equals(that.getRetryTaskId()) - : that.getRetryTaskId() != null) { - return false; - } - return getTaskId().equals(that.getTaskId()); - } - - @Override - public int hashCode() { - int result = getWorkflowDefinition().hashCode(); - result = 31 * result + getWorkflowModel().hashCode(); - result = 31 * result + getWorkflowTask().hashCode(); - result = 31 * result + getTaskInput().hashCode(); - result = 31 * result + getRetryCount(); - result = 31 * result + (getRetryTaskId() != null ? getRetryTaskId().hashCode() : 0); - result = 31 * result + getTaskId().hashCode(); - return result; - } - - /** {@code TaskMapperContext} builder static inner class. */ - public static final class Builder { - - private WorkflowModel workflowModel; - private TaskDef taskDefinition; - private WorkflowTask workflowTask; - private Map taskInput; - private int retryCount; - private String retryTaskId; - private String taskId; - private DeciderService deciderService; - - private Builder() {} - - /** - * Sets the {@code workflowModel} and returns a reference to this Builder so that the - * methods can be chained together. - * - * @param val the {@code workflowModel} to set - * @return a reference to this Builder - */ - public Builder withWorkflowModel(WorkflowModel val) { - workflowModel = val; - return this; - } - - /** - * Sets the {@code taskDefinition} and returns a reference to this Builder so that the - * methods can be chained together. 
- * - * @param val the {@code taskDefinition} to set - * @return a reference to this Builder - */ - public Builder withTaskDefinition(TaskDef val) { - taskDefinition = val; - return this; - } - - /** - * Sets the {@code workflowTask} and returns a reference to this Builder so that the methods - * can be chained together. - * - * @param val the {@code workflowTask} to set - * @return a reference to this Builder - */ - public Builder withWorkflowTask(WorkflowTask val) { - workflowTask = val; - return this; - } - - /** - * Sets the {@code taskInput} and returns a reference to this Builder so that the methods - * can be chained together. - * - * @param val the {@code taskInput} to set - * @return a reference to this Builder - */ - public Builder withTaskInput(Map val) { - taskInput = val; - return this; - } - - /** - * Sets the {@code retryCount} and returns a reference to this Builder so that the methods - * can be chained together. - * - * @param val the {@code retryCount} to set - * @return a reference to this Builder - */ - public Builder withRetryCount(int val) { - retryCount = val; - return this; - } - - /** - * Sets the {@code retryTaskId} and returns a reference to this Builder so that the methods - * can be chained together. - * - * @param val the {@code retryTaskId} to set - * @return a reference to this Builder - */ - public Builder withRetryTaskId(String val) { - retryTaskId = val; - return this; - } - - /** - * Sets the {@code taskId} and returns a reference to this Builder so that the methods can - * be chained together. - * - * @param val the {@code taskId} to set - * @return a reference to this Builder - */ - public Builder withTaskId(String val) { - taskId = val; - return this; - } - - /** - * Sets the {@code deciderService} and returns a reference to this Builder so that the - * methods can be chained together. 
- * - * @param val the {@code deciderService} to set - * @return a reference to this Builder - */ - public Builder withDeciderService(DeciderService val) { - deciderService = val; - return this; - } - - /** - * Returns a {@code TaskMapperContext} built from the parameters previously set. - * - * @return a {@code TaskMapperContext} built with parameters of this {@code - * TaskMapperContext.Builder} - */ - public TaskMapperContext build() { - return new TaskMapperContext(this); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapper.java deleted file mode 100644 index e5fcf5c2b..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapper.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_TERMINATE; - -@Component -public class TerminateTaskMapper implements TaskMapper { - - public static final Logger logger = LoggerFactory.getLogger(TerminateTaskMapper.class); - private final ParametersUtils parametersUtils; - - public TerminateTaskMapper(ParametersUtils parametersUtils) { - this.parametersUtils = parametersUtils; - } - - @Override - public TaskType getTaskType() { - return TaskType.TERMINATE; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - logger.debug("TaskMapperContext {} in TerminateTaskMapper", taskMapperContext); - - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - Map taskInput = - parametersUtils.getTaskInputV2( - taskMapperContext.getWorkflowTask().getInputParameters(), - workflowModel, - taskId, - null); - - TaskModel task = taskMapperContext.createTaskModel(); - task.setTaskType(TASK_TYPE_TERMINATE); - task.setStartTime(System.currentTimeMillis()); - task.setInputData(taskInput); - task.setStatus(TaskModel.Status.IN_PROGRESS); - return List.of(task); - } -} diff --git 
a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java deleted file mode 100644 index c7919d7dd..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#USER_DEFINED} to a {@link TaskModel} of type {@link TaskType#USER_DEFINED} with {@link - * TaskModel.Status#SCHEDULED} - */ -@Component -public class UserDefinedTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(UserDefinedTaskMapper.class); - - private final ParametersUtils parametersUtils; - private final MetadataDAO metadataDAO; - - public UserDefinedTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; - } - - @Override - public TaskType getTaskType() { - return TaskType.USER_DEFINED; - } - - /** - * This method maps a {@link WorkflowTask} of type {@link TaskType#USER_DEFINED} to a {@link - * TaskModel} in a {@link 
TaskModel.Status#SCHEDULED} state - * - * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link - * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId - * @return a List with just one User defined task - * @throws TerminateWorkflowException In case if the task definition does not exist - */ - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) - throws TerminateWorkflowException { - - LOGGER.debug("TaskMapperContext {} in UserDefinedTaskMapper", taskMapperContext); - - WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - int retryCount = taskMapperContext.getRetryCount(); - - TaskDef taskDefinition = - Optional.ofNullable(taskMapperContext.getTaskDefinition()) - .orElseGet( - () -> - Optional.ofNullable( - metadataDAO.getTaskDef( - workflowTask.getName())) - .orElseThrow( - () -> { - String reason = - String.format( - "Invalid task specified. 
Cannot find task by name %s in the task definitions", - workflowTask.getName()); - return new TerminateWorkflowException( - reason); - })); - - Map input = - parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition); - - TaskModel userDefinedTask = taskMapperContext.createTaskModel(); - userDefinedTask.setInputData(input); - userDefinedTask.setStatus(TaskModel.Status.SCHEDULED); - userDefinedTask.setRetryCount(retryCount); - userDefinedTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); - userDefinedTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); - userDefinedTask.setRateLimitFrequencyInSeconds( - taskDefinition.getRateLimitFrequencyInSeconds()); - - return List.of(userDefinedTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java deleted file mode 100644 index 378d56bd9..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.tasks.Wait; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; - -/** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link - * TaskType#WAIT} to a {@link TaskModel} of type {@link Wait} with {@link - * TaskModel.Status#IN_PROGRESS} - */ -@Component -public class WaitTaskMapper implements TaskMapper { - - public static final Logger LOGGER = LoggerFactory.getLogger(WaitTaskMapper.class); - - private final ParametersUtils parametersUtils; - - public WaitTaskMapper(ParametersUtils parametersUtils) { - this.parametersUtils = parametersUtils; - } - - @Override - public TaskType getTaskType() { - return TaskType.WAIT; - } - - @Override - public List getMappedTasks(TaskMapperContext taskMapperContext) { - - LOGGER.debug("TaskMapperContext {} in WaitTaskMapper", taskMapperContext); - - WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); - String taskId = taskMapperContext.getTaskId(); - - Map waitTaskInput = - parametersUtils.getTaskInputV2( - taskMapperContext.getWorkflowTask().getInputParameters(), - workflowModel, - taskId, - 
null); - - TaskModel waitTask = taskMapperContext.createTaskModel(); - waitTask.setTaskType(TASK_TYPE_WAIT); - waitTask.setInputData(waitTaskInput); - waitTask.setStartTime(System.currentTimeMillis()); - waitTask.setStatus(TaskModel.Status.IN_PROGRESS); - return List.of(waitTask); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Decision.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Decision.java deleted file mode 100644 index 0a96845f7..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Decision.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DECISION; - -/** - * @deprecated {@link Decision} is deprecated. Use {@link Switch} task for condition evaluation - * using the extensible evaluation framework. Also see ${@link - * com.netflix.conductor.common.metadata.workflow.WorkflowTask}). - */ -@Deprecated -@Component(TASK_TYPE_DECISION) -public class Decision extends WorkflowSystemTask { - - public Decision() { - super(TASK_TYPE_DECISION); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - task.setStatus(TaskModel.Status.COMPLETED); - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/DoWhile.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/DoWhile.java deleted file mode 100644 index af9bedc76..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/DoWhile.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.*; -import java.util.stream.Collectors; - -import javax.script.ScriptException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.events.ScriptEvaluator; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE; - -@Component(TASK_TYPE_DO_WHILE) -public class DoWhile extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(DoWhile.class); - - private final ParametersUtils parametersUtils; - - public DoWhile(ParametersUtils parametersUtils) { - super(TASK_TYPE_DO_WHILE); - this.parametersUtils = parametersUtils; - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - task.setStatus(TaskModel.Status.CANCELED); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel doWhileTaskModel, WorkflowExecutor workflowExecutor) { - - boolean allDone = true; - boolean hasFailures = false; - StringBuilder failureReason = new StringBuilder(); - Map output = new HashMap<>(); - - /* - * Get 
the latest set of tasks (the ones that have the highest retry count). We don't want to evaluate any tasks - * that have already failed if there is a more current one (a later retry count). - */ - Map relevantTasks = new LinkedHashMap<>(); - TaskModel relevantTask; - for (TaskModel t : workflow.getTasks()) { - if (doWhileTaskModel - .getWorkflowTask() - .has(TaskUtils.removeIterationFromTaskRefName(t.getReferenceTaskName())) - && !doWhileTaskModel.getReferenceTaskName().equals(t.getReferenceTaskName()) - && doWhileTaskModel.getIteration() == t.getIteration()) { - relevantTask = relevantTasks.get(t.getReferenceTaskName()); - if (relevantTask == null || t.getRetryCount() > relevantTask.getRetryCount()) { - relevantTasks.put(t.getReferenceTaskName(), t); - } - } - } - Collection loopOverTasks = relevantTasks.values(); - LOGGER.debug( - "Workflow {} waiting for tasks {} to complete iteration {}", - workflow.getWorkflowId(), - loopOverTasks.stream() - .map(TaskModel::getReferenceTaskName) - .collect(Collectors.toList()), - doWhileTaskModel.getIteration()); - - // if the loopOver collection is empty, no tasks inside the loop have been scheduled. - // so schedule it and exit the method. 
- if (loopOverTasks.isEmpty()) { - doWhileTaskModel.setIteration(1); - doWhileTaskModel.addOutput("iteration", doWhileTaskModel.getIteration()); - return scheduleNextIteration(doWhileTaskModel, workflow, workflowExecutor); - } - - for (TaskModel loopOverTask : loopOverTasks) { - TaskModel.Status taskStatus = loopOverTask.getStatus(); - hasFailures = !taskStatus.isSuccessful(); - if (hasFailures) { - failureReason.append(loopOverTask.getReasonForIncompletion()).append(" "); - } - output.put( - TaskUtils.removeIterationFromTaskRefName(loopOverTask.getReferenceTaskName()), - loopOverTask.getOutputData()); - allDone = taskStatus.isTerminal(); - if (!allDone || hasFailures) { - break; - } - } - doWhileTaskModel - .getOutputData() - .put(String.valueOf(doWhileTaskModel.getIteration()), output); - if (hasFailures) { - LOGGER.debug( - "Task {} failed in {} iteration", - doWhileTaskModel.getTaskId(), - doWhileTaskModel.getIteration() + 1); - return updateLoopTask( - doWhileTaskModel, TaskModel.Status.FAILED, failureReason.toString()); - } else if (!allDone) { - return false; - } - boolean shouldContinue; - try { - shouldContinue = getEvaluatedCondition(workflow, doWhileTaskModel, workflowExecutor); - LOGGER.debug( - "Task {} condition evaluated to {}", - doWhileTaskModel.getTaskId(), - shouldContinue); - if (shouldContinue) { - doWhileTaskModel.setIteration(doWhileTaskModel.getIteration() + 1); - doWhileTaskModel.getOutputData().put("iteration", doWhileTaskModel.getIteration()); - return scheduleNextIteration(doWhileTaskModel, workflow, workflowExecutor); - } else { - LOGGER.debug( - "Task {} took {} iterations to complete", - doWhileTaskModel.getTaskId(), - doWhileTaskModel.getIteration() + 1); - return markLoopTaskSuccess(doWhileTaskModel); - } - } catch (ScriptException e) { - String message = - String.format( - "Unable to evaluate condition %s , exception %s", - doWhileTaskModel.getWorkflowTask().getLoopCondition(), e.getMessage()); - LOGGER.error(message); - 
LOGGER.error("Marking task {} failed with error.", doWhileTaskModel.getTaskId()); - return updateLoopTask( - doWhileTaskModel, TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, message); - } - } - - boolean scheduleNextIteration( - TaskModel task, WorkflowModel workflow, WorkflowExecutor workflowExecutor) { - LOGGER.debug( - "Scheduling loop tasks for task {} as condition {} evaluated to true", - task.getTaskId(), - task.getWorkflowTask().getLoopCondition()); - workflowExecutor.scheduleNextIteration(task, workflow); - return true; // Return true even though status not changed. Iteration has to be updated in - // execution DAO. - } - - boolean updateLoopTask(TaskModel task, TaskModel.Status status, String failureReason) { - task.setReasonForIncompletion(failureReason); - task.setStatus(status); - return true; - } - - boolean markLoopTaskSuccess(TaskModel task) { - LOGGER.debug( - "task {} took {} iterations to complete", - task.getTaskId(), - task.getIteration() + 1); - task.setStatus(TaskModel.Status.COMPLETED); - return true; - } - - @VisibleForTesting - boolean getEvaluatedCondition( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) - throws ScriptException { - TaskDef taskDefinition = null; - try { - taskDefinition = workflowExecutor.getTaskDefinition(task); - } catch (TerminateWorkflowException e) { - // It is ok to not have a task definition for a DO_WHILE task - } - - Map taskInput = - parametersUtils.getTaskInputV2( - task.getWorkflowTask().getInputParameters(), - workflow, - task.getTaskId(), - taskDefinition); - taskInput.put(task.getReferenceTaskName(), task.getOutputData()); - List loopOver = - workflow.getTasks().stream() - .filter( - t -> - (task.getWorkflowTask() - .has( - TaskUtils - .removeIterationFromTaskRefName( - t - .getReferenceTaskName())) - && !task.getReferenceTaskName() - .equals(t.getReferenceTaskName()))) - .collect(Collectors.toList()); - - for (TaskModel loopOverTask : loopOver) { - taskInput.put( - 
TaskUtils.removeIterationFromTaskRefName(loopOverTask.getReferenceTaskName()), - loopOverTask.getOutputData()); - } - String condition = task.getWorkflowTask().getLoopCondition(); - boolean shouldContinue = false; - if (condition != null) { - LOGGER.debug("Condition: {} is being evaluated", condition); - // Evaluate the expression by using the Nashorn based script evaluator - shouldContinue = ScriptEvaluator.evalBool(condition, taskInput); - } - return shouldContinue; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java deleted file mode 100644 index 5e9a9aef4..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.core.events.EventQueues; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EVENT; - -@Component(TASK_TYPE_EVENT) -public class Event extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(Event.class); - public static final String NAME = "EVENT"; - - private final ObjectMapper objectMapper; - private final ParametersUtils parametersUtils; - private final EventQueues eventQueues; - - public Event( - EventQueues eventQueues, ParametersUtils parametersUtils, ObjectMapper objectMapper) { - super(TASK_TYPE_EVENT); - this.parametersUtils = parametersUtils; - this.eventQueues = eventQueues; - this.objectMapper = objectMapper; - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - Map 
payload = new HashMap<>(task.getInputData()); - payload.put("workflowInstanceId", workflow.getWorkflowId()); - payload.put("workflowType", workflow.getWorkflowName()); - payload.put("workflowVersion", workflow.getWorkflowVersion()); - payload.put("correlationId", workflow.getCorrelationId()); - - try { - String payloadJson = objectMapper.writeValueAsString(payload); - Message message = new Message(task.getTaskId(), payloadJson, task.getTaskId()); - ObservableQueue queue = getQueue(workflow, task); - queue.publish(List.of(message)); - LOGGER.debug("Published message:{} to queue:{}", message.getId(), queue.getName()); - task.getOutputData().putAll(payload); - task.setStatus( - isAsyncComplete(task) - ? TaskModel.Status.IN_PROGRESS - : TaskModel.Status.COMPLETED); - } catch (ApplicationException ae) { - if (ae.isRetryable()) { - LOGGER.info( - "A transient backend error happened when task {} tried to publish an event.", - task.getTaskId()); - } else { - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(ae.getMessage()); - LOGGER.error( - "Error executing task: {}, workflow: {}", - task.getTaskId(), - workflow.getWorkflowId(), - ae); - } - } catch (JsonProcessingException jpe) { - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion("Error serializing JSON payload: " + jpe.getMessage()); - LOGGER.error( - "Error serializing JSON payload for task: {}, workflow: {}", - task.getTaskId(), - workflow.getWorkflowId()); - } catch (Exception e) { - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(e.getMessage()); - LOGGER.error( - "Error executing task: {}, workflow: {}", - task.getTaskId(), - workflow.getWorkflowId(), - e); - } - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - Message message = new Message(task.getTaskId(), null, task.getTaskId()); - ObservableQueue queue = getQueue(workflow, task); - queue.ack(List.of(message)); - } - - 
@Override - public boolean isAsync() { - return false; - } - - @VisibleForTesting - ObservableQueue getQueue(WorkflowModel workflow, TaskModel task) { - String sinkValueRaw = (String) task.getInputData().get("sink"); - Map input = new HashMap<>(); - input.put("sink", sinkValueRaw); - Map replaced = - parametersUtils.getTaskInputV2(input, workflow, task.getTaskId(), null); - String sinkValue = (String) replaced.get("sink"); - String queueName = sinkValue; - - if (sinkValue.startsWith("conductor")) { - if ("conductor".equals(sinkValue)) { - queueName = - sinkValue - + ":" - + workflow.getWorkflowName() - + ":" - + task.getReferenceTaskName(); - } else if (sinkValue.startsWith("conductor:")) { - queueName = - "conductor:" - + workflow.getWorkflowName() - + ":" - + sinkValue.replaceAll("conductor:", ""); - } else { - throw new IllegalStateException( - "Invalid / Unsupported sink specified: " + sinkValue); - } - } - - task.getOutputData().put("event_produced", queueName); - - try { - return eventQueues.getQueue(queueName); - } catch (IllegalArgumentException e) { - throw new IllegalStateException( - "Error loading queue for name:" - + queueName - + ", sink:" - + sinkValue - + ", error: " - + e.getMessage()); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExclusiveJoin.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExclusiveJoin.java deleted file mode 100644 index e2bf0ac0b..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExclusiveJoin.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.List; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EXCLUSIVE_JOIN; - -@Component(TASK_TYPE_EXCLUSIVE_JOIN) -public class ExclusiveJoin extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExclusiveJoin.class); - - private static final String DEFAULT_EXCLUSIVE_JOIN_TASKS = "defaultExclusiveJoinTask"; - - public ExclusiveJoin() { - super(TASK_TYPE_EXCLUSIVE_JOIN); - } - - @Override - @SuppressWarnings("unchecked") - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - - boolean foundExlusiveJoinOnTask = false; - boolean hasFailures = false; - StringBuilder failureReason = new StringBuilder(); - TaskModel.Status taskStatus; - List joinOn = (List) task.getInputData().get("joinOn"); - if (task.isLoopOverTask()) { - // If exclusive join is part of loop over task, wait for specific iteration to get - // complete - joinOn = - joinOn.stream() - .map(name -> TaskUtils.appendIteration(name, task.getIteration())) - .collect(Collectors.toList()); - } - TaskModel exclusiveTask = null; - for (String joinOnRef : joinOn) { - LOGGER.debug("Exclusive Join On Task {} ", joinOnRef); - exclusiveTask = 
workflow.getTaskByRefName(joinOnRef); - if (exclusiveTask == null || exclusiveTask.getStatus() == TaskModel.Status.SKIPPED) { - LOGGER.debug("The task {} is either not scheduled or skipped.", joinOnRef); - continue; - } - taskStatus = exclusiveTask.getStatus(); - foundExlusiveJoinOnTask = taskStatus.isTerminal(); - hasFailures = !taskStatus.isSuccessful(); - if (hasFailures) { - failureReason.append(exclusiveTask.getReasonForIncompletion()).append(" "); - } - - break; - } - - if (!foundExlusiveJoinOnTask) { - List defaultExclusiveJoinTasks = - (List) task.getInputData().get(DEFAULT_EXCLUSIVE_JOIN_TASKS); - LOGGER.info( - "Could not perform exclusive on Join Task(s). Performing now on default exclusive join task(s) {}, workflow: {}", - defaultExclusiveJoinTasks, - workflow.getWorkflowId()); - if (defaultExclusiveJoinTasks != null && !defaultExclusiveJoinTasks.isEmpty()) { - for (String defaultExclusiveJoinTask : defaultExclusiveJoinTasks) { - // Pick the first task that we should join on and break. - exclusiveTask = workflow.getTaskByRefName(defaultExclusiveJoinTask); - if (exclusiveTask == null - || exclusiveTask.getStatus() == TaskModel.Status.SKIPPED) { - LOGGER.debug( - "The task {} is either not scheduled or skipped.", - defaultExclusiveJoinTask); - continue; - } - - taskStatus = exclusiveTask.getStatus(); - foundExlusiveJoinOnTask = taskStatus.isTerminal(); - hasFailures = !taskStatus.isSuccessful(); - if (hasFailures) { - failureReason.append(exclusiveTask.getReasonForIncompletion()).append(" "); - } - break; - } - } else { - LOGGER.debug( - "Could not evaluate last tasks output. 
Verify the task configuration in the workflow definition."); - } - } - - LOGGER.debug( - "Status of flags: foundExlusiveJoinOnTask: {}, hasFailures {}", - foundExlusiveJoinOnTask, - hasFailures); - if (foundExlusiveJoinOnTask || hasFailures) { - if (hasFailures) { - task.setReasonForIncompletion(failureReason.toString()); - task.setStatus(TaskModel.Status.FAILED); - } else { - task.setOutputData(exclusiveTask.getOutputData()); - task.setStatus(TaskModel.Status.COMPLETED); - } - LOGGER.debug("Task: {} status is: {}", task.getTaskId(), task.getStatus()); - return true; - } - return false; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java deleted file mode 100644 index 7115dfd1d..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.apache.commons.lang3.concurrent.BasicThreadFactory; - -import com.netflix.conductor.core.utils.SemaphoreUtil; - -class ExecutionConfig { - - private final ExecutorService executorService; - private final SemaphoreUtil semaphoreUtil; - - ExecutionConfig(int threadCount, String threadNameFormat) { - - this.executorService = - Executors.newFixedThreadPool( - threadCount, - new BasicThreadFactory.Builder().namingPattern(threadNameFormat).build()); - - this.semaphoreUtil = new SemaphoreUtil(threadCount); - } - - public ExecutorService getExecutorService() { - return executorService; - } - - public SemaphoreUtil getSemaphoreUtil() { - return semaphoreUtil; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java deleted file mode 100644 index 9f31af750..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import org.springframework.stereotype.Component; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK; - -@Component(TASK_TYPE_FORK) -public class Fork extends WorkflowSystemTask { - - public Fork() { - super(TASK_TYPE_FORK); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Human.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Human.java deleted file mode 100644 index c4dfc4e31..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Human.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN; -import static com.netflix.conductor.model.TaskModel.Status.IN_PROGRESS; - -@Component(TASK_TYPE_HUMAN) -public class Human extends WorkflowSystemTask { - - public Human() { - super(TASK_TYPE_HUMAN); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - task.setStatus(IN_PROGRESS); - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - task.setStatus(TaskModel.Status.CANCELED); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java deleted file mode 100644 index ecfdbd7f6..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Map; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_INLINE; - -/** - * @author X-Ultra - *

    Task that enables execute inline script at workflow execution. For example, - *

    - * ...
    - * {
    - *  "tasks": [
    - *      {
    - *          "name": "INLINE",
    - *          "taskReferenceName": "inline_test",
    - *          "type": "INLINE",
    - *          "inputParameters": {
    - *              "input": "${workflow.input}",
    - *              "evaluatorType": "javascript"
    - *              "expression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"
    - *          }
    - *      }
    - *  ]
    - * }
    - * ...
    - * 
    - * then to use task output, e.g. script_test.output.testvalue {@link Inline} is a - * replacement for deprecated {@link Lambda} - */ -@Component(TASK_TYPE_INLINE) -public class Inline extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(Inline.class); - private static final String QUERY_EVALUATOR_TYPE = "evaluatorType"; - private static final String QUERY_EXPRESSION_PARAMETER = "expression"; - public static final String NAME = "INLINE"; - - private final Map evaluators; - - public Inline(Map evaluators) { - super(TASK_TYPE_INLINE); - this.evaluators = evaluators; - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - Map taskInput = task.getInputData(); - Map taskOutput = task.getOutputData(); - String evaluatorType = (String) taskInput.get(QUERY_EVALUATOR_TYPE); - String expression = (String) taskInput.get(QUERY_EXPRESSION_PARAMETER); - - try { - checkEvaluatorType(evaluatorType); - checkExpression(expression); - Evaluator evaluator = evaluators.get(evaluatorType); - Object evalResult = evaluator.evaluate(expression, taskInput); - taskOutput.put("result", evalResult); - task.setStatus(TaskModel.Status.COMPLETED); - } catch (Exception e) { - LOGGER.error( - "Failed to execute Inline Task: {} in workflow: {}", - task.getTaskId(), - workflow.getWorkflowId(), - e); - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(e.getMessage()); - taskOutput.put( - "error", e.getCause() != null ? e.getCause().getMessage() : e.getMessage()); - } - - return true; - } - - private void checkEvaluatorType(String evaluatorType) { - if (StringUtils.isBlank(evaluatorType)) { - LOGGER.error("Empty {} in Inline task. ", QUERY_EVALUATOR_TYPE); - throw new TerminateWorkflowException( - "Empty '" - + QUERY_EVALUATOR_TYPE - + "' in Inline task's input parameters. 
A non-empty String value must be provided."); - } - if (evaluators.get(evaluatorType) == null) { - LOGGER.error("Evaluator {} for Inline task not registered", evaluatorType); - throw new TerminateWorkflowException( - "Unknown evaluator '" + evaluatorType + "' in Inline task."); - } - } - - private void checkExpression(String expression) { - if (StringUtils.isBlank(expression)) { - LOGGER.error("Empty {} in Inline task. ", QUERY_EXPRESSION_PARAMETER); - throw new TerminateWorkflowException( - "Empty '" - + QUERY_EXPRESSION_PARAMETER - + "' in Inline task's input parameters. A non-empty String value must be provided."); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java deleted file mode 100644 index 5a5ac33a8..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.time.Duration; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.service.MetadataService; - -import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER; - -@Component -@ConditionalOnProperty( - name = "conductor.system-task-workers.enabled", - havingValue = "true", - matchIfMissing = true) -public class IsolatedTaskQueueProducer { - - private static final Logger LOGGER = LoggerFactory.getLogger(IsolatedTaskQueueProducer.class); - private final MetadataService metadataService; - private final Set asyncSystemTasks; - private final SystemTaskWorker systemTaskWorker; - - private final Set listeningQueues = new HashSet<>(); - - public IsolatedTaskQueueProducer( - MetadataService metadataService, - @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set asyncSystemTasks, - SystemTaskWorker systemTaskWorker, - 
@Value("${conductor.app.isolatedSystemTaskEnabled:false}") - boolean isolatedSystemTaskEnabled, - @Value("${conductor.app.isolatedSystemTaskQueuePollInterval:10s}") - Duration isolatedSystemTaskQueuePollInterval) { - - this.metadataService = metadataService; - this.asyncSystemTasks = asyncSystemTasks; - this.systemTaskWorker = systemTaskWorker; - - if (isolatedSystemTaskEnabled) { - LOGGER.info("Listening for isolation groups"); - - Executors.newSingleThreadScheduledExecutor() - .scheduleWithFixedDelay( - this::addTaskQueues, - 1000, - isolatedSystemTaskQueuePollInterval.toMillis(), - TimeUnit.MILLISECONDS); - } else { - LOGGER.info("Isolated System Task Worker DISABLED"); - } - } - - private Set getIsolationExecutionNameSpaces() { - Set isolationExecutionNameSpaces = Collections.emptySet(); - try { - List taskDefs = metadataService.getTaskDefs(); - isolationExecutionNameSpaces = - taskDefs.stream() - .filter( - taskDef -> - StringUtils.isNotBlank(taskDef.getIsolationGroupId()) - || StringUtils.isNotBlank( - taskDef.getExecutionNameSpace())) - .collect(Collectors.toSet()); - } catch (RuntimeException e) { - LOGGER.error( - "Unknown exception received in getting isolation groups, sleeping and retrying", - e); - } - return isolationExecutionNameSpaces; - } - - @VisibleForTesting - void addTaskQueues() { - Set isolationTaskDefs = getIsolationExecutionNameSpaces(); - LOGGER.debug("Retrieved queues {}", isolationTaskDefs); - - for (TaskDef isolatedTaskDef : isolationTaskDefs) { - for (WorkflowSystemTask systemTask : this.asyncSystemTasks) { - String taskQueue = - QueueUtils.getQueueName( - systemTask.getTaskType(), - null, - isolatedTaskDef.getIsolationGroupId(), - isolatedTaskDef.getExecutionNameSpace()); - LOGGER.debug("Adding taskQueue:'{}' to system task worker coordinator", taskQueue); - if (!listeningQueues.contains(taskQueue)) { - systemTaskWorker.startPolling(systemTask, taskQueue); - listeningQueues.add(taskQueue); - } - } - } - } -} diff --git 
a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Join.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Join.java deleted file mode 100644 index eab4961c7..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Join.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.List; -import java.util.stream.Collectors; - -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN; - -@Component(TASK_TYPE_JOIN) -public class Join extends WorkflowSystemTask { - - public Join() { - super(TASK_TYPE_JOIN); - } - - @Override - @SuppressWarnings("unchecked") - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - - boolean allDone = true; - boolean hasFailures = false; - StringBuilder failureReason = new StringBuilder(); - StringBuilder optionalTaskFailures = new StringBuilder(); - List joinOn = (List) task.getInputData().get("joinOn"); - if (task.isLoopOverTask()) { - // If join is part of loop over task, wait for specific iteration to get complete - joinOn = - joinOn.stream() - .map(name -> TaskUtils.appendIteration(name, task.getIteration())) - .collect(Collectors.toList()); - } - for (String joinOnRef : joinOn) { - TaskModel forkedTask = workflow.getTaskByRefName(joinOnRef); - if (forkedTask == null) { - // Task is not even scheduled yet - allDone = false; - break; - } - TaskModel.Status taskStatus = forkedTask.getStatus(); - hasFailures = !taskStatus.isSuccessful() && !forkedTask.getWorkflowTask().isOptional(); - if (hasFailures) { - failureReason.append(forkedTask.getReasonForIncompletion()).append(" "); - } - 
task.addOutput(joinOnRef, forkedTask.getOutputData()); - if (!taskStatus.isTerminal()) { - allDone = false; - } - if (hasFailures) { - break; - } - - // check for optional task failures - if (forkedTask.getWorkflowTask().isOptional() - && taskStatus == TaskModel.Status.COMPLETED_WITH_ERRORS) { - optionalTaskFailures - .append( - String.format( - "%s/%s", - forkedTask.getTaskDefName(), forkedTask.getTaskId())) - .append(" "); - } - } - if (allDone || hasFailures || optionalTaskFailures.length() > 0) { - if (hasFailures) { - task.setReasonForIncompletion(failureReason.toString()); - task.setStatus(TaskModel.Status.FAILED); - } else if (optionalTaskFailures.length() > 0) { - task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS); - optionalTaskFailures.append("completed with errors"); - task.setReasonForIncompletion(optionalTaskFailures.toString()); - } else { - task.setStatus(TaskModel.Status.COMPLETED); - } - return true; - } - return false; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Lambda.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Lambda.java deleted file mode 100644 index 8f8aae6b7..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Lambda.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Map; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.events.ScriptEvaluator; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_LAMBDA; - -/** - * @author X-Ultra - *

    Task that enables execute Lambda script at workflow execution, For example, - *

    - * ...
    - * {
    - *  "tasks": [
    - *      {
    - *          "name": "LAMBDA",
    - *          "taskReferenceName": "lambda_test",
    - *          "type": "LAMBDA",
    - *          "inputParameters": {
    - *              "input": "${workflow.input}",
    - *              "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"
    - *          }
    - *      }
    - *  ]
    - * }
    - * ...
    - * 
    - * then to use task output, e.g. script_test.output.testvalue - * @deprecated {@link Lambda} is deprecated. Use {@link Inline} task for inline expression - * evaluation. Also see ${@link com.netflix.conductor.common.metadata.workflow.WorkflowTask}) - */ -@Deprecated -@Component(TASK_TYPE_LAMBDA) -public class Lambda extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(Lambda.class); - private static final String QUERY_EXPRESSION_PARAMETER = "scriptExpression"; - public static final String NAME = "LAMBDA"; - - public Lambda() { - super(TASK_TYPE_LAMBDA); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - Map taskInput = task.getInputData(); - Map taskOutput = task.getOutputData(); - String scriptExpression; - try { - scriptExpression = (String) taskInput.get(QUERY_EXPRESSION_PARAMETER); - if (StringUtils.isNotBlank(scriptExpression)) { - String scriptExpressionBuilder = - "function scriptFun(){" + scriptExpression + "} scriptFun();"; - - LOGGER.debug( - "scriptExpressionBuilder: {}, task: {}", - scriptExpressionBuilder, - task.getTaskId()); - Object returnValue = ScriptEvaluator.eval(scriptExpressionBuilder, taskInput); - taskOutput.put("result", returnValue); - task.setStatus(TaskModel.Status.COMPLETED); - } else { - LOGGER.error("Empty {} in Lambda task. ", QUERY_EXPRESSION_PARAMETER); - task.setReasonForIncompletion( - "Empty '" - + QUERY_EXPRESSION_PARAMETER - + "' in Lambda task's input parameters. A non-empty String value must be provided."); - task.setStatus(TaskModel.Status.FAILED); - } - } catch (Exception e) { - LOGGER.error( - "Failed to execute Lambda Task: {} in workflow: {}", - task.getTaskId(), - workflow.getWorkflowId(), - e); - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(e.getMessage()); - taskOutput.put( - "error", e.getCause() != null ? 
e.getCause().getMessage() : e.getMessage()); - } - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SetVariable.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SetVariable.java deleted file mode 100644 index 2e8474214..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SetVariable.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SET_VARIABLE; - -@Component(TASK_TYPE_SET_VARIABLE) -public class SetVariable extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(SetVariable.class); - - private final ConductorProperties properties; - private final ObjectMapper objectMapper; - - public SetVariable(ConductorProperties properties, ObjectMapper objectMapper) { - super(TASK_TYPE_SET_VARIABLE); - this.properties = properties; - this.objectMapper = objectMapper; - } - - private boolean validateVariablesSize( - WorkflowModel workflow, TaskModel task, Map variables) { - String workflowId = workflow.getWorkflowId(); - long maxThreshold = properties.getMaxWorkflowVariablesPayloadSizeThreshold().toKilobytes(); - - try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { - this.objectMapper.writeValue(byteArrayOutputStream, variables); - byte[] payloadBytes = byteArrayOutputStream.toByteArray(); 
- long payloadSize = payloadBytes.length; - - if (payloadSize > maxThreshold * 1024) { - String errorMsg = - String.format( - "The variables payload size: %d of workflow: %s is greater than the permissible limit: %d bytes", - payloadSize, workflowId, maxThreshold); - LOGGER.error(errorMsg); - task.setReasonForIncompletion(errorMsg); - return false; - } - return true; - } catch (IOException e) { - LOGGER.error( - "Unable to validate variables payload size of workflow: {}", workflowId, e); - throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, e); - } - } - - @Override - public boolean execute(WorkflowModel workflow, TaskModel task, WorkflowExecutor provider) { - Map variables = workflow.getVariables(); - Map input = task.getInputData(); - String taskId = task.getTaskId(); - ArrayList newKeys; - Map previousValues; - - if (input != null && input.size() > 0) { - newKeys = new ArrayList<>(); - previousValues = new HashMap<>(); - input.keySet() - .forEach( - key -> { - if (variables.containsKey(key)) { - previousValues.put(key, variables.get(key)); - } else { - newKeys.add(key); - } - variables.put(key, input.get(key)); - LOGGER.debug( - "Task: {} setting value for variable: {}", taskId, key); - }); - if (!validateVariablesSize(workflow, task, variables)) { - // restore previous variables - previousValues - .keySet() - .forEach( - key -> { - variables.put(key, previousValues.get(key)); - }); - newKeys.forEach(variables::remove); - task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR); - return true; - } - } - - task.setStatus(TaskModel.Status.COMPLETED); - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/StartWorkflow.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/StartWorkflow.java deleted file mode 100644 index 59abde2a8..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/StartWorkflow.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2022 
Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Map; - -import javax.validation.Validator; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_START_WORKFLOW; -import static com.netflix.conductor.model.TaskModel.Status.COMPLETED; -import static com.netflix.conductor.model.TaskModel.Status.FAILED; - -@Component(TASK_TYPE_START_WORKFLOW) -public class StartWorkflow extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(StartWorkflow.class); - - private static final String WORKFLOW_ID = "workflowId"; - private static final String START_WORKFLOW_PARAMETER = "startWorkflow"; - - private final ObjectMapper objectMapper; - private final Validator validator; - - public StartWorkflow(ObjectMapper objectMapper, Validator validator) { - super(TASK_TYPE_START_WORKFLOW); - this.objectMapper = objectMapper; - this.validator = validator; - } - - @Override - public void start( - WorkflowModel workflow, TaskModel taskModel, WorkflowExecutor workflowExecutor) { - StartWorkflowRequest request = getRequest(taskModel); - if (request == null) { - return; - } - - // 
set the correlation id of starter workflow, if its empty in the StartWorkflowRequest - request.setCorrelationId( - StringUtils.defaultIfBlank( - request.getCorrelationId(), workflow.getCorrelationId())); - - try { - String workflowId = startWorkflow(request, workflowExecutor); - taskModel.addOutput(WORKFLOW_ID, workflowId); - taskModel.setStatus(COMPLETED); - } catch (ApplicationException ae) { - if (ae.isRetryable()) { - LOGGER.info( - "A transient backend error happened when task {} in {} tried to start workflow {}.", - taskModel.getTaskId(), - workflow.toShortString(), - request.getName()); - } else { - taskModel.setStatus(FAILED); - taskModel.setReasonForIncompletion(ae.getMessage()); - LOGGER.error( - "Error starting workflow: {} from workflow: {}", - request.getName(), - workflow.toShortString(), - ae); - } - } catch (Exception e) { - taskModel.setStatus(FAILED); - taskModel.setReasonForIncompletion(e.getMessage()); - LOGGER.error( - "Error starting workflow: {} from workflow: {}", - request.getName(), - workflow.toShortString(), - e); - } - } - - private StartWorkflowRequest getRequest(TaskModel taskModel) { - Map taskInput = taskModel.getInputData(); - - StartWorkflowRequest startWorkflowRequest = null; - - if (taskInput.get(START_WORKFLOW_PARAMETER) == null) { - taskModel.setStatus(FAILED); - taskModel.setReasonForIncompletion( - "Missing '" + START_WORKFLOW_PARAMETER + "' in input data."); - } else { - try { - startWorkflowRequest = - objectMapper.convertValue( - taskInput.get(START_WORKFLOW_PARAMETER), - StartWorkflowRequest.class); - - var violations = validator.validate(startWorkflowRequest); - if (!violations.isEmpty()) { - StringBuilder reasonForIncompletion = - new StringBuilder(START_WORKFLOW_PARAMETER) - .append(" validation failed. "); - for (var violation : violations) { - reasonForIncompletion - .append("'") - .append(violation.getPropertyPath().toString()) - .append("' -> ") - .append(violation.getMessage()) - .append(". 
"); - } - taskModel.setStatus(FAILED); - taskModel.setReasonForIncompletion(reasonForIncompletion.toString()); - startWorkflowRequest = null; - } - } catch (IllegalArgumentException e) { - LOGGER.error("Error reading StartWorkflowRequest for {}", taskModel, e); - taskModel.setStatus(FAILED); - taskModel.setReasonForIncompletion( - "Error reading StartWorkflowRequest. " + e.getMessage()); - } - } - - return startWorkflowRequest; - } - - private String startWorkflow(StartWorkflowRequest request, WorkflowExecutor workflowExecutor) { - if (request.getWorkflowDef() == null) { - return workflowExecutor.startWorkflow( - request.getName(), - request.getVersion(), - request.getCorrelationId(), - request.getPriority(), - request.getInput(), - request.getExternalInputPayloadStoragePath(), - null, - request.getTaskToDomain()); - } else { - return workflowExecutor.startWorkflow( - request.getWorkflowDef(), - request.getInput(), - request.getExternalInputPayloadStoragePath(), - request.getCorrelationId(), - request.getPriority(), - null, - request.getTaskToDomain()); - } - } - - @Override - public boolean isAsync() { - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java deleted file mode 100644 index 75995832d..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Map; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; - -@Component(TASK_TYPE_SUB_WORKFLOW) -public class SubWorkflow extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(SubWorkflow.class); - private static final String SUB_WORKFLOW_ID = "subWorkflowId"; - - private final ObjectMapper objectMapper; - - public SubWorkflow(ObjectMapper objectMapper) { - super(TASK_TYPE_SUB_WORKFLOW); - this.objectMapper = objectMapper; - } - - @SuppressWarnings("unchecked") - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - Map input = task.getInputData(); - String name = input.get("subWorkflowName").toString(); - int version = (int) input.get("subWorkflowVersion"); - - WorkflowDef workflowDefinition = null; - if (input.get("subWorkflowDefinition") != null) { - // convert the value back to workflow definition object - workflowDefinition = - objectMapper.convertValue( - input.get("subWorkflowDefinition"), WorkflowDef.class); - name = 
workflowDefinition.getName(); - } - - Map taskToDomain = workflow.getTaskToDomain(); - if (input.get("subWorkflowTaskToDomain") instanceof Map) { - taskToDomain = (Map) input.get("subWorkflowTaskToDomain"); - } - - var wfInput = (Map) input.get("workflowInput"); - if (wfInput == null || wfInput.isEmpty()) { - wfInput = input; - } - String correlationId = workflow.getCorrelationId(); - - try { - String subWorkflowId; - if (workflowDefinition != null) { - subWorkflowId = - workflowExecutor.startWorkflow( - workflowDefinition, - wfInput, - null, - correlationId, - 0, - workflow.getWorkflowId(), - task.getTaskId(), - null, - taskToDomain); - } else { - subWorkflowId = - workflowExecutor.startWorkflow( - name, - version, - wfInput, - null, - correlationId, - workflow.getWorkflowId(), - task.getTaskId(), - null, - taskToDomain); - } - - task.setSubWorkflowId(subWorkflowId); - // For backwards compatibility - task.addOutput(SUB_WORKFLOW_ID, subWorkflowId); - - // Set task status based on current sub-workflow status, as the status can change in - // recursion by the time we update here. 
- WorkflowModel subWorkflow = workflowExecutor.getWorkflow(subWorkflowId, false); - updateTaskStatus(subWorkflow, task); - } catch (ApplicationException ae) { - if (ae.isRetryable()) { - LOGGER.info( - "A transient backend error happened when task {} in {} tried to start sub workflow {}.", - task.getTaskId(), - workflow.toShortString(), - name); - } else { - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(ae.getMessage()); - LOGGER.error( - "Error starting sub workflow: {} from workflow: {}", - name, - workflow.toShortString(), - ae); - } - } catch (Exception e) { - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion(e.getMessage()); - LOGGER.error( - "Error starting sub workflow: {} from workflow: {}", - name, - workflow.toShortString(), - e); - } - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - String workflowId = task.getSubWorkflowId(); - if (StringUtils.isEmpty(workflowId)) { - return false; - } - - WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, false); - WorkflowModel.Status subWorkflowStatus = subWorkflow.getStatus(); - if (!subWorkflowStatus.isTerminal()) { - return false; - } - - updateTaskStatus(subWorkflow, task); - return true; - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - String workflowId = task.getSubWorkflowId(); - if (StringUtils.isEmpty(workflowId)) { - return; - } - WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, true); - subWorkflow.setStatus(WorkflowModel.Status.TERMINATED); - String reason = - StringUtils.isEmpty(workflow.getReasonForIncompletion()) - ? 
"Parent workflow has been terminated with status " + workflow.getStatus() - : "Parent workflow has been terminated with reason: " - + workflow.getReasonForIncompletion(); - workflowExecutor.terminateWorkflow(subWorkflow, reason, null); - } - - @Override - public boolean isAsync() { - return true; - } - - /** - * Keep Subworkflow task asyncComplete. The Subworkflow task will be executed once - * asynchronously to move to IN_PROGRESS state, and will move to termination by Subworkflow's - * completeWorkflow logic, there by avoiding periodic polling. - * - * @param task - * @return - */ - @Override - public boolean isAsyncComplete(TaskModel task) { - return true; - } - - private void updateTaskStatus(WorkflowModel subworkflow, TaskModel task) { - WorkflowModel.Status status = subworkflow.getStatus(); - switch (status) { - case RUNNING: - case PAUSED: - task.setStatus(TaskModel.Status.IN_PROGRESS); - break; - case COMPLETED: - task.setStatus(TaskModel.Status.COMPLETED); - break; - case FAILED: - task.setStatus(TaskModel.Status.FAILED); - break; - case TERMINATED: - task.setStatus(TaskModel.Status.CANCELED); - break; - case TIMED_OUT: - task.setStatus(TaskModel.Status.TIMED_OUT); - break; - default: - throw new ApplicationException( - ApplicationException.Code.INTERNAL_ERROR, - "Subworkflow status does not conform to relevant task status."); - } - - if (status.isTerminal()) { - if (subworkflow.getExternalOutputPayloadStoragePath() != null) { - task.setExternalOutputPayloadStoragePath( - subworkflow.getExternalOutputPayloadStoragePath()); - } else { - task.getOutputData().putAll(subworkflow.getOutput()); - } - if (!status.isSuccessful()) { - task.setReasonForIncompletion( - String.format( - "Sub workflow %s failure reason: %s", - subworkflow.toShortString(), - subworkflow.getReasonForIncompletion())); - } - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Switch.java 
b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Switch.java deleted file mode 100644 index 2d3d040a2..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Switch.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SWITCH; - -/** {@link Switch} task is a replacement for now deprecated {@link Decision} task. */ -@Component(TASK_TYPE_SWITCH) -public class Switch extends WorkflowSystemTask { - - public Switch() { - super(TASK_TYPE_SWITCH); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - task.setStatus(TaskModel.Status.COMPLETED); - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java deleted file mode 100644 index 4486680e8..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; - -import org.springframework.stereotype.Component; - -/** - * A container class that holds a mapping of system task types {@link - * com.netflix.conductor.common.metadata.tasks.TaskType} to {@link WorkflowSystemTask} instances. - */ -@Component -public class SystemTaskRegistry { - - public static final String ASYNC_SYSTEM_TASKS_QUALIFIER = "asyncSystemTasks"; - - private final Map registry; - - public SystemTaskRegistry(Set tasks) { - this.registry = - tasks.stream() - .collect( - Collectors.toMap( - WorkflowSystemTask::getTaskType, Function.identity())); - } - - public WorkflowSystemTask get(String taskType) { - return Optional.ofNullable(registry.get(taskType)) - .orElseThrow( - () -> - new IllegalStateException( - taskType + "not found in " + getClass().getSimpleName())); - } - - public boolean isSystemTask(String taskType) { - return registry.containsKey(taskType); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorker.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorker.java deleted file mode 100644 index 29bf6bb2a..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorker.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.core.LifecycleAwareComponent; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.core.utils.SemaphoreUtil; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.service.ExecutionService; - -/** The worker that polls and executes an async system task. 
*/ -@Component -@ConditionalOnProperty( - name = "conductor.system-task-workers.enabled", - havingValue = "true", - matchIfMissing = true) -public class SystemTaskWorker extends LifecycleAwareComponent { - - private static final Logger LOGGER = LoggerFactory.getLogger(SystemTaskWorker.class); - - private final long pollInterval; - private final QueueDAO queueDAO; - - ExecutionConfig defaultExecutionConfig; - private final AsyncSystemTaskExecutor asyncSystemTaskExecutor; - private final ConductorProperties properties; - private final int maxPollCount; - private final ExecutionService executionService; - - ConcurrentHashMap queueExecutionConfigMap = new ConcurrentHashMap<>(); - - public SystemTaskWorker( - QueueDAO queueDAO, - AsyncSystemTaskExecutor asyncSystemTaskExecutor, - ConductorProperties properties, - ExecutionService executionService) { - this.properties = properties; - int threadCount = properties.getSystemTaskWorkerThreadCount(); - this.defaultExecutionConfig = new ExecutionConfig(threadCount, "system-task-worker-%d"); - this.asyncSystemTaskExecutor = asyncSystemTaskExecutor; - this.queueDAO = queueDAO; - this.maxPollCount = properties.getSystemTaskMaxPollCount(); - this.pollInterval = properties.getSystemTaskWorkerPollInterval().toMillis(); - this.executionService = executionService; - - LOGGER.info("SystemTaskWorker initialized with {} threads", threadCount); - } - - public void startPolling(WorkflowSystemTask systemTask) { - startPolling(systemTask, systemTask.getTaskType()); - } - - public void startPolling(WorkflowSystemTask systemTask, String queueName) { - Executors.newSingleThreadScheduledExecutor() - .scheduleWithFixedDelay( - () -> this.pollAndExecute(systemTask, queueName), - 1000, - pollInterval, - TimeUnit.MILLISECONDS); - LOGGER.info("Started listening for task: {} in queue: {}", systemTask, queueName); - } - - void pollAndExecute(WorkflowSystemTask systemTask, String queueName) { - if (!isRunning()) { - LOGGER.debug( - "{} stopped. 
Not polling for task: {}", getClass().getSimpleName(), systemTask); - return; - } - - // get the remaining capacity of worker queue to prevent queue full exception - ExecutionConfig executionConfig = getExecutionConfig(queueName); - SemaphoreUtil semaphoreUtil = executionConfig.getSemaphoreUtil(); - ExecutorService executorService = executionConfig.getExecutorService(); - String taskName = QueueUtils.getTaskType(queueName); - - if (!semaphoreUtil.acquireSlots(1)) { - // no available permits, do not poll - Monitors.recordSystemTaskWorkerPollingLimited(queueName); - return; - } - - int acquiredSlots = 1; - - try { - // Since already one slot is acquired, now try if maxSlot-1 is available - int slotsToAcquire = Math.min(semaphoreUtil.availableSlots(), maxPollCount - 1); - - // Try to acquire remaining permits to achieve maxPollCount - if (slotsToAcquire > 0 && semaphoreUtil.acquireSlots(slotsToAcquire)) { - acquiredSlots += slotsToAcquire; - } - LOGGER.debug("Polling queue: {} with {} slots acquired", queueName, acquiredSlots); - - List polledTaskIds = queueDAO.pop(queueName, acquiredSlots, 200); - - Monitors.recordTaskPoll(queueName); - LOGGER.debug("Polling queue:{}, got {} tasks", queueName, polledTaskIds.size()); - - if (polledTaskIds.size() > 0) { - // Immediately release unused permits when polled no. 
of messages are less than - // acquired permits - if (polledTaskIds.size() < acquiredSlots) { - semaphoreUtil.completeProcessing(acquiredSlots - polledTaskIds.size()); - } - - for (String taskId : polledTaskIds) { - if (StringUtils.isNotBlank(taskId)) { - LOGGER.debug( - "Task: {} from queue: {} being sent to the workflow executor", - taskId, - queueName); - Monitors.recordTaskPollCount(queueName, 1); - - executionService.ackTaskReceived(taskId); - - CompletableFuture taskCompletableFuture = - CompletableFuture.runAsync( - () -> asyncSystemTaskExecutor.execute(systemTask, taskId), - executorService); - - // release permit after processing is complete - taskCompletableFuture.whenComplete( - (r, e) -> semaphoreUtil.completeProcessing(1)); - } else { - semaphoreUtil.completeProcessing(1); - } - } - } else { - // no task polled, release permit - semaphoreUtil.completeProcessing(acquiredSlots); - } - } catch (Exception e) { - // release the permit if exception is thrown during polling, because the thread would - // not be busy - semaphoreUtil.completeProcessing(acquiredSlots); - Monitors.recordTaskPollError(taskName, e.getClass().getSimpleName()); - LOGGER.error("Error polling system task in queue:{}", queueName, e); - } - } - - @VisibleForTesting - ExecutionConfig getExecutionConfig(String taskQueue) { - if (!QueueUtils.isIsolatedQueue(taskQueue)) { - return this.defaultExecutionConfig; - } - return queueExecutionConfigMap.computeIfAbsent( - taskQueue, __ -> this.createExecutionConfig()); - } - - private ExecutionConfig createExecutionConfig() { - int threadCount = properties.getIsolatedSystemTaskWorkerThreadCount(); - String threadNameFormat = "isolated-system-task-worker-%d"; - return new ExecutionConfig(threadCount, threadNameFormat); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorkerCoordinator.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorkerCoordinator.java deleted file mode 
100644 index b1a9ed988..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorkerCoordinator.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Set; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.context.event.EventListener; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.utils.QueueUtils; - -import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER; - -@Component -@ConditionalOnProperty( - name = "conductor.system-task-workers.enabled", - havingValue = "true", - matchIfMissing = true) -public class SystemTaskWorkerCoordinator { - - private static final Logger LOGGER = LoggerFactory.getLogger(SystemTaskWorkerCoordinator.class); - - private final SystemTaskWorker systemTaskWorker; - private final String executionNameSpace; - private final Set asyncSystemTasks; - - public SystemTaskWorkerCoordinator( - SystemTaskWorker systemTaskWorker, - ConductorProperties properties, - @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set asyncSystemTasks) { - this.systemTaskWorker = systemTaskWorker; - this.asyncSystemTasks = asyncSystemTasks; - this.executionNameSpace = properties.getSystemTaskWorkerExecutionNamespace(); - } - - @EventListener(ApplicationReadyEvent.class) - public void initSystemTaskExecutor() { - 
this.asyncSystemTasks.stream() - .filter(this::isFromCoordinatorExecutionNameSpace) - .forEach(this.systemTaskWorker::startPolling); - LOGGER.info( - "{} initialized with {} async tasks", - SystemTaskWorkerCoordinator.class.getSimpleName(), - this.asyncSystemTasks.size()); - } - - @VisibleForTesting - boolean isFromCoordinatorExecutionNameSpace(WorkflowSystemTask systemTask) { - String queueExecutionNameSpace = QueueUtils.getExecutionNameSpace(systemTask.getTaskType()); - return StringUtils.equals(queueExecutionNameSpace, executionNameSpace); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java deleted file mode 100644 index 75745987a..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_TERMINATE; -import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.COMPLETED; -import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.FAILED; - -/** - * Task that can terminate a workflow with a given status and modify the workflow's output with a - * given parameter, it can act as a "return" statement for conditions where you simply want to - * terminate your workflow. For example, if you have a decision where the first condition is met, - * you want to execute some tasks, otherwise you want to finish your workflow. - * - *

    - * ...
    - * {
    - *  "tasks": [
    - *      {
    - *          "name": "terminate",
    - *          "taskReferenceName": "terminate0",
    - *          "inputParameters": {
    - *              "terminationStatus": "COMPLETED",
    - *              "workflowOutput": "${task0.output}"
    - *          },
    - *          "type": "TERMINATE",
    - *          "startDelay": 0,
    - *          "optional": false
    - *      }
    - *   ]
    - * }
    - * ...
    - * 
    - * - * This task has some validations on creation and execution, they are: - the "terminationStatus" - * parameter is mandatory and it can only receive the values "COMPLETED" or "FAILED" - the terminate - * task cannot be optional - */ -@Component(TASK_TYPE_TERMINATE) -public class Terminate extends WorkflowSystemTask { - - private static final String TERMINATION_STATUS_PARAMETER = "terminationStatus"; - private static final String TERMINATION_REASON_PARAMETER = "terminationReason"; - private static final String TERMINATION_WORKFLOW_OUTPUT = "workflowOutput"; - - public Terminate() { - super(TASK_TYPE_TERMINATE); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - String returnStatus = (String) task.getInputData().get(TERMINATION_STATUS_PARAMETER); - - if (validateInputStatus(returnStatus)) { - task.setOutputData(getInputFromParam(task.getInputData())); - task.setStatus(TaskModel.Status.COMPLETED); - return true; - } - task.setReasonForIncompletion("given termination status is not valid"); - task.setStatus(TaskModel.Status.FAILED); - return false; - } - - public static String getTerminationStatusParameter() { - return TERMINATION_STATUS_PARAMETER; - } - - public static String getTerminationReasonParameter() { - return TERMINATION_REASON_PARAMETER; - } - - public static String getTerminationWorkflowOutputParameter() { - return TERMINATION_WORKFLOW_OUTPUT; - } - - public static Boolean validateInputStatus(String status) { - return COMPLETED.name().equals(status) || FAILED.name().equals(status); - } - - @SuppressWarnings("unchecked") - private Map getInputFromParam(Map taskInput) { - HashMap output = new HashMap<>(); - if (taskInput.get(TERMINATION_WORKFLOW_OUTPUT) == null) { - return output; - } - if (taskInput.get(TERMINATION_WORKFLOW_OUTPUT) instanceof HashMap) { - output.putAll((HashMap) taskInput.get(TERMINATION_WORKFLOW_OUTPUT)); - return output; - } - output.put("output", 
taskInput.get(TERMINATION_WORKFLOW_OUTPUT)); - return output; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Wait.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Wait.java deleted file mode 100644 index 47db20727..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Wait.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.text.ParseException; -import java.time.Duration; -import java.util.Date; -import java.util.Optional; - -import org.apache.commons.lang3.StringUtils; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; -import static com.netflix.conductor.core.utils.DateTimeUtils.parseDate; -import static com.netflix.conductor.core.utils.DateTimeUtils.parseDuration; -import static com.netflix.conductor.model.TaskModel.Status.*; - -@Component(TASK_TYPE_WAIT) -public class Wait extends WorkflowSystemTask { - - public static final String DURATION_INPUT = "duration"; - public static final String UNTIL_INPUT = "until"; - - public Wait() { - super(TASK_TYPE_WAIT); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - - String duration = - Optional.ofNullable(task.getInputData().get(DURATION_INPUT)).orElse("").toString(); - String until = - Optional.ofNullable(task.getInputData().get(UNTIL_INPUT)).orElse("").toString(); - - if (StringUtils.isNotBlank(duration) && StringUtils.isNotBlank(until)) { - task.setReasonForIncompletion( - "Both 'duration' and 'until' specified. 
Please provide only one input"); - task.setStatus(FAILED_WITH_TERMINAL_ERROR); - return; - } - - if (StringUtils.isNotBlank(duration)) { - - Duration timeDuration = parseDuration(duration); - long waitTimeout = System.currentTimeMillis() + (timeDuration.getSeconds() * 1000); - task.setWaitTimeout(waitTimeout); - - long seconds = timeDuration.getSeconds(); - task.setCallbackAfterSeconds(seconds); - } else if (StringUtils.isNotBlank(until)) { - try { - Date expiryDate = parseDate(until); - long timeInMS = expiryDate.getTime(); - long now = System.currentTimeMillis(); - long seconds = (timeInMS - now) / 1000; - task.setWaitTimeout(timeInMS); - - } catch (ParseException parseException) { - task.setReasonForIncompletion( - "Invalid/Unsupported Wait Until format. Provided: " + until); - task.setStatus(FAILED_WITH_TERMINAL_ERROR); - } - } - task.setStatus(IN_PROGRESS); - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - task.setStatus(TaskModel.Status.CANCELED); - } - - @Override - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - long timeOut = task.getWaitTimeout(); - if (timeOut == 0) { - return false; - } - if (System.currentTimeMillis() > timeOut) { - task.setStatus(COMPLETED); - return true; - } - - return false; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/WorkflowSystemTask.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/WorkflowSystemTask.java deleted file mode 100644 index e8db7c6d8..000000000 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/WorkflowSystemTask.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Optional; - -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -public abstract class WorkflowSystemTask { - - private final String taskType; - - public WorkflowSystemTask(String taskType) { - this.taskType = taskType; - } - - /** - * Start the task execution. - * - *

    Called only once, and first, when the task status is SCHEDULED. - * - * @param workflow Workflow for which the task is being started - * @param task Instance of the Task - * @param workflowExecutor Workflow Executor - */ - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - // Do nothing unless overridden by the task implementation - } - - /** - * "Execute" the task. - * - *

    Called after {@link #start(WorkflowModel, TaskModel, WorkflowExecutor)}, if the task - * status is not terminal. Can be called more than once. - * - * @param workflow Workflow for which the task is being started - * @param task Instance of the Task - * @param workflowExecutor Workflow Executor - * @return true, if the execution has changed the task status. return false otherwise. - */ - public boolean execute( - WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { - return false; - } - - /** - * Cancel task execution - * - * @param workflow Workflow for which the task is being started - * @param task Instance of the Task - * @param workflowExecutor Workflow Executor - */ - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {} - - /** - * @return True if the task is supposed to be started asynchronously using internal queues. - */ - public boolean isAsync() { - return false; - } - - /** - * @return True to keep task in 'IN_PROGRESS' state, and 'COMPLETE' later by an external - * message. - */ - public boolean isAsyncComplete(TaskModel task) { - if (task.getInputData().containsKey("asyncComplete")) { - return Optional.ofNullable(task.getInputData().get("asyncComplete")) - .map(result -> (Boolean) result) - .orElse(false); - } else { - return Optional.ofNullable(task.getWorkflowTask()) - .map(WorkflowTask::isAsyncComplete) - .orElse(false); - } - } - - /** - * @return name of the system task - */ - public String getTaskType() { - return taskType; - } - - @Override - public String toString() { - return taskType; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java b/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java deleted file mode 100644 index 4a7f427cb..000000000 --- a/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.index; - -import java.util.Collections; -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.dao.IndexDAO; - -/** - * Dummy implementation of {@link IndexDAO} which does nothing. Nothing is ever indexed, and no - * results are ever returned. 
- */ -public class NoopIndexDAO implements IndexDAO { - - @Override - public void setup() {} - - @Override - public void indexWorkflow(WorkflowSummary workflowSummary) {} - - @Override - public CompletableFuture asyncIndexWorkflow(WorkflowSummary workflowSummary) { - return CompletableFuture.completedFuture(null); - } - - @Override - public void indexTask(TaskSummary taskSummary) {} - - @Override - public CompletableFuture asyncIndexTask(TaskSummary taskSummary) { - return CompletableFuture.completedFuture(null); - } - - @Override - public SearchResult searchWorkflows( - String query, String freeText, int start, int count, List sort) { - return new SearchResult<>(0, Collections.emptyList()); - } - - @Override - public SearchResult searchTasks( - String query, String freeText, int start, int count, List sort) { - return new SearchResult<>(0, Collections.emptyList()); - } - - @Override - public void removeWorkflow(String workflowId) {} - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.completedFuture(null); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {} - - @Override - public CompletableFuture asyncUpdateWorkflow( - String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.completedFuture(null); - } - - @Override - public String get(String workflowInstanceId, String key) { - return null; - } - - @Override - public void addTaskExecutionLogs(List logs) {} - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.completedFuture(null); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - return Collections.emptyList(); - } - - @Override - public void addEventExecution(EventExecution eventExecution) {} - - @Override - public List getEventExecutions(String event) { - return Collections.emptyList(); - } - - @Override - public CompletableFuture 
asyncAddEventExecution(EventExecution eventExecution) { - return null; - } - - @Override - public void addMessage(String queue, Message msg) {} - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return CompletableFuture.completedFuture(null); - } - - @Override - public List getMessages(String queue) { - return Collections.emptyList(); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - return Collections.emptyList(); - } - - @Override - public long getWorkflowCount(String query, String freeText) { - return 0; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java b/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java deleted file mode 100644 index 0e9e2466b..000000000 --- a/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.index; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.dao.IndexDAO; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.indexing.enabled", havingValue = "false") -public class NoopIndexDAOConfiguration { - - @Bean - public IndexDAO noopIndexDAO() { - return new NoopIndexDAO(); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListener.java b/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListener.java deleted file mode 100644 index 1c0c4395a..000000000 --- a/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListener.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.listener; - -import com.netflix.conductor.model.WorkflowModel; - -/** Listener for the completed and terminated workflows */ -public interface WorkflowStatusListener { - - default void onWorkflowCompletedIfEnabled(WorkflowModel workflow) { - if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) { - onWorkflowCompleted(workflow); - } - } - - default void onWorkflowTerminatedIfEnabled(WorkflowModel workflow) { - if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) { - onWorkflowTerminated(workflow); - } - } - - default void onWorkflowFinalizedIfEnabled(WorkflowModel workflow) { - if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) { - onWorkflowFinalized(workflow); - } - } - - void onWorkflowCompleted(WorkflowModel workflow); - - void onWorkflowTerminated(WorkflowModel workflow); - - default void onWorkflowFinalized(WorkflowModel workflow) {} -} diff --git a/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListenerStub.java b/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListenerStub.java deleted file mode 100644 index 36bb5e699..000000000 --- a/core/src/main/java/com/netflix/conductor/core/listener/WorkflowStatusListenerStub.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.listener; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.model.WorkflowModel; - -/** Stub listener default implementation */ -public class WorkflowStatusListenerStub implements WorkflowStatusListener { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowStatusListenerStub.class); - - @Override - public void onWorkflowCompleted(WorkflowModel workflow) { - LOGGER.debug("Workflow {} is completed", workflow.getWorkflowId()); - } - - @Override - public void onWorkflowTerminated(WorkflowModel workflow) { - LOGGER.debug("Workflow {} is terminated", workflow.getWorkflowId()); - } - - @Override - public void onWorkflowFinalized(WorkflowModel workflow) { - LOGGER.debug("Workflow {} is finalized", workflow.getWorkflowId()); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java deleted file mode 100644 index d63285320..000000000 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.metadata; - -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * Populates metadata definitions within workflow objects. Benefits of loading and populating - * metadata definitions upfront could be: - * - *

      - *
    • Immutable definitions within a workflow execution with the added benefit of guaranteeing - * consistency at runtime. - *
    • Stress is reduced on the storage layer - *
    - */ -@Component -public class MetadataMapperService { - - public static final Logger LOGGER = LoggerFactory.getLogger(MetadataMapperService.class); - private final MetadataDAO metadataDAO; - - public MetadataMapperService(MetadataDAO metadataDAO) { - this.metadataDAO = metadataDAO; - } - - public WorkflowDef lookupForWorkflowDefinition(String name, Integer version) { - Optional potentialDef = - version == null - ? lookupLatestWorkflowDefinition(name) - : lookupWorkflowDefinition(name, version); - - // Check if the workflow definition is valid - return potentialDef.orElseThrow( - () -> { - LOGGER.error( - "There is no workflow defined with name {} and version {}", - name, - version); - return new ApplicationException( - ApplicationException.Code.NOT_FOUND, - String.format( - "No such workflow defined. name=%s, version=%s", - name, version)); - }); - } - - @VisibleForTesting - Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { - Utils.checkArgument( - StringUtils.isNotBlank(workflowName), - "Workflow name must be specified when searching for a definition"); - return metadataDAO.getWorkflowDef(workflowName, workflowVersion); - } - - @VisibleForTesting - Optional lookupLatestWorkflowDefinition(String workflowName) { - Utils.checkArgument( - StringUtils.isNotBlank(workflowName), - "Workflow name must be specified when searching for a definition"); - return metadataDAO.getLatestWorkflowDef(workflowName); - } - - public WorkflowModel populateWorkflowWithDefinitions(WorkflowModel workflow) { - Utils.checkNotNull(workflow, "workflow cannot be null"); - WorkflowDef workflowDefinition = - Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElseGet( - () -> { - WorkflowDef wd = - lookupForWorkflowDefinition( - workflow.getWorkflowName(), - workflow.getWorkflowVersion()); - workflow.setWorkflowDefinition(wd); - return wd; - }); - - workflowDefinition.collectTasks().forEach(this::populateWorkflowTaskWithDefinition); - 
checkNotEmptyDefinitions(workflowDefinition); - - return workflow; - } - - public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { - Utils.checkNotNull(workflowDefinition, "workflowDefinition cannot be null"); - workflowDefinition.collectTasks().forEach(this::populateWorkflowTaskWithDefinition); - checkNotEmptyDefinitions(workflowDefinition); - return workflowDefinition; - } - - private void populateWorkflowTaskWithDefinition(WorkflowTask workflowTask) { - Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null"); - if (shouldPopulateTaskDefinition(workflowTask)) { - workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); - if (workflowTask.getTaskDefinition() == null - && workflowTask.getType().equals(TaskType.SIMPLE.name())) { - // ad-hoc task def - workflowTask.setTaskDefinition(new TaskDef(workflowTask.getName())); - } - } - if (workflowTask.getType().equals(TaskType.SUB_WORKFLOW.name())) { - populateVersionForSubWorkflow(workflowTask); - } - } - - private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { - Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null"); - SubWorkflowParams subworkflowParams = workflowTask.getSubWorkflowParam(); - if (subworkflowParams.getVersion() == null) { - String subWorkflowName = subworkflowParams.getName(); - Integer subWorkflowVersion = - metadataDAO - .getLatestWorkflowDef(subWorkflowName) - .map(WorkflowDef::getVersion) - .orElseThrow( - () -> { - String reason = - String.format( - "The Task %s defined as a sub-workflow has no workflow definition available ", - subWorkflowName); - LOGGER.error(reason); - return new TerminateWorkflowException(reason); - }); - subworkflowParams.setVersion(subWorkflowVersion); - } - } - - private void checkNotEmptyDefinitions(WorkflowDef workflowDefinition) { - Utils.checkNotNull(workflowDefinition, "WorkflowDefinition cannot be null"); - - // Obtain the names of the tasks with missing definitions - Set 
missingTaskDefinitionNames = - workflowDefinition.collectTasks().stream() - .filter( - workflowTask -> - workflowTask.getType().equals(TaskType.SIMPLE.name())) - .filter(this::shouldPopulateTaskDefinition) - .map(WorkflowTask::getName) - .collect(Collectors.toSet()); - - if (!missingTaskDefinitionNames.isEmpty()) { - LOGGER.error( - "Cannot find the task definitions for the following tasks used in workflow: {}", - missingTaskDefinitionNames); - Monitors.recordWorkflowStartError( - workflowDefinition.getName(), WorkflowContext.get().getClientApp()); - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "Cannot find the task definitions for the following tasks used in workflow: " - + missingTaskDefinitionNames); - } - } - - public TaskModel populateTaskWithDefinition(TaskModel task) { - Utils.checkNotNull(task, "Task cannot be null"); - populateWorkflowTaskWithDefinition(task.getWorkflowTask()); - return task; - } - - @VisibleForTesting - boolean shouldPopulateTaskDefinition(WorkflowTask workflowTask) { - Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null"); - Utils.checkNotNull(workflowTask.getType(), "WorkflowTask type cannot be null"); - return workflowTask.getTaskDefinition() == null - && StringUtils.isNotBlank(workflowTask.getName()); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowReconciler.java b/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowReconciler.java deleted file mode 100644 index 05086f341..000000000 --- a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowReconciler.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.reconciliation; - -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.LifecycleAwareComponent; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; - -import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE; - -/** - * Periodically polls all running workflows in the system and evaluates them for timeouts and/or - * maintain consistency. 
- */ -@Component -@ConditionalOnProperty( - name = "conductor.workflow-reconciler.enabled", - havingValue = "true", - matchIfMissing = true) -public class WorkflowReconciler extends LifecycleAwareComponent { - - private final WorkflowSweeper workflowSweeper; - private final QueueDAO queueDAO; - private final int sweeperThreadCount; - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowReconciler.class); - - public WorkflowReconciler( - WorkflowSweeper workflowSweeper, QueueDAO queueDAO, ConductorProperties properties) { - this.workflowSweeper = workflowSweeper; - this.queueDAO = queueDAO; - this.sweeperThreadCount = properties.getSweeperThreadCount(); - LOGGER.info( - "WorkflowReconciler initialized with {} sweeper threads", - properties.getSweeperThreadCount()); - } - - @Scheduled( - fixedDelayString = "${conductor.sweep-frequency.millis:500}", - initialDelayString = "${conductor.sweep-frequency.millis:500}") - public void pollAndSweep() { - try { - if (!isRunning()) { - LOGGER.debug("Component stopped, skip workflow sweep"); - } else { - List workflowIds = queueDAO.pop(DECIDER_QUEUE, sweeperThreadCount, 2000); - if (workflowIds != null) { - // wait for all workflow ids to be "swept" - CompletableFuture.allOf( - workflowIds.stream() - .map(workflowSweeper::sweepAsync) - .toArray(CompletableFuture[]::new)) - .get(); - LOGGER.debug( - "Sweeper processed {} from the decider queue", - String.join(",", workflowIds)); - } - // NOTE: Disabling the sweeper implicitly disables this metric. - recordQueueDepth(); - } - } catch (Exception e) { - Monitors.error(WorkflowReconciler.class.getSimpleName(), "poll"); - LOGGER.error("Error when polling for workflows", e); - if (e instanceof InterruptedException) { - // Restore interrupted state... 
- Thread.currentThread().interrupt(); - } - } - } - - private void recordQueueDepth() { - int currentQueueSize = queueDAO.getSize(DECIDER_QUEUE); - Monitors.recordGauge(DECIDER_QUEUE, currentQueueSize); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowRepairService.java b/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowRepairService.java deleted file mode 100644 index 769fe8ddb..000000000 --- a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowRepairService.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.reconciliation; - -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Predicate; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** - * A helper service that tries to keep ExecutionDAO and QueueDAO in sync, based on the task or - * workflow state. - * - *

    This service expects that the underlying Queueing layer implements {@link - * QueueDAO#containsMessage(String, String)} method. This can be controlled with - * conductor.workflow-repair-service.enabled property. - */ -@Service -@ConditionalOnProperty(name = "conductor.workflow-repair-service.enabled", havingValue = "true") -public class WorkflowRepairService { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowRepairService.class); - private final ExecutionDAO executionDAO; - private final QueueDAO queueDAO; - private final ConductorProperties properties; - private SystemTaskRegistry systemTaskRegistry; - - /* - For system task -> Verify the task isAsync() and not isAsyncComplete() or isAsyncComplete() in SCHEDULED state, - and in SCHEDULED or IN_PROGRESS state. (Example: SUB_WORKFLOW tasks in SCHEDULED state) - For simple task -> Verify the task is in SCHEDULED state. - */ - private final Predicate isTaskRepairable = - task -> { - if (systemTaskRegistry.isSystemTask(task.getTaskType())) { // If system task - WorkflowSystemTask workflowSystemTask = - systemTaskRegistry.get(task.getTaskType()); - return workflowSystemTask.isAsync() - && (!workflowSystemTask.isAsyncComplete(task) - || (workflowSystemTask.isAsyncComplete(task) - && task.getStatus() == TaskModel.Status.SCHEDULED)) - && (task.getStatus() == TaskModel.Status.IN_PROGRESS - || task.getStatus() == TaskModel.Status.SCHEDULED); - } else { // Else if simple task - return task.getStatus() == TaskModel.Status.SCHEDULED; - } - }; - - public WorkflowRepairService( - ExecutionDAO executionDAO, - QueueDAO queueDAO, - ConductorProperties properties, - SystemTaskRegistry systemTaskRegistry) { - this.executionDAO = executionDAO; - this.queueDAO = queueDAO; - this.properties = properties; - this.systemTaskRegistry = systemTaskRegistry; - LOGGER.info("WorkflowRepairService Initialized"); - } - - /** - * Verify and repair if the workflowId exists in deciderQueue, and then if each scheduled task - 
* has relevant message in the queue. - */ - public boolean verifyAndRepairWorkflow(String workflowId, boolean includeTasks) { - WorkflowModel workflow = executionDAO.getWorkflow(workflowId, includeTasks); - AtomicBoolean repaired = new AtomicBoolean(false); - repaired.set(verifyAndRepairDeciderQueue(workflow)); - if (includeTasks) { - workflow.getTasks().forEach(task -> repaired.set(verifyAndRepairTask(task))); - } - return repaired.get(); - } - - /** Verify and repair tasks in a workflow. */ - public void verifyAndRepairWorkflowTasks(String workflowId) { - WorkflowModel workflow = executionDAO.getWorkflow(workflowId, true); - workflow.getTasks().forEach(this::verifyAndRepairTask); - // repair the parent workflow if needed - verifyAndRepairWorkflow(workflow.getParentWorkflowId()); - } - - /** - * Verify and fix if Workflow decider queue contains this workflowId. - * - * @return true - if the workflow was queued for repair - */ - private boolean verifyAndRepairDeciderQueue(WorkflowModel workflow) { - if (!workflow.getStatus().isTerminal()) { - return verifyAndRepairWorkflow(workflow.getWorkflowId()); - } - return false; - } - - /** - * Verify if ExecutionDAO and QueueDAO agree for the provided task. 
- * - * @param task the task to be repaired - * @return true - if the task was queued for repair - */ - @VisibleForTesting - boolean verifyAndRepairTask(TaskModel task) { - if (isTaskRepairable.test(task)) { - // Ensure QueueDAO contains this taskId - String taskQueueName = QueueUtils.getQueueName(task); - if (!queueDAO.containsMessage(taskQueueName, task.getTaskId())) { - queueDAO.push(taskQueueName, task.getTaskId(), task.getCallbackAfterSeconds()); - LOGGER.info( - "Task {} in workflow {} re-queued for repairs", - task.getTaskId(), - task.getWorkflowInstanceId()); - Monitors.recordQueueMessageRepushFromRepairService(task.getTaskDefName()); - return true; - } - } - return false; - } - - private boolean verifyAndRepairWorkflow(String workflowId) { - if (StringUtils.isNotEmpty(workflowId)) { - String queueName = Utils.DECIDER_QUEUE; - if (!queueDAO.containsMessage(queueName, workflowId)) { - queueDAO.push( - queueName, workflowId, properties.getWorkflowOffsetTimeout().getSeconds()); - LOGGER.info("Workflow {} re-queued for repairs", workflowId); - Monitors.recordQueueMessageRepushFromRepairService(queueName); - return true; - } - return false; - } - return false; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowSweeper.java b/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowSweeper.java deleted file mode 100644 index 2d3c1eaa5..000000000 --- a/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowSweeper.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.reconciliation; - -import java.util.Optional; -import java.util.concurrent.CompletableFuture; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Async; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; - -import static com.netflix.conductor.core.config.SchedulerConfiguration.SWEEPER_EXECUTOR_NAME; -import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE; - -@Component -public class WorkflowSweeper { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowSweeper.class); - - private final ConductorProperties properties; - private final WorkflowExecutor workflowExecutor; - private final WorkflowRepairService workflowRepairService; - private final QueueDAO queueDAO; - - private static final String CLASS_NAME = WorkflowSweeper.class.getSimpleName(); - - @Autowired - public WorkflowSweeper( - WorkflowExecutor workflowExecutor, - Optional workflowRepairService, - ConductorProperties properties, - QueueDAO queueDAO) { - this.properties = properties; - this.queueDAO = queueDAO; - this.workflowExecutor = workflowExecutor; - this.workflowRepairService = workflowRepairService.orElse(null); - LOGGER.info("WorkflowSweeper initialized."); - 
} - - @Async(SWEEPER_EXECUTOR_NAME) - public CompletableFuture sweepAsync(String workflowId) { - sweep(workflowId); - return CompletableFuture.completedFuture(null); - } - - public void sweep(String workflowId) { - try { - WorkflowContext workflowContext = new WorkflowContext(properties.getAppId()); - WorkflowContext.set(workflowContext); - LOGGER.debug("Running sweeper for workflow {}", workflowId); - - if (workflowRepairService != null) { - // Verify and repair tasks in the workflow. - workflowRepairService.verifyAndRepairWorkflowTasks(workflowId); - } - - boolean done = workflowExecutor.decide(workflowId); - if (done) { - queueDAO.remove(DECIDER_QUEUE, workflowId); - return; - } - } catch (ApplicationException e) { - if (e.getCode() == ApplicationException.Code.NOT_FOUND) { - queueDAO.remove(DECIDER_QUEUE, workflowId); - LOGGER.info( - "Workflow NOT found for id:{}. Removed it from decider queue", - workflowId, - e); - return; - } - } catch (Exception e) { - Monitors.error(CLASS_NAME, "sweep"); - LOGGER.error("Error running sweep for " + workflowId, e); - } - queueDAO.setUnackTimeout( - DECIDER_QUEUE, workflowId, properties.getWorkflowOffsetTimeout().toMillis()); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java b/core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java deleted file mode 100644 index 47c94c761..000000000 --- a/core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.storage; - -import java.io.InputStream; - -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; - -/** - * A dummy implementation of {@link ExternalPayloadStorage} used when no external payload is - * configured - */ -public class DummyPayloadStorage implements ExternalPayloadStorage { - - @Override - public ExternalStorageLocation getLocation( - Operation operation, PayloadType payloadType, String path) { - return null; - } - - @Override - public void upload(String path, InputStream payload, long payloadSize) {} - - @Override - public InputStream download(String path) { - return null; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/sync/Lock.java b/core/src/main/java/com/netflix/conductor/core/sync/Lock.java deleted file mode 100644 index cb8d5ccd4..000000000 --- a/core/src/main/java/com/netflix/conductor/core/sync/Lock.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.sync; - -import java.util.concurrent.TimeUnit; - -/** - * Interface implemented by a distributed lock client. - * - *

    A typical usage: - * - *

    - *   if (acquireLock(workflowId, 5, TimeUnit.MILLISECONDS)) {
    - *      [load and execute workflow....]
    - *      ExecutionDAO.updateWorkflow(workflow);  //use optimistic locking
    - *   } finally {
    - *     releaseLock(workflowId)
    - *   }
    - * 
    - */ -public interface Lock { - - /** - * Acquires a re-entrant lock on lockId, blocks indefinitely on lockId until it succeeds - * - * @param lockId resource to lock on - */ - void acquireLock(String lockId); - - /** - * Acquires a re-entrant lock on lockId, blocks for timeToTry duration before giving up - * - * @param lockId resource to lock on - * @param timeToTry blocks up to timeToTry duration in attempt to acquire the lock - * @param unit time unit - * @return true, if successfully acquired - */ - boolean acquireLock(String lockId, long timeToTry, TimeUnit unit); - - /** - * Acquires a re-entrant lock on lockId with provided leaseTime duration. Blocks for timeToTry - * duration before giving up - * - * @param lockId resource to lock on - * @param timeToTry blocks up to timeToTry duration in attempt to acquire the lock - * @param leaseTime Lock lease expiration duration. - * @param unit time unit - * @return true, if successfully acquired - */ - boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit); - - /** - * Release a previously acquired lock - * - * @param lockId resource to lock on - */ - void releaseLock(String lockId); - - /** - * Explicitly cleanup lock resources, if releasing it wouldn't do so. - * - * @param lockId resource to lock on - */ - void deleteLock(String lockId); -} diff --git a/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLock.java b/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLock.java deleted file mode 100644 index ec5f6eec0..000000000 --- a/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLock.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.sync.local; - -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.Semaphore; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.core.sync.Lock; - -import com.github.benmanes.caffeine.cache.CacheLoader; -import com.github.benmanes.caffeine.cache.Caffeine; -import com.github.benmanes.caffeine.cache.LoadingCache; - -public class LocalOnlyLock implements Lock { - - private static final Logger LOGGER = LoggerFactory.getLogger(LocalOnlyLock.class); - - private static final CacheLoader LOADER = - new CacheLoader() { - @Override - public Semaphore load(String key) { - return new Semaphore(1, true); - } - }; - private static final ConcurrentHashMap> SCHEDULEDFUTURES = - new ConcurrentHashMap<>(); - private static final LoadingCache LOCKIDTOSEMAPHOREMAP = - Caffeine.newBuilder().build(LOADER); - private static final ThreadGroup THREAD_GROUP = new ThreadGroup("LocalOnlyLock-scheduler"); - private static final ThreadFactory THREAD_FACTORY = - runnable -> new Thread(THREAD_GROUP, runnable); - private static final ScheduledExecutorService SCHEDULER = - Executors.newScheduledThreadPool(1, THREAD_FACTORY); - - @Override - public void acquireLock(String lockId) { - LOGGER.trace("Locking {}", lockId); - LOCKIDTOSEMAPHOREMAP.get(lockId).acquireUninterruptibly(); - } - - @Override - 
public boolean acquireLock(String lockId, long timeToTry, TimeUnit unit) { - try { - LOGGER.trace("Locking {} with timeout {} {}", lockId, timeToTry, unit); - return LOCKIDTOSEMAPHOREMAP.get(lockId).tryAcquire(timeToTry, unit); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RuntimeException(e); - } - } - - @Override - public boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit) { - LOGGER.trace( - "Locking {} with timeout {} {} for {} {}", - lockId, - timeToTry, - unit, - leaseTime, - unit); - if (acquireLock(lockId, timeToTry, unit)) { - LOGGER.trace("Releasing {} automatically after {} {}", lockId, leaseTime, unit); - SCHEDULEDFUTURES.put( - lockId, SCHEDULER.schedule(() -> releaseLock(lockId), leaseTime, unit)); - return true; - } - return false; - } - - private void removeLeaseExpirationJob(String lockId) { - ScheduledFuture schedFuture = SCHEDULEDFUTURES.get(lockId); - if (schedFuture != null && schedFuture.cancel(false)) { - SCHEDULEDFUTURES.remove(lockId); - LOGGER.trace("lockId {} removed from lease expiration job", lockId); - } - } - - @Override - public void releaseLock(String lockId) { - // Synchronized to prevent race condition between semaphore check and actual release - // The check is here to prevent semaphore getting above 1 - // e.g. 
in case when lease runs out but release is also called - synchronized (LOCKIDTOSEMAPHOREMAP) { - if (LOCKIDTOSEMAPHOREMAP.get(lockId).availablePermits() == 0) { - LOGGER.trace("Releasing {}", lockId); - LOCKIDTOSEMAPHOREMAP.get(lockId).release(); - removeLeaseExpirationJob(lockId); - } - } - } - - @Override - public void deleteLock(String lockId) { - LOGGER.trace("Deleting {}", lockId); - LOCKIDTOSEMAPHOREMAP.invalidate(lockId); - } - - @VisibleForTesting - LoadingCache cache() { - return LOCKIDTOSEMAPHOREMAP; - } - - @VisibleForTesting - ConcurrentHashMap> scheduledFutures() { - return SCHEDULEDFUTURES; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLockConfiguration.java b/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLockConfiguration.java deleted file mode 100644 index 41a025406..000000000 --- a/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLockConfiguration.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.sync.local; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.sync.Lock; - -@Configuration -@ConditionalOnProperty(name = "conductor.workflow-execution-lock.type", havingValue = "local_only") -public class LocalOnlyLockConfiguration { - - @Bean - public Lock provideLock() { - return new LocalOnlyLock(); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/sync/noop/NoopLock.java b/core/src/main/java/com/netflix/conductor/core/sync/noop/NoopLock.java deleted file mode 100644 index 5d492da1e..000000000 --- a/core/src/main/java/com/netflix/conductor/core/sync/noop/NoopLock.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.sync.noop; - -import java.util.concurrent.TimeUnit; - -import com.netflix.conductor.core.sync.Lock; - -public class NoopLock implements Lock { - - @Override - public void acquireLock(String lockId) {} - - @Override - public boolean acquireLock(String lockId, long timeToTry, TimeUnit unit) { - return true; - } - - @Override - public boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit) { - return true; - } - - @Override - public void releaseLock(String lockId) {} - - @Override - public void deleteLock(String lockId) {} -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/DateTimeUtils.java b/core/src/main/java/com/netflix/conductor/core/utils/DateTimeUtils.java deleted file mode 100644 index ec7ce83f0..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/DateTimeUtils.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.text.ParseException; -import java.time.Duration; -import java.util.Date; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.commons.lang3.time.DateUtils; - -public class DateTimeUtils { - - private static final String[] patterns = - new String[] {"yyyy-MM-dd HH:mm", "yyyy-MM-dd HH:mm z", "yyyy-MM-dd"}; - - public static Duration parseDuration(String text) { - Matcher m = - Pattern.compile( - "\\s*(?:(\\d+)\\s*(?:days?|d))?" - + "\\s*(?:(\\d+)\\s*(?:hours?|hrs?|h))?" - + "\\s*(?:(\\d+)\\s*(?:minutes?|mins?|m))?" - + "\\s*(?:(\\d+)\\s*(?:seconds?|secs?|s))?" - + "\\s*", - Pattern.CASE_INSENSITIVE) - .matcher(text); - if (!m.matches()) throw new IllegalArgumentException("Not valid duration: " + text); - - int days = (m.start(1) == -1 ? 0 : Integer.parseInt(m.group(1))); - int hours = (m.start(2) == -1 ? 0 : Integer.parseInt(m.group(2))); - int mins = (m.start(3) == -1 ? 0 : Integer.parseInt(m.group(3))); - int secs = (m.start(4) == -1 ? 
0 : Integer.parseInt(m.group(4))); - return Duration.ofSeconds((days * 86400) + (hours * 60L + mins) * 60L + secs); - } - - public static Date parseDate(String date) throws ParseException { - return DateUtils.parseDate(date, patterns); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtils.java b/core/src/main/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtils.java deleted file mode 100644 index eb6e86265..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtils.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.VisibleForTesting; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** Provides utility functions to upload and download payloads to {@link ExternalPayloadStorage} */ -@Component -public class ExternalPayloadStorageUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExternalPayloadStorageUtils.class); - - private final ExternalPayloadStorage externalPayloadStorage; - private final ConductorProperties properties; - private final ObjectMapper objectMapper; - - public ExternalPayloadStorageUtils( - ExternalPayloadStorage externalPayloadStorage, - ConductorProperties properties, - ObjectMapper objectMapper) { 
- this.externalPayloadStorage = externalPayloadStorage; - this.properties = properties; - this.objectMapper = objectMapper; - } - - /** - * Download the payload from the given path. - * - * @param path the relative path of the payload in the {@link ExternalPayloadStorage} - * @return the payload object - * @throws ApplicationException in case of JSON parsing errors or download errors - */ - @SuppressWarnings("unchecked") - public Map downloadPayload(String path) { - try (InputStream inputStream = externalPayloadStorage.download(path)) { - return objectMapper.readValue( - IOUtils.toString(inputStream, StandardCharsets.UTF_8), Map.class); - } catch (IOException e) { - LOGGER.error("Unable to download payload from external storage path: {}", path, e); - throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, e); - } - } - - /** - * Verify the payload size and upload to external storage if necessary. - * - * @param entity the task or workflow for which the payload is to be verified and uploaded - * @param payloadType the {@link PayloadType} of the payload - * @param {@link TaskModel} or {@link WorkflowModel} - * @throws ApplicationException in case of JSON parsing errors or upload errors - * @throws TerminateWorkflowException if the payload size is bigger than permissible limit as - * per {@link ConductorProperties} - */ - public void verifyAndUpload(T entity, PayloadType payloadType) { - long threshold = 0L; - long maxThreshold = 0L; - Map payload = new HashMap<>(); - String workflowId = ""; - switch (payloadType) { - case TASK_INPUT: - threshold = properties.getTaskInputPayloadSizeThreshold().toKilobytes(); - maxThreshold = properties.getMaxTaskInputPayloadSizeThreshold().toKilobytes(); - payload = ((TaskModel) entity).getInputData(); - workflowId = ((TaskModel) entity).getWorkflowInstanceId(); - break; - case TASK_OUTPUT: - threshold = properties.getTaskOutputPayloadSizeThreshold().toKilobytes(); - maxThreshold = 
properties.getMaxTaskOutputPayloadSizeThreshold().toKilobytes(); - payload = ((TaskModel) entity).getOutputData(); - workflowId = ((TaskModel) entity).getWorkflowInstanceId(); - break; - case WORKFLOW_INPUT: - threshold = properties.getWorkflowInputPayloadSizeThreshold().toKilobytes(); - maxThreshold = properties.getMaxWorkflowInputPayloadSizeThreshold().toKilobytes(); - payload = ((WorkflowModel) entity).getInput(); - workflowId = ((WorkflowModel) entity).getWorkflowId(); - break; - case WORKFLOW_OUTPUT: - threshold = properties.getWorkflowOutputPayloadSizeThreshold().toKilobytes(); - maxThreshold = properties.getMaxWorkflowOutputPayloadSizeThreshold().toKilobytes(); - payload = ((WorkflowModel) entity).getOutput(); - workflowId = ((WorkflowModel) entity).getWorkflowId(); - break; - } - - try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { - objectMapper.writeValue(byteArrayOutputStream, payload); - byte[] payloadBytes = byteArrayOutputStream.toByteArray(); - long payloadSize = payloadBytes.length; - - if (payloadSize > maxThreshold * 1024) { - if (entity instanceof TaskModel) { - String errorMsg = - String.format( - "The payload size: %d of task: %s in workflow: %s is greater than the permissible limit: %d bytes", - payloadSize, - ((TaskModel) entity).getTaskId(), - ((TaskModel) entity).getWorkflowInstanceId(), - maxThreshold); - failTask(((TaskModel) entity), payloadType, errorMsg); - } else { - String errorMsg = - String.format( - "The output payload size: %dB of workflow: %s is greater than the permissible limit: %d bytes", - payloadSize, - ((WorkflowModel) entity).getWorkflowId(), - maxThreshold); - failWorkflow(((WorkflowModel) entity), payloadType, errorMsg); - } - } else if (payloadSize > threshold * 1024) { - String externalInputPayloadStoragePath, externalOutputPayloadStoragePath; - switch (payloadType) { - case TASK_INPUT: - externalInputPayloadStoragePath = - uploadHelper(payloadBytes, payloadSize, PayloadType.TASK_INPUT); 
- ((TaskModel) entity).externalizeInput(externalInputPayloadStoragePath); - Monitors.recordExternalPayloadStorageUsage( - ((TaskModel) entity).getTaskDefName(), - ExternalPayloadStorage.Operation.WRITE.toString(), - PayloadType.TASK_INPUT.toString()); - break; - case TASK_OUTPUT: - externalOutputPayloadStoragePath = - uploadHelper(payloadBytes, payloadSize, PayloadType.TASK_OUTPUT); - ((TaskModel) entity).externalizeOutput(externalOutputPayloadStoragePath); - Monitors.recordExternalPayloadStorageUsage( - ((TaskModel) entity).getTaskDefName(), - ExternalPayloadStorage.Operation.WRITE.toString(), - PayloadType.TASK_OUTPUT.toString()); - break; - case WORKFLOW_INPUT: - externalInputPayloadStoragePath = - uploadHelper(payloadBytes, payloadSize, PayloadType.WORKFLOW_INPUT); - ((WorkflowModel) entity).externalizeInput(externalInputPayloadStoragePath); - Monitors.recordExternalPayloadStorageUsage( - ((WorkflowModel) entity).getWorkflowName(), - ExternalPayloadStorage.Operation.WRITE.toString(), - PayloadType.WORKFLOW_INPUT.toString()); - break; - case WORKFLOW_OUTPUT: - externalOutputPayloadStoragePath = - uploadHelper( - payloadBytes, payloadSize, PayloadType.WORKFLOW_OUTPUT); - ((WorkflowModel) entity) - .externalizeOutput(externalOutputPayloadStoragePath); - Monitors.recordExternalPayloadStorageUsage( - ((WorkflowModel) entity).getWorkflowName(), - ExternalPayloadStorage.Operation.WRITE.toString(), - PayloadType.WORKFLOW_OUTPUT.toString()); - break; - } - } - } catch (IOException e) { - LOGGER.error( - "Unable to upload payload to external storage for workflow: {}", workflowId, e); - throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, e); - } - } - - @VisibleForTesting - String uploadHelper( - byte[] payloadBytes, long payloadSize, ExternalPayloadStorage.PayloadType payloadType) { - ExternalStorageLocation location = - externalPayloadStorage.getLocation( - ExternalPayloadStorage.Operation.WRITE, payloadType, ""); - externalPayloadStorage.upload( - 
location.getPath(), new ByteArrayInputStream(payloadBytes), payloadSize); - return location.getPath(); - } - - @VisibleForTesting - void failTask(TaskModel task, PayloadType payloadType, String errorMsg) { - LOGGER.error(errorMsg); - task.setReasonForIncompletion(errorMsg); - task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR); - if (payloadType == PayloadType.TASK_INPUT) { - task.setInputData(new HashMap<>()); - } else { - task.setOutputData(new HashMap<>()); - } - throw new TerminateWorkflowException(errorMsg, WorkflowModel.Status.FAILED, task); - } - - @VisibleForTesting - void failWorkflow(WorkflowModel workflow, PayloadType payloadType, String errorMsg) { - LOGGER.error(errorMsg); - if (payloadType == PayloadType.WORKFLOW_INPUT) { - workflow.setInput(new HashMap<>()); - } else { - workflow.setOutput(new HashMap<>()); - } - throw new TerminateWorkflowException(errorMsg); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/IDGenerator.java b/core/src/main/java/com/netflix/conductor/core/utils/IDGenerator.java deleted file mode 100644 index 813d63aa9..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/IDGenerator.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.UUID; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - -@Component -@ConditionalOnProperty( - name = "conductor.id.generator", - havingValue = "default", - matchIfMissing = true) -/** - * ID Generator used by Conductor Note on overriding the ID Generator: The default ID generator uses - * UUID v4 as the ID format. By overriding this class it is possible to use different scheme for ID - * generation. However, this is not normal and should only be done after very careful consideration. - * - *

    Please note, if you use Cassandra persistence, the schema uses UUID as the column type and the - * IDs have to be valid UUIDs supported by Cassandra. - */ -public class IDGenerator { - - public IDGenerator() {} - - public String generate() { - return UUID.randomUUID().toString(); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/JsonUtils.java b/core/src/main/java/com/netflix/conductor/core/utils/JsonUtils.java deleted file mode 100644 index 38f70218c..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/JsonUtils.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.List; -import java.util.Map; - -import org.springframework.stereotype.Component; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** This class contains utility functions for parsing/expanding JSON. */ -@SuppressWarnings("unchecked") -@Component -public class JsonUtils { - - private final ObjectMapper objectMapper; - - public JsonUtils(ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - } - - /** - * Expands a JSON object into a java object - * - * @param input the object to be expanded - * @return the expanded object containing java types like {@link Map} and {@link List} - */ - public Object expand(Object input) { - if (input instanceof List) { - expandList((List) input); - return input; - } else if (input instanceof Map) { - expandMap((Map) input); - return input; - } else if (input instanceof String) { - return getJson((String) input); - } else { - return input; - } - } - - private void expandList(List input) { - for (Object value : input) { - if (value instanceof String) { - if (isJsonString(value.toString())) { - value = getJson(value.toString()); - } - } else if (value instanceof Map) { - expandMap((Map) value); - } else if (value instanceof List) { - expandList((List) value); - } - } - } - - private void expandMap(Map input) { - for (Map.Entry entry : input.entrySet()) { - Object value = entry.getValue(); - if (value instanceof String) { - if (isJsonString(value.toString())) { - entry.setValue(getJson(value.toString())); - } - } else if (value instanceof Map) { - expandMap((Map) value); - } else if (value instanceof List) { - expandList((List) 
value); - } - } - } - - /** - * Used to obtain a JSONified object from a string - * - * @param jsonAsString the json object represented in string form - * @return the JSONified object representation if the input is a valid json string if the input - * is not a valid json string, it will be returned as-is and no exception is thrown - */ - private Object getJson(String jsonAsString) { - try { - return objectMapper.readValue(jsonAsString, Object.class); - } catch (Exception e) { - return jsonAsString; - } - } - - private boolean isJsonString(String jsonAsString) { - jsonAsString = jsonAsString.trim(); - return jsonAsString.startsWith("{") || jsonAsString.startsWith("["); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/ParametersUtils.java b/core/src/main/java/com/netflix/conductor/core/utils/ParametersUtils.java deleted file mode 100644 index e76344ff8..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/ParametersUtils.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.utils.EnvUtils; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.jayway.jsonpath.Configuration; -import com.jayway.jsonpath.DocumentContext; -import com.jayway.jsonpath.JsonPath; -import com.jayway.jsonpath.Option; - -/** Used to parse and resolve the JSONPath bindings in the workflow and task definitions. 
*/ -@Component -public class ParametersUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(ParametersUtils.class); - - private final ObjectMapper objectMapper; - private final TypeReference> map = new TypeReference<>() {}; - - public ParametersUtils(ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - } - - public Map getTaskInput( - Map inputParams, - WorkflowModel workflow, - TaskDef taskDefinition, - String taskId) { - if (workflow.getWorkflowDefinition().getSchemaVersion() > 1) { - return getTaskInputV2(inputParams, workflow, taskId, taskDefinition); - } - return getTaskInputV1(workflow, inputParams); - } - - public Map getTaskInputV2( - Map input, - WorkflowModel workflow, - String taskId, - TaskDef taskDefinition) { - Map inputParams; - - if (input != null) { - inputParams = clone(input); - } else { - inputParams = new HashMap<>(); - } - if (taskDefinition != null && taskDefinition.getInputTemplate() != null) { - clone(taskDefinition.getInputTemplate()).forEach(inputParams::putIfAbsent); - } - - Map> inputMap = new HashMap<>(); - - Map workflowParams = new HashMap<>(); - workflowParams.put("input", workflow.getInput()); - workflowParams.put("output", workflow.getOutput()); - workflowParams.put("status", workflow.getStatus()); - workflowParams.put("workflowId", workflow.getWorkflowId()); - workflowParams.put("parentWorkflowId", workflow.getParentWorkflowId()); - workflowParams.put("parentWorkflowTaskId", workflow.getParentWorkflowTaskId()); - workflowParams.put("workflowType", workflow.getWorkflowName()); - workflowParams.put("version", workflow.getWorkflowVersion()); - workflowParams.put("correlationId", workflow.getCorrelationId()); - workflowParams.put("reasonForIncompletion", workflow.getReasonForIncompletion()); - workflowParams.put("schemaVersion", workflow.getWorkflowDefinition().getSchemaVersion()); - workflowParams.put("variables", workflow.getVariables()); - - inputMap.put("workflow", workflowParams); - - // For new 
workflow being started the list of tasks will be empty - workflow.getTasks().stream() - .map(TaskModel::getReferenceTaskName) - .map(workflow::getTaskByRefName) - .forEach( - task -> { - Map taskParams = new HashMap<>(); - taskParams.put("input", task.getInputData()); - taskParams.put("output", task.getOutputData()); - taskParams.put("taskType", task.getTaskType()); - if (task.getStatus() != null) { - taskParams.put("status", task.getStatus().toString()); - } - taskParams.put("referenceTaskName", task.getReferenceTaskName()); - taskParams.put("retryCount", task.getRetryCount()); - taskParams.put("correlationId", task.getCorrelationId()); - taskParams.put("pollCount", task.getPollCount()); - taskParams.put("taskDefName", task.getTaskDefName()); - taskParams.put("scheduledTime", task.getScheduledTime()); - taskParams.put("startTime", task.getStartTime()); - taskParams.put("endTime", task.getEndTime()); - taskParams.put("workflowInstanceId", task.getWorkflowInstanceId()); - taskParams.put("taskId", task.getTaskId()); - taskParams.put( - "reasonForIncompletion", task.getReasonForIncompletion()); - taskParams.put("callbackAfterSeconds", task.getCallbackAfterSeconds()); - taskParams.put("workerId", task.getWorkerId()); - taskParams.put("iteration", task.getIteration()); - inputMap.put( - task.isLoopOverTask() - ? TaskUtils.removeIterationFromTaskRefName( - task.getReferenceTaskName()) - : task.getReferenceTaskName(), - taskParams); - }); - - Configuration option = - Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS); - DocumentContext documentContext = JsonPath.parse(inputMap, option); - Map replacedTaskInput = replace(inputParams, documentContext, taskId); - if (taskDefinition != null && taskDefinition.getInputTemplate() != null) { - // If input for a given key resolves to null, try replacing it with one from - // inputTemplate, if it exists. - replacedTaskInput.replaceAll( - (key, value) -> - (value == null) ? 
taskDefinition.getInputTemplate().get(key) : value); - } - return replacedTaskInput; - } - - // deep clone using json - POJO - private Map clone(Map inputTemplate) { - try { - byte[] bytes = objectMapper.writeValueAsBytes(inputTemplate); - return objectMapper.readValue(bytes, map); - } catch (IOException e) { - throw new RuntimeException("Unable to clone input params", e); - } - } - - public Map replace(Map input, Object json) { - Object doc; - if (json instanceof String) { - doc = JsonPath.parse(json.toString()); - } else { - doc = json; - } - Configuration option = - Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS); - DocumentContext documentContext = JsonPath.parse(doc, option); - return replace(input, documentContext, null); - } - - public Object replace(String paramString) { - Configuration option = - Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS); - DocumentContext documentContext = JsonPath.parse(Collections.emptyMap(), option); - return replaceVariables(paramString, documentContext, null); - } - - @SuppressWarnings("unchecked") - private Map replace( - Map input, DocumentContext documentContext, String taskId) { - Map result = new HashMap<>(); - for (Entry e : input.entrySet()) { - Object newValue; - Object value = e.getValue(); - if (value instanceof String) { - newValue = replaceVariables(value.toString(), documentContext, taskId); - } else if (value instanceof Map) { - // recursive call - newValue = replace((Map) value, documentContext, taskId); - } else if (value instanceof List) { - newValue = replaceList((List) value, taskId, documentContext); - } else { - newValue = value; - } - result.put(e.getKey(), newValue); - } - return result; - } - - @SuppressWarnings("unchecked") - private Object replaceList(List values, String taskId, DocumentContext io) { - List replacedList = new LinkedList<>(); - for (Object listVal : values) { - if (listVal instanceof String) { - Object replaced = 
replaceVariables(listVal.toString(), io, taskId); - replacedList.add(replaced); - } else if (listVal instanceof Map) { - Object replaced = replace((Map) listVal, io, taskId); - replacedList.add(replaced); - } else if (listVal instanceof List) { - Object replaced = replaceList((List) listVal, taskId, io); - replacedList.add(replaced); - } else { - replacedList.add(listVal); - } - } - return replacedList; - } - - private Object replaceVariables( - String paramString, DocumentContext documentContext, String taskId) { - String[] values = paramString.split("(?=(? 1) { - for (int i = 0; i < convertedValues.length; i++) { - Object val = convertedValues[i]; - if (val == null) { - val = ""; - } - if (i == 0) { - retObj = val; - } else { - retObj = retObj + "" + val.toString(); - } - } - } - return retObj; - } - - @Deprecated - // Workflow schema version 1 is deprecated and new workflows should be using version 2 - private Map getTaskInputV1( - WorkflowModel workflow, Map inputParams) { - Map input = new HashMap<>(); - if (inputParams == null) { - return input; - } - Map workflowInput = workflow.getInput(); - inputParams.forEach( - (paramName, value) -> { - String paramPath = "" + value; - String[] paramPathComponents = paramPath.split("\\."); - Utils.checkArgument( - paramPathComponents.length == 3, - "Invalid input expression for " - + paramName - + ", paramPathComponents.size=" - + paramPathComponents.length - + ", expression=" - + paramPath); - - String source = paramPathComponents[0]; // workflow, or task reference name - String type = paramPathComponents[1]; // input/output - String name = paramPathComponents[2]; // name of the parameter - if ("workflow".equals(source)) { - input.put(paramName, workflowInput.get(name)); - } else { - TaskModel task = workflow.getTaskByRefName(source); - if (task != null) { - if ("input".equals(type)) { - input.put(paramName, task.getInputData().get(name)); - } else { - input.put(paramName, task.getOutputData().get(name)); - } - } - } - 
}); - return input; - } - - public Map getWorkflowInput( - WorkflowDef workflowDef, Map inputParams) { - if (workflowDef != null && workflowDef.getInputTemplate() != null) { - clone(workflowDef.getInputTemplate()).forEach(inputParams::putIfAbsent); - } - return inputParams; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/QueueUtils.java b/core/src/main/java/com/netflix/conductor/core/utils/QueueUtils.java deleted file mode 100644 index bca477390..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/QueueUtils.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.model.TaskModel; - -public class QueueUtils { - - public static final String DOMAIN_SEPARATOR = ":"; - private static final String ISOLATION_SEPARATOR = "-"; - private static final String EXECUTION_NAME_SPACE_SEPARATOR = "@"; - - public static String getQueueName(TaskModel taskModel) { - return getQueueName( - taskModel.getTaskType(), - taskModel.getDomain(), - taskModel.getIsolationGroupId(), - taskModel.getExecutionNameSpace()); - } - - public static String getQueueName(Task task) { - return getQueueName( - task.getTaskType(), - task.getDomain(), - task.getIsolationGroupId(), - task.getExecutionNameSpace()); - } - - /** - * Creates a queue name string using taskType, domain, - * isolationGroupId and executionNamespace. - * - * @return domain:taskType@eexecutionNameSpace-isolationGroupId. 
- */ - public static String getQueueName( - String taskType, String domain, String isolationGroupId, String executionNamespace) { - - String queueName; - if (domain == null) { - queueName = taskType; - } else { - queueName = domain + DOMAIN_SEPARATOR + taskType; - } - - if (executionNamespace != null) { - queueName = queueName + EXECUTION_NAME_SPACE_SEPARATOR + executionNamespace; - } - - if (isolationGroupId != null) { - queueName = queueName + ISOLATION_SEPARATOR + isolationGroupId; - } - return queueName; - } - - public static String getQueueNameWithoutDomain(String queueName) { - return queueName.substring(queueName.indexOf(DOMAIN_SEPARATOR) + 1); - } - - public static String getExecutionNameSpace(String queueName) { - if (StringUtils.contains(queueName, ISOLATION_SEPARATOR) - && StringUtils.contains(queueName, EXECUTION_NAME_SPACE_SEPARATOR)) { - return StringUtils.substringBetween( - queueName, EXECUTION_NAME_SPACE_SEPARATOR, ISOLATION_SEPARATOR); - } else if (StringUtils.contains(queueName, EXECUTION_NAME_SPACE_SEPARATOR)) { - return StringUtils.substringAfter(queueName, EXECUTION_NAME_SPACE_SEPARATOR); - } else { - return StringUtils.EMPTY; - } - } - - public static boolean isIsolatedQueue(String queue) { - return StringUtils.isNotBlank(getIsolationGroup(queue)); - } - - private static String getIsolationGroup(String queue) { - return StringUtils.substringAfter(queue, QueueUtils.ISOLATION_SEPARATOR); - } - - public static String getTaskType(String queue) { - - if (StringUtils.isBlank(queue)) { - return StringUtils.EMPTY; - } - - int domainSeperatorIndex = StringUtils.indexOf(queue, DOMAIN_SEPARATOR); - int startIndex; - if (domainSeperatorIndex == -1) { - startIndex = 0; - } else { - startIndex = domainSeperatorIndex + 1; - } - int endIndex = StringUtils.indexOf(queue, EXECUTION_NAME_SPACE_SEPARATOR); - - if (endIndex == -1) { - endIndex = StringUtils.lastIndexOf(queue, ISOLATION_SEPARATOR); - } - if (endIndex == -1) { - endIndex = queue.length(); - } - - 
return StringUtils.substring(queue, startIndex, endIndex); - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/SemaphoreUtil.java b/core/src/main/java/com/netflix/conductor/core/utils/SemaphoreUtil.java deleted file mode 100644 index 793494bd9..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/SemaphoreUtil.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.concurrent.Semaphore; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** A class wrapping a semaphore which holds the number of permits available for processing. */ -public class SemaphoreUtil { - - private static final Logger LOGGER = LoggerFactory.getLogger(SemaphoreUtil.class); - private final Semaphore semaphore; - - public SemaphoreUtil(int numSlots) { - LOGGER.debug("Semaphore util initialized with {} permits", numSlots); - semaphore = new Semaphore(numSlots); - } - - /** - * Signals if processing is allowed based on whether specified number of permits can be - * acquired. - * - * @param numSlots the number of permits to acquire - * @return {@code true} - if permit is acquired {@code false} - if permit could not be acquired - */ - public boolean acquireSlots(int numSlots) { - boolean acquired = semaphore.tryAcquire(numSlots); - LOGGER.trace("Trying to acquire {} permit: {}", numSlots, acquired); - return acquired; - } - - /** Signals that processing is complete and the specified number of permits can be released. */ - public void completeProcessing(int numSlots) { - LOGGER.trace("Completed execution; releasing permit"); - semaphore.release(numSlots); - } - - /** - * Gets the number of slots available for processing. 
- * - * @return number of available permits - */ - public int availableSlots() { - int available = semaphore.availablePermits(); - LOGGER.trace("Number of available permits: {}", available); - return available; - } -} diff --git a/core/src/main/java/com/netflix/conductor/core/utils/Utils.java b/core/src/main/java/com/netflix/conductor/core/utils/Utils.java deleted file mode 100644 index 537d1137a..000000000 --- a/core/src/main/java/com/netflix/conductor/core/utils/Utils.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.*; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.core.exception.ApplicationException; - -public class Utils { - - public static final String DECIDER_QUEUE = "_deciderQueue"; - - /** - * ID of the server. Can be host name, IP address or any other meaningful identifier - * - * @return canonical host name resolved for the instance, "unknown" if resolution fails - */ - public static String getServerId() { - try { - return InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - return "unknown"; - } - } - - /** - * Split string with "|" as delimiter. - * - * @param inputStr Input string - * @return List of String - */ - public static List convertStringToList(String inputStr) { - List list = new ArrayList<>(); - if (StringUtils.isNotBlank(inputStr)) { - list = Arrays.asList(inputStr.split("\\|")); - } - return list; - } - - /** - * Ensures the truth of an condition involving one or more parameters to the calling method. - * - * @param condition a boolean expression - * @param errorMessage The exception message use if the input condition is not valid - * @throws ApplicationException if input condition is not valid - */ - public static void checkArgument(boolean condition, String errorMessage) { - if (!condition) { - throw new IllegalArgumentException(errorMessage); - } - } - - /** - * This method checks if the collection is null or is empty. 
- * - * @param collection input of type {@link Collection} - * @param errorMessage The exception message use if the collection is empty or null - * @throws ApplicationException if input Collection is not valid - */ - public static void checkNotNullOrEmpty(Collection collection, String errorMessage) { - if (collection == null || collection.isEmpty()) { - throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, errorMessage); - } - } - - /** - * This method checks if the input map is valid or not. - * - * @param map input of type {@link Map} - * @param errorMessage The exception message use if the map is empty or null - * @throws ApplicationException if input map is not valid - */ - public static void checkNotNullOrEmpty(Map map, String errorMessage) { - if (map == null || map.isEmpty()) { - throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, errorMessage); - } - } - - /** - * This method checks it the input string is null or empty. - * - * @param input input of type {@link String} - * @param errorMessage The exception message use if the string is empty or null - * @throws ApplicationException if input string is not valid - */ - public static void checkNotNullOrEmpty(String input, String errorMessage) { - if (StringUtils.isEmpty(input)) { - throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, errorMessage); - } - } - - /** - * This method checks if the object is null or empty. - * - * @param object input of type {@link Object} - * @param errorMessage The exception message use if the object is empty or null - * @throws ApplicationException if input object is not valid - */ - public static void checkNotNull(Object object, String errorMessage) { - if (object == null) { - throw new NullPointerException(errorMessage); - } - } - - /** - * Used to determine if the exception is thrown due to a transient failure and the operation is - * expected to succeed upon retrying. 
- * - * @param throwable the exception that is thrown - * @return true - if the exception is a transient failure - *

    false - if the exception is non-transient - */ - public static boolean isTransientException(Throwable throwable) { - if (throwable != null) { - return !((throwable instanceof UnsupportedOperationException) - || (throwable instanceof ApplicationException - && ((ApplicationException) throwable).getCode() - != ApplicationException.Code.BACKEND_ERROR)); - } - return true; - } -} diff --git a/core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java b/core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java deleted file mode 100644 index 853a72e63..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.model.TaskModel; - -/** - * A contract to support concurrency limits of tasks. - * - * @since v3.3.5. - */ -public interface ConcurrentExecutionLimitDAO { - - default void addTaskToLimit(TaskModel task) { - throw new UnsupportedOperationException( - getClass() + " does not support addTaskToLimit method."); - } - - default void removeTaskFromLimit(TaskModel task) { - throw new UnsupportedOperationException( - getClass() + " does not support removeTaskFromLimit method."); - } - - /** - * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the - * task is scheduled to be in progress (given to the worker or for system tasks start() method - * called) - * - * @param task The task to be executed. Limit is set in the Task's definition - * @return true if by executing this task, the limit is breached. false otherwise. - * @see TaskDef#concurrencyLimit() - */ - boolean exceedsLimit(TaskModel task); -} diff --git a/core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java b/core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java deleted file mode 100644 index 6c9dc47a9..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; - -import com.netflix.conductor.common.metadata.events.EventHandler; - -/** An abstraction to enable different Event Handler store implementations */ -public interface EventHandlerDAO { - - /** - * @param eventHandler Event handler to be added. - *

    NOTE: Will throw an exception if an event handler already exists with the - * name - */ - void addEventHandler(EventHandler eventHandler); - - /** - * @param eventHandler Event handler to be updated. - */ - void updateEventHandler(EventHandler eventHandler); - - /** - * @param name Removes the event handler from the system - */ - void removeEventHandler(String name); - - /** - * @return All the event handlers registered in the system - */ - List getAllEventHandlers(); - - /** - * @param event name of the event - * @param activeOnly if true, returns only the active handlers - * @return Returns the list of all the event handlers for a given event - */ - List getEventHandlersForEvent(String event, boolean activeOnly); -} diff --git a/core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java b/core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java deleted file mode 100644 index 8e33cac29..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -/** Data access layer for storing workflow executions */ -public interface ExecutionDAO { - - /** - * @param taskName Name of the task - * @param workflowId Workflow instance id - * @return List of pending tasks (in_progress) - */ - List getPendingTasksByWorkflow(String taskName, String workflowId); - - /** - * @param taskType Type of task - * @param startKey start - * @param count number of tasks to return - * @return List of tasks starting from startKey - */ - List getTasks(String taskType, String startKey, int count); - - /** - * @param tasks tasks to be created - * @return List of tasks that were created. - *

    Note on the primary key constraint - *

    For a given task reference name and retryCount should be considered unique/primary - * key. Given two tasks with the same reference name and retryCount only one should be added - * to the database. - */ - List createTasks(List tasks); - - /** - * @param task Task to be updated - */ - void updateTask(TaskModel task); - - /** - * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the - * task is scheduled to be in progress (given to the worker or for system tasks start() method - * called) - * - * @param task The task to be executed. Limit is set in the Task's definition - * @return true if by executing this task, the limit is breached. false otherwise. - * @see TaskDef#concurrencyLimit() - * @deprecated Since v3.3.5. Use {@link ConcurrentExecutionLimitDAO#exceedsLimit(TaskModel)}. - */ - @Deprecated - default boolean exceedsInProgressLimit(TaskModel task) { - throw new UnsupportedOperationException( - getClass() + "does not support exceedsInProgressLimit"); - } - - /** - * @param taskId id of the task to be removed. - * @return true if the deletion is successful, false otherwise. 
- */ - boolean removeTask(String taskId); - - /** - * @param taskId Task instance id - * @return Task - */ - TaskModel getTask(String taskId); - - /** - * @param taskIds Task instance ids - * @return List of tasks - */ - List getTasks(List taskIds); - - /** - * @param taskType Type of the task for which to retrieve the list of pending tasks - * @return List of pending tasks - */ - List getPendingTasksForTaskType(String taskType); - - /** - * @param workflowId Workflow instance id - * @return List of tasks for the given workflow instance id - */ - List getTasksForWorkflow(String workflowId); - - /** - * @param workflow Workflow to be created - * @return Id of the newly created workflow - */ - String createWorkflow(WorkflowModel workflow); - - /** - * @param workflow Workflow to be updated - * @return Id of the updated workflow - */ - String updateWorkflow(WorkflowModel workflow); - - /** - * @param workflowId workflow instance id - * @return true if the deletion is successful, false otherwise - */ - boolean removeWorkflow(String workflowId); - - /** - * Removes the workflow with ttl seconds - * - * @param workflowId workflowId workflow instance id - * @param ttlSeconds time to live in seconds. - * @return - */ - boolean removeWorkflowWithExpiry(String workflowId, int ttlSeconds); - - /** - * @param workflowType Workflow Type - * @param workflowId workflow instance id - */ - void removeFromPendingWorkflow(String workflowType, String workflowId); - - /** - * @param workflowId workflow instance id - * @return Workflow - */ - WorkflowModel getWorkflow(String workflowId); - - /** - * @param workflowId workflow instance id - * @param includeTasks if set, includes the tasks (pending and completed) sorted by Task - * Sequence number in Workflow. 
- * @return Workflow instance details - */ - WorkflowModel getWorkflow(String workflowId, boolean includeTasks); - - /** - * @param workflowName name of the workflow - * @param version the workflow version - * @return List of workflow ids which are running - */ - List getRunningWorkflowIds(String workflowName, int version); - - /** - * @param workflowName Name of the workflow - * @param version the workflow version - * @return List of workflows that are running - */ - List getPendingWorkflowsByType(String workflowName, int version); - - /** - * @param workflowName Name of the workflow - * @return No. of running workflows - */ - long getPendingWorkflowCount(String workflowName); - - /** - * @param taskDefName Name of the task - * @return Number of task currently in IN_PROGRESS status - */ - long getInProgressTaskCount(String taskDefName); - - /** - * @param workflowName Name of the workflow - * @param startTime epoch time - * @param endTime epoch time - * @return List of workflows between start and end time - */ - List getWorkflowsByType(String workflowName, Long startTime, Long endTime); - - /** - * @param workflowName workflow name - * @param correlationId Correlation Id - * @param includeTasks Option to includeTasks in results - * @return List of workflows by correlation id - */ - List getWorkflowsByCorrelationId( - String workflowName, String correlationId, boolean includeTasks); - - /** - * @return true, if the DAO implementation is capable of searching across workflows false, if - * the DAO implementation cannot perform searches across workflows (and needs to use - * indexDAO) - */ - boolean canSearchAcrossWorkflows(); - - // Events - - /** - * @param eventExecution Event Execution to be stored - * @return true if the event was added. false otherwise when the event by id is already already - * stored. 
- */ - boolean addEventExecution(EventExecution eventExecution); - - /** - * @param eventExecution Event execution to be updated - */ - void updateEventExecution(EventExecution eventExecution); - - /** - * @param eventExecution Event execution to be removed - */ - void removeEventExecution(EventExecution eventExecution); -} diff --git a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java deleted file mode 100644 index 490758d15..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; - -/** DAO to index the workflow and task details for searching. */ -public interface IndexDAO { - - /** Setup method in charge or initializing/populating the index. */ - void setup() throws Exception; - - /** - * This method should return an unique identifier of the indexed doc - * - * @param workflow Workflow to be indexed - */ - void indexWorkflow(WorkflowSummary workflow); - - /** - * This method should return an unique identifier of the indexed doc - * - * @param workflow Workflow to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncIndexWorkflow(WorkflowSummary workflow); - - /** - * @param task Task to be indexed - */ - void indexTask(TaskSummary task); - - /** - * @param task Task to be indexed asynchronously - * @return CompletableFuture of type void - */ - CompletableFuture asyncIndexTask(TaskSummary task); - - /** - * @param query SQL like query for workflow search parameters. - * @param freeText Additional query in free text. 
Lucene syntax - * @param start start start index for pagination - * @param count count # of workflow ids to be returned - * @param sort sort options - * @return List of workflow ids for the matching query - */ - SearchResult searchWorkflows( - String query, String freeText, int start, int count, List sort); - - /** - * @param query SQL like query for task search parameters. - * @param freeText Additional query in free text. Lucene syntax - * @param start start start index for pagination - * @param count count # of task ids to be returned - * @param sort sort options - * @return List of workflow ids for the matching query - */ - SearchResult searchTasks( - String query, String freeText, int start, int count, List sort); - - /** - * Remove the workflow index - * - * @param workflowId workflow to be removed - */ - void removeWorkflow(String workflowId); - - /** - * Remove the workflow index - * - * @param workflowId workflow to be removed - * @return CompletableFuture of type void - */ - CompletableFuture asyncRemoveWorkflow(String workflowId); - - /** - * Updates the index - * - * @param workflowInstanceId id of the workflow - * @param keys keys to be updated - * @param values values. Number of keys and values MUST match. - */ - void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values); - - /** - * Updates the index - * - * @param workflowInstanceId id of the workflow - * @param keys keys to be updated - * @param values values. Number of keys and values MUST match. 
- * @return CompletableFuture of type void - */ - CompletableFuture asyncUpdateWorkflow( - String workflowInstanceId, String[] keys, Object[] values); - - /** - * Retrieves a specific field from the index - * - * @param workflowInstanceId id of the workflow - * @param key field to be retrieved - * @return value of the field as string - */ - String get(String workflowInstanceId, String key); - - /** - * @param logs Task Execution logs to be indexed - */ - void addTaskExecutionLogs(List logs); - - /** - * @param logs Task Execution logs to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncAddTaskExecutionLogs(List logs); - - /** - * @param taskId Id of the task for which to fetch the execution logs - * @return Returns the task execution logs for given task id - */ - List getTaskExecutionLogs(String taskId); - - /** - * @param eventExecution Event Execution to be indexed - */ - void addEventExecution(EventExecution eventExecution); - - List getEventExecutions(String event); - - /** - * @param eventExecution Event Execution to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncAddEventExecution(EventExecution eventExecution); - - /** - * Adds an incoming external message into the index - * - * @param queue Name of the registered queue - * @param msg Message - */ - void addMessage(String queue, Message msg); - - /** - * Adds an incoming external message into the index - * - * @param queue Name of the registered queue - * @param message {@link Message} - * @return CompletableFuture of type Void - */ - CompletableFuture asyncAddMessage(String queue, Message message); - - List getMessages(String queue); - - /** - * Search for Workflows completed or failed beyond archiveTtlDays - * - * @param indexName Name of the index to search - * @param archiveTtlDays Archival Time to Live - * @return List of worlflow Ids matching the pattern - */ - List searchArchivableWorkflows(String indexName, long archiveTtlDays); - 
- /** - * Get total workflow counts that matches the query - * - * @param query SQL like query for workflow search parameters. - * @param freeText Additional query in free text. Lucene syntax - * @return Number of matches for the query - */ - long getWorkflowCount(String query, String freeText); -} diff --git a/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java b/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java deleted file mode 100644 index 1d6bb7020..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; -import java.util.Optional; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; - -/** Data access layer for the workflow metadata - task definitions and workflow definitions */ -public interface MetadataDAO { - - /** - * @param taskDef task definition to be created - */ - void createTaskDef(TaskDef taskDef); - - /** - * @param taskDef task definition to be updated. - * @return name of the task definition - */ - String updateTaskDef(TaskDef taskDef); - - /** - * @param name Name of the task - * @return Task Definition - */ - TaskDef getTaskDef(String name); - - /** - * @return All the task definitions - */ - List getAllTaskDefs(); - - /** - * @param name Name of the task - */ - void removeTaskDef(String name); - - /** - * @param def workflow definition - */ - void createWorkflowDef(WorkflowDef def); - - /** - * @param def workflow definition - */ - void updateWorkflowDef(WorkflowDef def); - - /** - * @param name Name of the workflow - * @return Workflow Definition - */ - Optional getLatestWorkflowDef(String name); - - /** - * @param name Name of the workflow - * @param version version - * @return workflow definition - */ - Optional getWorkflowDef(String name, int version); - - /** - * @param name Name of the workflow definition to be removed - * @param version Version of the workflow definition to be removed - */ - void removeWorkflowDef(String name, Integer version); - - /** - * @return List of all the workflow definitions - */ - List getAllWorkflowDefs(); -} diff --git 
a/core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java b/core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java deleted file mode 100644 index d53d6660d..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.PollData; - -/** An abstraction to enable different PollData store implementations */ -public interface PollDataDAO { - - /** - * Updates the {@link PollData} information with the most recently polled data for a task queue. - * - * @param taskDefName name of the task as specified in the task definition - * @param domain domain in which this task is being polled from - * @param workerId the identifier of the worker polling for this task - */ - void updateLastPollData(String taskDefName, String domain, String workerId); - - /** - * Retrieve the {@link PollData} for the given task in the given domain. - * - * @param taskDefName name of the task as specified in the task definition - * @param domain domain for which {@link PollData} is being requested - * @return the {@link PollData} for the given task queue in the specified domain - */ - PollData getPollData(String taskDefName, String domain); - - /** - * Retrieve the {@link PollData} for the given task across all domains. 
- * - * @param taskDefName name of the task as specified in the task definition - * @return the {@link PollData} for the given task queue in all domains - */ - List getPollData(String taskDefName); - - /** - * Retrieve the {@link PollData} for all task types - * - * @return the {@link PollData} for all task types - */ - default List getAllPollData() { - throw new UnsupportedOperationException( - "The selected PollDataDAO (" - + this.getClass().getSimpleName() - + ") does not implement the getAllPollData() method"); - } -} diff --git a/core/src/main/java/com/netflix/conductor/dao/QueueDAO.java b/core/src/main/java/com/netflix/conductor/dao/QueueDAO.java deleted file mode 100644 index 70b5857c5..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/QueueDAO.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; -import java.util.Map; - -import com.netflix.conductor.core.events.queue.Message; - -/** DAO responsible for managing queuing for the tasks. */ -public interface QueueDAO { - - /** - * @param queueName name of the queue - * @param id message id - * @param offsetTimeInSecond time in seconds, after which the message should be marked visible. - * (for timed queues) - */ - void push(String queueName, String id, long offsetTimeInSecond); - - /** - * @param queueName name of the queue - * @param id message id - * @param priority message priority (between 0 and 99) - * @param offsetTimeInSecond time in seconds, after which the message should be marked visible. - * (for timed queues) - */ - void push(String queueName, String id, int priority, long offsetTimeInSecond); - - /** - * @param queueName Name of the queue - * @param messages messages to be pushed. - */ - void push(String queueName, List messages); - - /** - * @param queueName Name of the queue - * @param id message id - * @param offsetTimeInSecond time in seconds, after which the message should be marked visible. - * (for timed queues) - * @return true if the element was added to the queue. false otherwise indicating the element - * already exists in the queue. - */ - boolean pushIfNotExists(String queueName, String id, long offsetTimeInSecond); - - /** - * @param queueName Name of the queue - * @param id message id - * @param priority message priority (between 0 and 99) - * @param offsetTimeInSecond time in seconds, after which the message should be marked visible. 
- * (for timed queues) - * @return true if the element was added to the queue. false otherwise indicating the element - * already exists in the queue. - */ - boolean pushIfNotExists(String queueName, String id, int priority, long offsetTimeInSecond); - - /** - * @param queueName Name of the queue - * @param count number of messages to be read from the queue - * @param timeout timeout in milliseconds - * @return list of elements from the named queue - */ - List pop(String queueName, int count, int timeout); - - /** - * @param queueName Name of the queue - * @param count number of messages to be read from the queue - * @param timeout timeout in milliseconds - * @return list of elements from the named queue - */ - List pollMessages(String queueName, int count, int timeout); - - /** - * @param queueName Name of the queue - * @param messageId Message id - */ - void remove(String queueName, String messageId); - - /** - * @param queueName Name of the queue - * @return size of the queue - */ - int getSize(String queueName); - - /** - * @param queueName Name of the queue - * @param messageId Message Id - * @return true if the message was found and ack'ed - */ - boolean ack(String queueName, String messageId); - - /** - * Extend the lease of the unacknowledged message for longer period. - * - * @param queueName Name of the queue - * @param messageId Message Id - * @param unackTimeout timeout in milliseconds for which the unack lease should be extended. - * (replaces the current value with this value) - * @return true if the message was updated with extended lease. false otherwise. 
- */ - boolean setUnackTimeout(String queueName, String messageId, long unackTimeout); - - /** - * @param queueName Name of the queue - */ - void flush(String queueName); - - /** - * @return key : queue name, value: size of the queue - */ - Map queuesDetail(); - - /** - * @return key : queue name, value: map of shard name to size and unack queue size - */ - Map>> queuesDetailVerbose(); - - default void processUnacks(String queueName) {} - - /** - * Resets the offsetTime on a message to 0, without pulling out the message from the queue - * - * @param queueName name of the queue - * @param id message id - * @return true if the message is in queue and the change was successful else returns false - */ - boolean resetOffsetTime(String queueName, String id); - - /** - * Postpone a given message with postponeDurationInSeconds, so that the message won't be - * available for further polls until specified duration. By default, the message is removed and - * pushed backed with postponeDurationInSeconds to be backwards compatible. - * - * @param queueName name of the queue - * @param messageId message id - * @param priority message priority (between 0 and 99) - * @param postponeDurationInSeconds duration in seconds by which the message is to be postponed - */ - default boolean postpone( - String queueName, String messageId, int priority, long postponeDurationInSeconds) { - remove(queueName, messageId); - push(queueName, messageId, priority, postponeDurationInSeconds); - return true; - } - - /** - * Check if the message with given messageId exists in the Queue. 
- * - * @param queueName - * @param messageId - * @return - */ - default boolean containsMessage(String queueName, String messageId) { - throw new UnsupportedOperationException( - "Please ensure your provided Queue implementation overrides and implements this method."); - } -} diff --git a/core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java b/core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java deleted file mode 100644 index 6ef4b4859..000000000 --- a/core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.model.TaskModel; - -/** An abstraction to enable different Rate Limiting implementations */ -public interface RateLimitingDAO { - - /** - * Checks if the Task is rate limited or not based on the {@link - * TaskModel#getRateLimitPerFrequency()} and {@link TaskModel#getRateLimitFrequencyInSeconds()} - * - * @param task: which needs to be evaluated whether it is rateLimited or not - * @return true: If the {@link TaskModel} is rateLimited false: If the {@link TaskModel} is not - * rateLimited - */ - boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef); -} diff --git a/core/src/main/java/com/netflix/conductor/metrics/Monitors.java b/core/src/main/java/com/netflix/conductor/metrics/Monitors.java deleted file mode 100644 index 5dc6d0fc7..000000000 --- a/core/src/main/java/com/netflix/conductor/metrics/Monitors.java +++ /dev/null @@ -1,568 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.metrics; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.spectator.api.Counter; -import com.netflix.spectator.api.DistributionSummary; -import com.netflix.spectator.api.Gauge; -import com.netflix.spectator.api.Id; -import com.netflix.spectator.api.Registry; -import com.netflix.spectator.api.Spectator; -import com.netflix.spectator.api.Timer; -import com.netflix.spectator.api.histogram.PercentileTimer; - -public class Monitors { - - private static final Registry registry = Spectator.globalRegistry(); - - public static final String NO_DOMAIN = "NO_DOMAIN"; - - private static final Map, Counter>> counters = - new ConcurrentHashMap<>(); - - private static final Map, PercentileTimer>> timers = - new ConcurrentHashMap<>(); - - private static final Map, Gauge>> gauges = - new ConcurrentHashMap<>(); - - private static final Map, DistributionSummary>> - distributionSummaries = new ConcurrentHashMap<>(); - - public static final String classQualifier = "WorkflowMonitor"; - - private Monitors() {} - - /** - * Increment a counter that is used to measure the rate at which some event is occurring. - * Consider a simple queue, counters would be used to measure things like the rate at which - * items are being inserted and removed. - * - * @param className - * @param name - * @param additionalTags - */ - private static void counter(String className, String name, String... 
additionalTags) { - getCounter(className, name, additionalTags).increment(); - } - - /** - * Set a gauge is a handle to get the current value. Typical examples for gauges would be the - * size of a queue or number of threads in the running state. Since gauges are sampled, there is - * no information about what might have occurred between samples. - * - * @param className - * @param name - * @param measurement - * @param additionalTags - */ - private static void gauge( - String className, String name, long measurement, String... additionalTags) { - getGauge(className, name, additionalTags).set(measurement); - } - - /** - * Records a value for an event as a distribution summary. Unlike a gauge, this is sampled - * multiple times during a minute or everytime a new value is recorded. - * - * @param className - * @param name - * @param additionalTags - */ - private static void distributionSummary( - String className, String name, long value, String... additionalTags) { - getDistributionSummary(className, name, additionalTags).record(value); - } - - private static Timer getTimer(String className, String name, String... additionalTags) { - Map tags = toMap(className, additionalTags); - return timers.computeIfAbsent(name, s -> new ConcurrentHashMap<>()) - .computeIfAbsent( - tags, - t -> { - Id id = registry.createId(name, tags); - return PercentileTimer.get(registry, id); - }); - } - - private static Counter getCounter(String className, String name, String... additionalTags) { - Map tags = toMap(className, additionalTags); - - return counters.computeIfAbsent(name, s -> new ConcurrentHashMap<>()) - .computeIfAbsent( - tags, - t -> { - Id id = registry.createId(name, tags); - return registry.counter(id); - }); - } - - private static Gauge getGauge(String className, String name, String... 
additionalTags) { - Map tags = toMap(className, additionalTags); - - return gauges.computeIfAbsent(name, s -> new ConcurrentHashMap<>()) - .computeIfAbsent( - tags, - t -> { - Id id = registry.createId(name, tags); - return registry.gauge(id); - }); - } - - private static DistributionSummary getDistributionSummary( - String className, String name, String... additionalTags) { - Map tags = toMap(className, additionalTags); - - return distributionSummaries - .computeIfAbsent(name, s -> new ConcurrentHashMap<>()) - .computeIfAbsent( - tags, - t -> { - Id id = registry.createId(name, tags); - return registry.distributionSummary(id); - }); - } - - private static Map toMap(String className, String... additionalTags) { - Map tags = new HashMap<>(); - tags.put("class", className); - for (int j = 0; j < additionalTags.length - 1; j++) { - String tk = additionalTags[j]; - String tv = "" + additionalTags[j + 1]; - if (!tv.isEmpty()) { - tags.put(tk, tv); - } - j++; - } - return tags; - } - - /** - * @param className Name of the class - * @param methodName Method name - */ - public static void error(String className, String methodName) { - getCounter(className, "workflow_server_error", "methodName", methodName).increment(); - } - - public static void recordGauge(String name, long count) { - gauge(classQualifier, name, count); - } - - public static void recordQueueWaitTime(String taskType, long queueWaitTime) { - getTimer(classQualifier, "task_queue_wait", "taskType", taskType) - .record(queueWaitTime, TimeUnit.MILLISECONDS); - } - - public static void recordTaskExecutionTime( - String taskType, long duration, boolean includesRetries, TaskModel.Status status) { - getTimer( - classQualifier, - "task_execution", - "taskType", - taskType, - "includeRetries", - "" + includesRetries, - "status", - status.name()) - .record(duration, TimeUnit.MILLISECONDS); - } - - public static void recordTaskPollError(String taskType, String exception) { - recordTaskPollError(taskType, NO_DOMAIN, 
exception); - } - - public static void recordTaskPollError(String taskType, String domain, String exception) { - counter( - classQualifier, - "task_poll_error", - "taskType", - taskType, - "domain", - domain, - "exception", - exception); - } - - public static void recordTaskPoll(String taskType) { - counter(classQualifier, "task_poll", "taskType", taskType); - } - - public static void recordTaskPollCount(String taskType, int count) { - recordTaskPollCount(taskType, NO_DOMAIN, count); - } - - public static void recordTaskPollCount(String taskType, String domain, int count) { - getCounter(classQualifier, "task_poll_count", "taskType", taskType, "domain", domain) - .increment(count); - } - - public static void recordQueueDepth(String taskType, long size, String ownerApp) { - gauge( - classQualifier, - "task_queue_depth", - size, - "taskType", - taskType, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordTaskInProgress(String taskType, long size, String ownerApp) { - gauge( - classQualifier, - "task_in_progress", - size, - "taskType", - taskType, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordRunningWorkflows( - long count, String name, String version, String ownerApp) { - gauge( - classQualifier, - "workflow_running", - count, - "workflowName", - name, - "version", - version, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordNumTasksInWorkflow(long count, String name, String version) { - distributionSummary( - classQualifier, - "tasks_in_workflow", - count, - "workflowName", - name, - "version", - version); - } - - public static void recordTaskTimeout(String taskType) { - counter(classQualifier, "task_timeout", "taskType", taskType); - } - - public static void recordTaskResponseTimeout(String taskType) { - counter(classQualifier, "task_response_timeout", "taskType", taskType); - } - - public static void 
recordTaskPendingTime(String taskType, String workflowType, long duration) { - gauge( - classQualifier, - "task_pending_time", - duration, - "workflowName", - workflowType, - "taskType", - taskType); - } - - public static void recordWorkflowTermination( - String workflowType, WorkflowModel.Status status, String ownerApp) { - counter( - classQualifier, - "workflow_failure", - "workflowName", - workflowType, - "status", - status.name(), - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordWorkflowStartSuccess( - String workflowType, String version, String ownerApp) { - counter( - classQualifier, - "workflow_start_success", - "workflowName", - workflowType, - "version", - version, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordWorkflowStartError(String workflowType, String ownerApp) { - counter( - classQualifier, - "workflow_start_error", - "workflowName", - workflowType, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")); - } - - public static void recordUpdateConflict( - String taskType, String workflowType, WorkflowModel.Status status) { - counter( - classQualifier, - "task_update_conflict", - "workflowName", - workflowType, - "taskType", - taskType, - "workflowStatus", - status.name()); - } - - public static void recordUpdateConflict( - String taskType, String workflowType, TaskModel.Status status) { - counter( - classQualifier, - "task_update_conflict", - "workflowName", - workflowType, - "taskType", - taskType, - "taskStatus", - status.name()); - } - - public static void recordTaskUpdateError(String taskType, String workflowType) { - counter( - classQualifier, - "task_update_error", - "workflowName", - workflowType, - "taskType", - taskType); - } - - public static void recordTaskQueueOpError(String taskType, String workflowType) { - counter( - classQualifier, - "task_queue_op_error", - "workflowName", - workflowType, - "taskType", - taskType); - } - - 
public static void recordWorkflowCompletion( - String workflowType, long duration, String ownerApp) { - getTimer( - classQualifier, - "workflow_execution", - "workflowName", - workflowType, - "ownerApp", - StringUtils.defaultIfBlank(ownerApp, "unknown")) - .record(duration, TimeUnit.MILLISECONDS); - } - - public static void recordTaskRateLimited(String taskDefName, int limit) { - gauge(classQualifier, "task_rate_limited", limit, "taskType", taskDefName); - } - - public static void recordTaskConcurrentExecutionLimited(String taskDefName, int limit) { - gauge(classQualifier, "task_concurrent_execution_limited", limit, "taskType", taskDefName); - } - - public static void recordEventQueueMessagesProcessed( - String queueType, String queueName, int count) { - getCounter( - classQualifier, - "event_queue_messages_processed", - "queueType", - queueType, - "queueName", - queueName) - .increment(count); - } - - public static void recordObservableQMessageReceivedErrors(String queueType) { - counter(classQualifier, "observable_queue_error", "queueType", queueType); - } - - public static void recordEventQueueMessagesHandled(String queueType, String queueName) { - counter( - classQualifier, - "event_queue_messages_handled", - "queueType", - queueType, - "queueName", - queueName); - } - - public static void recordEventQueueMessagesError(String queueType, String queueName) { - counter( - classQualifier, - "event_queue_messages_error", - "queueType", - queueType, - "queueName", - queueName); - } - - public static void recordEventExecutionSuccess(String event, String handler, String action) { - counter( - classQualifier, - "event_execution_success", - "event", - event, - "handler", - handler, - "action", - action); - } - - public static void recordEventExecutionError( - String event, String handler, String action, String exceptionClazz) { - counter( - classQualifier, - "event_execution_error", - "event", - event, - "handler", - handler, - "action", - action, - "exception", - 
exceptionClazz); - } - - public static void recordEventActionError(String action, String entityName, String event) { - counter( - classQualifier, - "event_action_error", - "action", - action, - "entityName", - entityName, - "event", - event); - } - - public static void recordDaoRequests( - String dao, String action, String taskType, String workflowType) { - counter( - classQualifier, - "dao_requests", - "dao", - dao, - "action", - action, - "taskType", - StringUtils.defaultIfBlank(taskType, "unknown"), - "workflowType", - StringUtils.defaultIfBlank(workflowType, "unknown")); - } - - public static void recordDaoEventRequests(String dao, String action, String event) { - counter(classQualifier, "dao_event_requests", "dao", dao, "action", action, "event", event); - } - - public static void recordDaoPayloadSize( - String dao, String action, String taskType, String workflowType, int size) { - gauge( - classQualifier, - "dao_payload_size", - size, - "dao", - dao, - "action", - action, - "taskType", - StringUtils.defaultIfBlank(taskType, "unknown"), - "workflowType", - StringUtils.defaultIfBlank(workflowType, "unknown")); - } - - public static void recordExternalPayloadStorageUsage( - String name, String operation, String payloadType) { - counter( - classQualifier, - "external_payload_storage_usage", - "name", - name, - "operation", - operation, - "payloadType", - payloadType); - } - - public static void recordDaoError(String dao, String action) { - counter(classQualifier, "dao_errors", "dao", dao, "action", action); - } - - public static void recordAckTaskError(String taskType) { - counter(classQualifier, "task_ack_error", "taskType", taskType); - } - - public static void recordESIndexTime(String action, String docType, long val) { - getTimer(Monitors.classQualifier, action, "docType", docType) - .record(val, TimeUnit.MILLISECONDS); - } - - public static void recordWorkerQueueSize(String queueType, int val) { - gauge(Monitors.classQualifier, "indexing_worker_queue", val, 
"queueType", queueType); - } - - public static void recordDiscardedIndexingCount(String queueType) { - counter(Monitors.classQualifier, "discarded_index_count", "queueType", queueType); - } - - public static void recordAcquireLockUnsuccessful() { - counter(classQualifier, "acquire_lock_unsuccessful"); - } - - public static void recordAcquireLockFailure(String exceptionClassName) { - counter(classQualifier, "acquire_lock_failure", "exceptionType", exceptionClassName); - } - - public static void recordWorkflowArchived(String workflowType, WorkflowModel.Status status) { - counter( - classQualifier, - "workflow_archived", - "workflowName", - workflowType, - "workflowStatus", - status.name()); - } - - public static void recordArchivalDelayQueueSize(int val) { - gauge(classQualifier, "workflow_archival_delay_queue_size", val); - } - - public static void recordDiscardedArchivalCount() { - counter(classQualifier, "discarded_archival_count"); - } - - public static void recordSystemTaskWorkerPollingLimited(String queueName) { - counter(classQualifier, "system_task_worker_polling_limited", "queueName", queueName); - } - - public static void recordEventQueuePollSize(String queueType, int val) { - gauge(Monitors.classQualifier, "event_queue_poll", val, "queueType", queueType); - } - - public static void recordQueueMessageRepushFromRepairService(String queueName) { - counter(classQualifier, "queue_message_repushed", "queueName", queueName); - } -} diff --git a/core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java b/core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java deleted file mode 100644 index fdcb39ee0..000000000 --- a/core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.metrics; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.service.MetadataService; - -import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER; - -@Component -@ConditionalOnProperty( - name = "conductor.workflow-monitor.enabled", - havingValue = "true", - matchIfMissing = true) -public class WorkflowMonitor { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowMonitor.class); - - private final MetadataService metadataService; - private final QueueDAO queueDAO; - private final ExecutionDAOFacade executionDAOFacade; - private final int metadataRefreshInterval; - private final Set asyncSystemTasks; - - private List taskDefs; - private List workflowDefs; - private int refreshCounter = 0; - - public WorkflowMonitor( - MetadataService metadataService, - QueueDAO queueDAO, - ExecutionDAOFacade executionDAOFacade, - 
@Value("${conductor.workflow-monitor.metadata-refresh-interval:10}") - int metadataRefreshInterval, - @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set asyncSystemTasks) { - this.metadataService = metadataService; - this.queueDAO = queueDAO; - this.executionDAOFacade = executionDAOFacade; - this.metadataRefreshInterval = metadataRefreshInterval; - this.asyncSystemTasks = asyncSystemTasks; - LOGGER.info("{} initialized.", WorkflowMonitor.class.getSimpleName()); - } - - @Scheduled( - initialDelayString = "${conductor.workflow-monitor.stats.initial-delay:120000}", - fixedDelayString = "${conductor.workflow-monitor.stats.delay:60000}") - public void reportMetrics() { - try { - if (refreshCounter <= 0) { - workflowDefs = metadataService.getWorkflowDefs(); - taskDefs = new ArrayList<>(metadataService.getTaskDefs()); - refreshCounter = metadataRefreshInterval; - } - - workflowDefs.forEach( - workflowDef -> { - String name = workflowDef.getName(); - String version = String.valueOf(workflowDef.getVersion()); - String ownerApp = workflowDef.getOwnerApp(); - long count = executionDAOFacade.getPendingWorkflowCount(name); - Monitors.recordRunningWorkflows(count, name, version, ownerApp); - }); - - taskDefs.forEach( - taskDef -> { - long size = queueDAO.getSize(taskDef.getName()); - long inProgressCount = - executionDAOFacade.getInProgressTaskCount(taskDef.getName()); - Monitors.recordQueueDepth(taskDef.getName(), size, taskDef.getOwnerApp()); - if (taskDef.concurrencyLimit() > 0) { - Monitors.recordTaskInProgress( - taskDef.getName(), inProgressCount, taskDef.getOwnerApp()); - } - }); - - asyncSystemTasks.forEach( - workflowSystemTask -> { - long size = queueDAO.getSize(workflowSystemTask.getTaskType()); - long inProgressCount = - executionDAOFacade.getInProgressTaskCount( - workflowSystemTask.getTaskType()); - Monitors.recordQueueDepth(workflowSystemTask.getTaskType(), size, "system"); - Monitors.recordTaskInProgress( - workflowSystemTask.getTaskType(), inProgressCount, "system"); 
- }); - - refreshCounter--; - } catch (Exception e) { - LOGGER.error("Error while publishing scheduled metrics", e); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/model/TaskModel.java b/core/src/main/java/com/netflix/conductor/model/TaskModel.java deleted file mode 100644 index 94cc47b13..000000000 --- a/core/src/main/java/com/netflix/conductor/model/TaskModel.java +++ /dev/null @@ -1,856 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.model; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -import org.apache.commons.lang3.StringUtils; -import org.springframework.beans.BeanUtils; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.protobuf.Any; - -public class TaskModel { - - public enum Status { - IN_PROGRESS(false, true, true), - CANCELED(true, false, false), - FAILED(true, false, true), - FAILED_WITH_TERMINAL_ERROR(true, false, false), - COMPLETED(true, true, true), - COMPLETED_WITH_ERRORS(true, true, true), - SCHEDULED(false, true, true), - TIMED_OUT(true, false, true), - SKIPPED(true, true, false); - - private final boolean terminal; - - private final boolean successful; - - private final boolean retriable; - - Status(boolean terminal, boolean successful, boolean retriable) { - this.terminal = terminal; - this.successful = successful; - this.retriable = retriable; - } - - public boolean isTerminal() { - return terminal; - } - - public boolean isSuccessful() { - return successful; - } - - public boolean isRetriable() { - return retriable; - } - } - - private String taskType; - - private Status status; - - private String referenceTaskName; - - private int retryCount; - - private int seq; - - private String correlationId; - - private int pollCount; - - private String taskDefName; - - /** Time when the task was scheduled */ 
- private long scheduledTime; - - /** Time when the task was first polled */ - private long startTime; - - /** Time when the task completed executing */ - private long endTime; - - /** Time when the task was last updated */ - private long updateTime; - - private int startDelayInSeconds; - - private String retriedTaskId; - - private boolean retried; - - private boolean executed; - - private boolean callbackFromWorker = true; - - private long responseTimeoutSeconds; - - private String workflowInstanceId; - - private String workflowType; - - private String taskId; - - private String reasonForIncompletion; - - private long callbackAfterSeconds; - - private String workerId; - - private WorkflowTask workflowTask; - - private String domain; - - private Any inputMessage; - - private Any outputMessage; - - // id 31 is reserved - - private int rateLimitPerFrequency; - - private int rateLimitFrequencyInSeconds; - - private String externalInputPayloadStoragePath; - - private String externalOutputPayloadStoragePath; - - private int workflowPriority; - - private String executionNameSpace; - - private String isolationGroupId; - - private int iteration; - - private String subWorkflowId; - - // Timeout after which the wait task should be marked as completed - private long waitTimeout; - - /** - * Used to note that a sub workflow associated with SUB_WORKFLOW task has an action performed on - * it directly. 
- */ - private boolean subworkflowChanged; - - @JsonIgnore private Map inputPayload = new HashMap<>(); - - @JsonIgnore private Map outputPayload = new HashMap<>(); - - @JsonIgnore private Map inputData = new HashMap<>(); - - @JsonIgnore private Map outputData = new HashMap<>(); - - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - public Status getStatus() { - return status; - } - - public void setStatus(Status status) { - this.status = status; - } - - @JsonIgnore - public Map getInputData() { - return externalInputPayloadStoragePath != null ? inputPayload : inputData; - } - - @JsonIgnore - public void setInputData(Map inputData) { - if (inputData == null) { - inputData = new HashMap<>(); - } - this.inputData = inputData; - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @JsonProperty("inputData") - @Deprecated - public void setRawInputData(Map inputData) { - setInputData(inputData); - } - - /** - * @deprecated Used only for JSON serialization and deserialization. 
- */ - @JsonProperty("inputData") - @Deprecated - public Map getRawInputData() { - return inputData; - } - - public String getReferenceTaskName() { - return referenceTaskName; - } - - public void setReferenceTaskName(String referenceTaskName) { - this.referenceTaskName = referenceTaskName; - } - - public int getRetryCount() { - return retryCount; - } - - public void setRetryCount(int retryCount) { - this.retryCount = retryCount; - } - - public int getSeq() { - return seq; - } - - public void setSeq(int seq) { - this.seq = seq; - } - - public String getCorrelationId() { - return correlationId; - } - - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - public int getPollCount() { - return pollCount; - } - - public void setPollCount(int pollCount) { - this.pollCount = pollCount; - } - - public String getTaskDefName() { - if (taskDefName == null || "".equals(taskDefName)) { - taskDefName = taskType; - } - return taskDefName; - } - - public void setTaskDefName(String taskDefName) { - this.taskDefName = taskDefName; - } - - public long getScheduledTime() { - return scheduledTime; - } - - public void setScheduledTime(long scheduledTime) { - this.scheduledTime = scheduledTime; - } - - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - public long getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(long updateTime) { - this.updateTime = updateTime; - } - - public int getStartDelayInSeconds() { - return startDelayInSeconds; - } - - public void setStartDelayInSeconds(int startDelayInSeconds) { - this.startDelayInSeconds = startDelayInSeconds; - } - - public String getRetriedTaskId() { - return retriedTaskId; - } - - public void setRetriedTaskId(String retriedTaskId) { - this.retriedTaskId = 
retriedTaskId; - } - - public boolean isRetried() { - return retried; - } - - public void setRetried(boolean retried) { - this.retried = retried; - } - - public boolean isExecuted() { - return executed; - } - - public void setExecuted(boolean executed) { - this.executed = executed; - } - - public boolean isCallbackFromWorker() { - return callbackFromWorker; - } - - public void setCallbackFromWorker(boolean callbackFromWorker) { - this.callbackFromWorker = callbackFromWorker; - } - - public long getResponseTimeoutSeconds() { - return responseTimeoutSeconds; - } - - public void setResponseTimeoutSeconds(long responseTimeoutSeconds) { - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getWorkflowType() { - return workflowType; - } - - public void setWorkflowType(String workflowType) { - this.workflowType = workflowType; - } - - public String getTaskId() { - return taskId; - } - - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - public String getWorkerId() { - return workerId; - } - - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - @JsonIgnore - public Map getOutputData() { - return externalOutputPayloadStoragePath != null ? 
outputPayload : outputData; - } - - @JsonIgnore - public void setOutputData(Map outputData) { - if (outputData == null) { - outputData = new HashMap<>(); - } - this.outputData = outputData; - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @JsonProperty("outputData") - @Deprecated - public void setRawOutputData(Map inputData) { - setOutputData(inputData); - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @JsonProperty("outputData") - @Deprecated - public Map getRawOutputData() { - return outputData; - } - - public WorkflowTask getWorkflowTask() { - return workflowTask; - } - - public void setWorkflowTask(WorkflowTask workflowTask) { - this.workflowTask = workflowTask; - } - - public String getDomain() { - return domain; - } - - public void setDomain(String domain) { - this.domain = domain; - } - - public Any getInputMessage() { - return inputMessage; - } - - public void setInputMessage(Any inputMessage) { - this.inputMessage = inputMessage; - } - - public Any getOutputMessage() { - return outputMessage; - } - - public void setOutputMessage(Any outputMessage) { - this.outputMessage = outputMessage; - } - - public int getRateLimitPerFrequency() { - return rateLimitPerFrequency; - } - - public void setRateLimitPerFrequency(int rateLimitPerFrequency) { - this.rateLimitPerFrequency = rateLimitPerFrequency; - } - - public int getRateLimitFrequencyInSeconds() { - return rateLimitFrequencyInSeconds; - } - - public void setRateLimitFrequencyInSeconds(int rateLimitFrequencyInSeconds) { - this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds; - } - - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - public String getExternalOutputPayloadStoragePath() { - return 
externalOutputPayloadStoragePath; - } - - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - public int getWorkflowPriority() { - return workflowPriority; - } - - public void setWorkflowPriority(int workflowPriority) { - this.workflowPriority = workflowPriority; - } - - public String getExecutionNameSpace() { - return executionNameSpace; - } - - public void setExecutionNameSpace(String executionNameSpace) { - this.executionNameSpace = executionNameSpace; - } - - public String getIsolationGroupId() { - return isolationGroupId; - } - - public void setIsolationGroupId(String isolationGroupId) { - this.isolationGroupId = isolationGroupId; - } - - public int getIteration() { - return iteration; - } - - public void setIteration(int iteration) { - this.iteration = iteration; - } - - public String getSubWorkflowId() { - // For backwards compatibility - if (StringUtils.isNotBlank(subWorkflowId)) { - return subWorkflowId; - } else { - return this.getOutputData() != null && this.getOutputData().get("subWorkflowId") != null - ? (String) this.getOutputData().get("subWorkflowId") - : this.getInputData() != null - ? 
(String) this.getInputData().get("subWorkflowId") - : null; - } - } - - public void setSubWorkflowId(String subWorkflowId) { - this.subWorkflowId = subWorkflowId; - // For backwards compatibility - if (this.outputData != null && this.outputData.containsKey("subWorkflowId")) { - this.outputData.put("subWorkflowId", subWorkflowId); - } - } - - public boolean isSubworkflowChanged() { - return subworkflowChanged; - } - - public void setSubworkflowChanged(boolean subworkflowChanged) { - this.subworkflowChanged = subworkflowChanged; - } - - public void incrementPollCount() { - ++this.pollCount; - } - - /** - * @return {@link Optional} containing the task definition if available - */ - public Optional getTaskDefinition() { - return Optional.ofNullable(this.getWorkflowTask()).map(WorkflowTask::getTaskDefinition); - } - - public boolean isLoopOverTask() { - return iteration > 0; - } - - public long getWaitTimeout() { - return waitTimeout; - } - - public void setWaitTimeout(long waitTimeout) { - this.waitTimeout = waitTimeout; - } - - /** - * @return the queueWaitTime - */ - public long getQueueWaitTime() { - if (this.startTime > 0 && this.scheduledTime > 0) { - if (this.updateTime > 0 && getCallbackAfterSeconds() > 0) { - long waitTime = - System.currentTimeMillis() - - (this.updateTime + (getCallbackAfterSeconds() * 1000)); - return waitTime > 0 ? 
waitTime : 0; - } else { - return this.startTime - this.scheduledTime; - } - } - return 0L; - } - - /** - * @return a copy of the task instance - */ - public TaskModel copy() { - TaskModel copy = new TaskModel(); - BeanUtils.copyProperties(this, copy); - return copy; - } - - public void externalizeInput(String path) { - this.inputPayload = this.inputData; - this.inputData = new HashMap<>(); - this.externalInputPayloadStoragePath = path; - } - - public void externalizeOutput(String path) { - this.outputPayload = this.outputData; - this.outputData = new HashMap<>(); - this.externalOutputPayloadStoragePath = path; - } - - public void internalizeInput(Map data) { - this.inputData = new HashMap<>(); - this.inputPayload = data; - } - - public void internalizeOutput(Map data) { - this.outputData = new HashMap<>(); - this.outputPayload = data; - } - - @Override - public String toString() { - return "TaskModel{" - + "taskType='" - + taskType - + '\'' - + ", status=" - + status - + ", inputData=" - + inputData - + ", referenceTaskName='" - + referenceTaskName - + '\'' - + ", retryCount=" - + retryCount - + ", seq=" - + seq - + ", correlationId='" - + correlationId - + '\'' - + ", pollCount=" - + pollCount - + ", taskDefName='" - + taskDefName - + '\'' - + ", scheduledTime=" - + scheduledTime - + ", startTime=" - + startTime - + ", endTime=" - + endTime - + ", updateTime=" - + updateTime - + ", startDelayInSeconds=" - + startDelayInSeconds - + ", retriedTaskId='" - + retriedTaskId - + '\'' - + ", retried=" - + retried - + ", executed=" - + executed - + ", callbackFromWorker=" - + callbackFromWorker - + ", responseTimeoutSeconds=" - + responseTimeoutSeconds - + ", workflowInstanceId='" - + workflowInstanceId - + '\'' - + ", workflowType='" - + workflowType - + '\'' - + ", taskId='" - + taskId - + '\'' - + ", reasonForIncompletion='" - + reasonForIncompletion - + '\'' - + ", callbackAfterSeconds=" - + callbackAfterSeconds - + ", workerId='" - + workerId - + '\'' - + ", 
outputData=" - + outputData - + ", workflowTask=" - + workflowTask - + ", domain='" - + domain - + '\'' - + ", waitTimeout='" - + waitTimeout - + '\'' - + ", inputMessage=" - + inputMessage - + ", outputMessage=" - + outputMessage - + ", rateLimitPerFrequency=" - + rateLimitPerFrequency - + ", rateLimitFrequencyInSeconds=" - + rateLimitFrequencyInSeconds - + ", externalInputPayloadStoragePath='" - + externalInputPayloadStoragePath - + '\'' - + ", externalOutputPayloadStoragePath='" - + externalOutputPayloadStoragePath - + '\'' - + ", workflowPriority=" - + workflowPriority - + ", executionNameSpace='" - + executionNameSpace - + '\'' - + ", isolationGroupId='" - + isolationGroupId - + '\'' - + ", iteration=" - + iteration - + ", subWorkflowId='" - + subWorkflowId - + '\'' - + ", subworkflowChanged=" - + subworkflowChanged - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TaskModel taskModel = (TaskModel) o; - return getRetryCount() == taskModel.getRetryCount() - && getSeq() == taskModel.getSeq() - && getPollCount() == taskModel.getPollCount() - && getScheduledTime() == taskModel.getScheduledTime() - && getStartTime() == taskModel.getStartTime() - && getEndTime() == taskModel.getEndTime() - && getUpdateTime() == taskModel.getUpdateTime() - && getStartDelayInSeconds() == taskModel.getStartDelayInSeconds() - && isRetried() == taskModel.isRetried() - && isExecuted() == taskModel.isExecuted() - && isCallbackFromWorker() == taskModel.isCallbackFromWorker() - && getResponseTimeoutSeconds() == taskModel.getResponseTimeoutSeconds() - && getCallbackAfterSeconds() == taskModel.getCallbackAfterSeconds() - && getRateLimitPerFrequency() == taskModel.getRateLimitPerFrequency() - && getRateLimitFrequencyInSeconds() == taskModel.getRateLimitFrequencyInSeconds() - && getWorkflowPriority() == taskModel.getWorkflowPriority() - && getIteration() == taskModel.getIteration() - && 
isSubworkflowChanged() == taskModel.isSubworkflowChanged() - && Objects.equals(getTaskType(), taskModel.getTaskType()) - && getStatus() == taskModel.getStatus() - && Objects.equals(getInputData(), taskModel.getInputData()) - && Objects.equals(getReferenceTaskName(), taskModel.getReferenceTaskName()) - && Objects.equals(getCorrelationId(), taskModel.getCorrelationId()) - && Objects.equals(getTaskDefName(), taskModel.getTaskDefName()) - && Objects.equals(getRetriedTaskId(), taskModel.getRetriedTaskId()) - && Objects.equals(getWorkflowInstanceId(), taskModel.getWorkflowInstanceId()) - && Objects.equals(getWorkflowType(), taskModel.getWorkflowType()) - && Objects.equals(getTaskId(), taskModel.getTaskId()) - && Objects.equals(getReasonForIncompletion(), taskModel.getReasonForIncompletion()) - && Objects.equals(getWorkerId(), taskModel.getWorkerId()) - && Objects.equals(getWaitTimeout(), taskModel.getWaitTimeout()) - && Objects.equals(getOutputData(), taskModel.getOutputData()) - && Objects.equals(getWorkflowTask(), taskModel.getWorkflowTask()) - && Objects.equals(getDomain(), taskModel.getDomain()) - && Objects.equals(getInputMessage(), taskModel.getInputMessage()) - && Objects.equals(getOutputMessage(), taskModel.getOutputMessage()) - && Objects.equals( - getExternalInputPayloadStoragePath(), - taskModel.getExternalInputPayloadStoragePath()) - && Objects.equals( - getExternalOutputPayloadStoragePath(), - taskModel.getExternalOutputPayloadStoragePath()) - && Objects.equals(getExecutionNameSpace(), taskModel.getExecutionNameSpace()) - && Objects.equals(getIsolationGroupId(), taskModel.getIsolationGroupId()) - && Objects.equals(getSubWorkflowId(), taskModel.getSubWorkflowId()); - } - - @Override - public int hashCode() { - return Objects.hash( - getTaskType(), - getStatus(), - getInputData(), - getReferenceTaskName(), - getRetryCount(), - getSeq(), - getCorrelationId(), - getPollCount(), - getTaskDefName(), - getScheduledTime(), - getStartTime(), - getEndTime(), - 
getUpdateTime(), - getStartDelayInSeconds(), - getRetriedTaskId(), - isRetried(), - isExecuted(), - isCallbackFromWorker(), - getResponseTimeoutSeconds(), - getWorkflowInstanceId(), - getWorkflowType(), - getTaskId(), - getReasonForIncompletion(), - getCallbackAfterSeconds(), - getWorkerId(), - getWaitTimeout(), - getOutputData(), - getWorkflowTask(), - getDomain(), - getInputMessage(), - getOutputMessage(), - getRateLimitPerFrequency(), - getRateLimitFrequencyInSeconds(), - getExternalInputPayloadStoragePath(), - getExternalOutputPayloadStoragePath(), - getWorkflowPriority(), - getExecutionNameSpace(), - getIsolationGroupId(), - getIteration(), - getSubWorkflowId(), - isSubworkflowChanged()); - } - - public Task toTask() { - Task task = new Task(); - BeanUtils.copyProperties(this, task); - task.setStatus(Task.Status.valueOf(status.name())); - - // ensure that input/output is properly represented - if (externalInputPayloadStoragePath != null) { - task.setInputData(new HashMap<>()); - } - if (externalOutputPayloadStoragePath != null) { - task.setOutputData(new HashMap<>()); - } - return task; - } - - public static Task.Status mapToTaskStatus(TaskModel.Status status) { - return Task.Status.valueOf(status.name()); - } - - public void addInput(String key, Object value) { - this.inputData.put(key, value); - } - - public void addInput(Map inputData) { - this.inputData.putAll(inputData); - } - - public void addOutput(String key, Object value) { - this.outputData.put(key, value); - } - - public void addOutput(Map outputData) { - this.outputData.putAll(outputData); - } -} diff --git a/core/src/main/java/com/netflix/conductor/model/WorkflowModel.java b/core/src/main/java/com/netflix/conductor/model/WorkflowModel.java deleted file mode 100644 index 315563365..000000000 --- a/core/src/main/java/com/netflix/conductor/model/WorkflowModel.java +++ /dev/null @@ -1,552 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.model; - -import java.util.*; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.springframework.beans.BeanUtils; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.core.utils.Utils; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class WorkflowModel { - - public enum Status { - RUNNING(false, false), - COMPLETED(true, true), - FAILED(true, false), - TIMED_OUT(true, false), - TERMINATED(true, false), - PAUSED(false, true); - - private final boolean terminal; - private final boolean successful; - - Status(boolean terminal, boolean successful) { - this.terminal = terminal; - this.successful = successful; - } - - public boolean isTerminal() { - return terminal; - } - - public boolean isSuccessful() { - return successful; - } - } - - private Status status = Status.RUNNING; - - private long endTime; - - private String workflowId; - - private String parentWorkflowId; - - private String parentWorkflowTaskId; - - private List tasks = new LinkedList<>(); - - private String correlationId; - - private String reRunFromWorkflowId; - - private String reasonForIncompletion; - - private String event; - - private Map taskToDomain = new HashMap<>(); - - private Set failedReferenceTaskNames = new HashSet<>(); - - private WorkflowDef workflowDefinition; - - private String externalInputPayloadStoragePath; - - private String externalOutputPayloadStoragePath; - - private int priority; - - private Map variables = new 
HashMap<>(); - - private long lastRetriedTime; - - private String ownerApp; - - private Long createTime; - - private Long updatedTime; - - private String createdBy; - - private String updatedBy; - - // Capture the failed taskId if the workflow execution failed because of task failure - private String failedTaskId; - - private Status previousStatus; - - @JsonIgnore private Map input = new HashMap<>(); - - @JsonIgnore private Map output = new HashMap<>(); - - @JsonIgnore private Map inputPayload = new HashMap<>(); - - @JsonIgnore private Map outputPayload = new HashMap<>(); - - public Status getPreviousStatus() { - return previousStatus; - } - - public void setPreviousStatus(Status status) { - this.previousStatus = status; - } - - public Status getStatus() { - return status; - } - - public void setStatus(Status status) { - // update previous status if current status changed - if (this.status != status) { - setPreviousStatus(this.status); - } - this.status = status; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - public String getWorkflowId() { - return workflowId; - } - - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - - public String getParentWorkflowId() { - return parentWorkflowId; - } - - public void setParentWorkflowId(String parentWorkflowId) { - this.parentWorkflowId = parentWorkflowId; - } - - public String getParentWorkflowTaskId() { - return parentWorkflowTaskId; - } - - public void setParentWorkflowTaskId(String parentWorkflowTaskId) { - this.parentWorkflowTaskId = parentWorkflowTaskId; - } - - public List getTasks() { - return tasks; - } - - public void setTasks(List tasks) { - this.tasks = tasks; - } - - @JsonIgnore - public Map getInput() { - return externalInputPayloadStoragePath != null ? 
inputPayload : input; - } - - @JsonIgnore - public void setInput(Map input) { - if (input == null) { - input = new HashMap<>(); - } - this.input = input; - } - - @JsonIgnore - public Map getOutput() { - return externalOutputPayloadStoragePath != null ? outputPayload : output; - } - - @JsonIgnore - public void setOutput(Map output) { - if (output == null) { - output = new HashMap<>(); - } - this.output = output; - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @Deprecated - @JsonProperty("input") - public Map getRawInput() { - return input; - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @Deprecated - @JsonProperty("input") - public void setRawInput(Map input) { - setInput(input); - } - - /** - * @deprecated Used only for JSON serialization and deserialization. - */ - @Deprecated - @JsonProperty("output") - public Map getRawOutput() { - return output; - } - - /** - * @deprecated Used only for JSON serialization and deserialization. 
- */ - @Deprecated - @JsonProperty("output") - public void setRawOutput(Map output) { - setOutput(output); - } - - public String getCorrelationId() { - return correlationId; - } - - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - public String getReRunFromWorkflowId() { - return reRunFromWorkflowId; - } - - public void setReRunFromWorkflowId(String reRunFromWorkflowId) { - this.reRunFromWorkflowId = reRunFromWorkflowId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - public String getEvent() { - return event; - } - - public void setEvent(String event) { - this.event = event; - } - - public Map getTaskToDomain() { - return taskToDomain; - } - - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - - public Set getFailedReferenceTaskNames() { - return failedReferenceTaskNames; - } - - public void setFailedReferenceTaskNames(Set failedReferenceTaskNames) { - this.failedReferenceTaskNames = failedReferenceTaskNames; - } - - public WorkflowDef getWorkflowDefinition() { - return workflowDefinition; - } - - public void setWorkflowDefinition(WorkflowDef workflowDefinition) { - this.workflowDefinition = workflowDefinition; - } - - public String getExternalInputPayloadStoragePath() { - return externalInputPayloadStoragePath; - } - - public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { - this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; - } - - public String getExternalOutputPayloadStoragePath() { - return externalOutputPayloadStoragePath; - } - - public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { - this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; - } - - public int getPriority() { - return 
priority; - } - - public void setPriority(int priority) { - if (priority < 0 || priority > 99) { - throw new IllegalArgumentException("priority MUST be between 0 and 99 (inclusive)"); - } - this.priority = priority; - } - - public Map getVariables() { - return variables; - } - - public void setVariables(Map variables) { - this.variables = variables; - } - - public long getLastRetriedTime() { - return lastRetriedTime; - } - - public void setLastRetriedTime(long lastRetriedTime) { - this.lastRetriedTime = lastRetriedTime; - } - - public String getOwnerApp() { - return ownerApp; - } - - public void setOwnerApp(String ownerApp) { - this.ownerApp = ownerApp; - } - - public Long getCreateTime() { - return createTime; - } - - public void setCreateTime(Long createTime) { - this.createTime = createTime; - } - - public Long getUpdatedTime() { - return updatedTime; - } - - public void setUpdatedTime(Long updatedTime) { - this.updatedTime = updatedTime; - } - - public String getCreatedBy() { - return createdBy; - } - - public void setCreatedBy(String createdBy) { - this.createdBy = createdBy; - } - - public String getUpdatedBy() { - return updatedBy; - } - - public void setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - } - - public String getFailedTaskId() { - return failedTaskId; - } - - public void setFailedTaskId(String failedTaskId) { - this.failedTaskId = failedTaskId; - } - - /** - * Convenience method for accessing the workflow definition name. - * - * @return the workflow definition name. - */ - public String getWorkflowName() { - Utils.checkNotNull(workflowDefinition, "Workflow definition is null"); - return workflowDefinition.getName(); - } - - /** - * Convenience method for accessing the workflow definition version. - * - * @return the workflow definition version. 
- */ - public int getWorkflowVersion() { - Utils.checkNotNull(workflowDefinition, "Workflow definition is null"); - return workflowDefinition.getVersion(); - } - - public boolean hasParent() { - return StringUtils.isNotEmpty(parentWorkflowId); - } - - /** - * A string representation of all relevant fields that identify this workflow. Intended for use - * in log and other system generated messages. - */ - public String toShortString() { - String name = workflowDefinition != null ? workflowDefinition.getName() : null; - Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; - return String.format("%s.%s/%s", name, version, workflowId); - } - - public TaskModel getTaskByRefName(String refName) { - if (refName == null) { - throw new RuntimeException( - "refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value"); - } - LinkedList found = new LinkedList<>(); - for (TaskModel task : tasks) { - if (task.getReferenceTaskName() == null) { - throw new RuntimeException( - "Task " - + task.getTaskDefName() - + ", seq=" - + task.getSeq() - + " does not have reference name specified."); - } - if (task.getReferenceTaskName().equals(refName)) { - found.add(task); - } - } - if (found.isEmpty()) { - return null; - } - return found.getLast(); - } - - public void externalizeInput(String path) { - this.inputPayload = this.input; - this.input = new HashMap<>(); - this.externalInputPayloadStoragePath = path; - } - - public void externalizeOutput(String path) { - this.outputPayload = this.output; - this.output = new HashMap<>(); - this.externalOutputPayloadStoragePath = path; - } - - public void internalizeInput(Map data) { - this.input = new HashMap<>(); - this.inputPayload = data; - } - - public void internalizeOutput(Map data) { - this.output = new HashMap<>(); - this.outputPayload = data; - } - - @Override - public String toString() { - String name = workflowDefinition != null ? 
workflowDefinition.getName() : null; - Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; - return String.format("%s.%s/%s.%s", name, version, workflowId, status); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WorkflowModel that = (WorkflowModel) o; - return getEndTime() == that.getEndTime() - && getPriority() == that.getPriority() - && getLastRetriedTime() == that.getLastRetriedTime() - && getStatus() == that.getStatus() - && Objects.equals(getWorkflowId(), that.getWorkflowId()) - && Objects.equals(getParentWorkflowId(), that.getParentWorkflowId()) - && Objects.equals(getParentWorkflowTaskId(), that.getParentWorkflowTaskId()) - && Objects.equals(getTasks(), that.getTasks()) - && Objects.equals(getInput(), that.getInput()) - && Objects.equals(getOutput(), that.getOutput()) - && Objects.equals(getCorrelationId(), that.getCorrelationId()) - && Objects.equals(getReRunFromWorkflowId(), that.getReRunFromWorkflowId()) - && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion()) - && Objects.equals(getEvent(), that.getEvent()) - && Objects.equals(getTaskToDomain(), that.getTaskToDomain()) - && Objects.equals(getFailedReferenceTaskNames(), that.getFailedReferenceTaskNames()) - && Objects.equals(getWorkflowDefinition(), that.getWorkflowDefinition()) - && Objects.equals( - getExternalInputPayloadStoragePath(), - that.getExternalInputPayloadStoragePath()) - && Objects.equals( - getExternalOutputPayloadStoragePath(), - that.getExternalOutputPayloadStoragePath()) - && Objects.equals(getVariables(), that.getVariables()) - && Objects.equals(getOwnerApp(), that.getOwnerApp()) - && Objects.equals(getCreateTime(), that.getCreateTime()) - && Objects.equals(getUpdatedTime(), that.getUpdatedTime()) - && Objects.equals(getCreatedBy(), that.getCreatedBy()) - && Objects.equals(getUpdatedBy(), that.getUpdatedBy()); - } - - @Override - 
public int hashCode() { - return Objects.hash( - getStatus(), - getEndTime(), - getWorkflowId(), - getParentWorkflowId(), - getParentWorkflowTaskId(), - getTasks(), - getInput(), - getOutput(), - getCorrelationId(), - getReRunFromWorkflowId(), - getReasonForIncompletion(), - getEvent(), - getTaskToDomain(), - getFailedReferenceTaskNames(), - getWorkflowDefinition(), - getExternalInputPayloadStoragePath(), - getExternalOutputPayloadStoragePath(), - getPriority(), - getVariables(), - getLastRetriedTime(), - getOwnerApp(), - getCreateTime(), - getUpdatedTime(), - getCreatedBy(), - getUpdatedBy()); - } - - public Workflow toWorkflow() { - Workflow workflow = new Workflow(); - BeanUtils.copyProperties(this, workflow); - workflow.setStatus(Workflow.WorkflowStatus.valueOf(this.status.name())); - workflow.setTasks(tasks.stream().map(TaskModel::toTask).collect(Collectors.toList())); - - // ensure that input/output is properly represented - if (externalInputPayloadStoragePath != null) { - workflow.setInput(new HashMap<>()); - } - if (externalOutputPayloadStoragePath != null) { - workflow.setOutput(new HashMap<>()); - } - return workflow; - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/AdminService.java b/core/src/main/java/com/netflix/conductor/service/AdminService.java deleted file mode 100644 index 84d68c279..000000000 --- a/core/src/main/java/com/netflix/conductor/service/AdminService.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Map; - -import javax.validation.constraints.NotEmpty; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.metadata.tasks.Task; - -@Validated -public interface AdminService { - - /** - * Queue up all the running workflows for sweep. - * - * @param workflowId Id of the workflow - * @return the id of the workflow instance that can be use for tracking. - */ - String requeueSweep( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Get all the configuration parameters. - * - * @return all the configuration parameters. - */ - Map getAllConfig(); - - /** - * Get the list of pending tasks for a given task type. - * - * @param taskType Name of the task - * @param start Start index of pagination - * @param count Number of entries - * @return list of pending {@link Task} - */ - List getListOfPendingTask( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType, - Integer start, - Integer count); - - /** - * Verify that the Workflow is consistent, and run repairs as needed. - * - * @param workflowId id of the workflow to be returned - * @return true, if repair was successful - */ - boolean verifyAndRepairWorkflowConsistency( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Get registered queues. 
- * - * @param verbose `true|false` for verbose logs - * @return map of event queues - */ - Map getEventQueues(boolean verbose); -} diff --git a/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java deleted file mode 100644 index bf470d4e8..000000000 --- a/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.springframework.boot.info.BuildProperties; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.Audit; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.EventQueueManager; -import com.netflix.conductor.core.reconciliation.WorkflowRepairService; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.dao.QueueDAO; - -@Audit -@Trace -@Service -public class AdminServiceImpl implements AdminService { - - private final ConductorProperties properties; - private final ExecutionService executionService; - private final QueueDAO queueDAO; - private final WorkflowRepairService workflowRepairService; - private final EventQueueManager eventQueueManager; - private final BuildProperties buildProperties; - - public AdminServiceImpl( - ConductorProperties properties, - ExecutionService executionService, - QueueDAO queueDAO, - Optional workflowRepairService, - Optional eventQueueManager, - Optional buildProperties) { - this.properties = properties; - this.executionService = executionService; - this.queueDAO = queueDAO; - this.workflowRepairService = workflowRepairService.orElse(null); - this.eventQueueManager = eventQueueManager.orElse(null); - this.buildProperties = buildProperties.orElse(null); - } - - /** - * Get all the configuration parameters. 
- * - * @return all the configuration parameters. - */ - public Map getAllConfig() { - Map configs = properties.getAll(); - configs.putAll(getBuildProperties()); - return configs; - } - - /** - * Get all build properties - * - * @return all the build properties. - */ - private Map getBuildProperties() { - if (buildProperties == null) return Collections.emptyMap(); - Map buildProps = new HashMap<>(); - buildProps.put("version", buildProperties.getVersion()); - buildProps.put("buildDate", buildProperties.getTime()); - return buildProps; - } - - /** - * Get the list of pending tasks for a given task type. - * - * @param taskType Name of the task - * @param start Start index of pagination - * @param count Number of entries - * @return list of pending {@link Task} - */ - public List getListOfPendingTask(String taskType, Integer start, Integer count) { - List tasks = executionService.getPendingTasksForTaskType(taskType); - int total = start + count; - total = Math.min(tasks.size(), total); - if (start > tasks.size()) { - start = tasks.size(); - } - return tasks.subList(start, total); - } - - @Override - public boolean verifyAndRepairWorkflowConsistency(String workflowId) { - if (workflowRepairService == null) { - throw new IllegalStateException( - WorkflowRepairService.class.getSimpleName() + " is disabled."); - } - return workflowRepairService.verifyAndRepairWorkflow(workflowId, true); - } - - /** - * Queue up the workflow for sweep. - * - * @param workflowId Id of the workflow - * @return the id of the workflow instance that can be use for tracking. - */ - public String requeueSweep(String workflowId) { - boolean pushed = - queueDAO.pushIfNotExists( - Utils.DECIDER_QUEUE, - workflowId, - properties.getWorkflowOffsetTimeout().getSeconds()); - return pushed + "." + workflowId; - } - - /** - * Get registered queues. 
- * - * @param verbose `true|false` for verbose logs - * @return map of event queues - */ - public Map getEventQueues(boolean verbose) { - if (eventQueueManager == null) { - throw new IllegalStateException("Event processing is DISABLED"); - } - return (verbose ? eventQueueManager.getQueueSizes() : eventQueueManager.getQueues()); - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/EventService.java b/core/src/main/java/com/netflix/conductor/service/EventService.java deleted file mode 100644 index c2f29e734..000000000 --- a/core/src/main/java/com/netflix/conductor/service/EventService.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; - -import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.metadata.events.EventHandler; - -@Validated -public interface EventService { - - /** - * Add a new event handler. - * - * @param eventHandler Instance of {@link EventHandler} - */ - void addEventHandler( - @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler); - - /** - * Update an existing event handler. - * - * @param eventHandler Instance of {@link EventHandler} - */ - void updateEventHandler( - @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler); - - /** - * Remove an event handler. - * - * @param name Event name - */ - void removeEventHandlerStatus( - @NotEmpty(message = "EventHandler name cannot be null or empty.") String name); - - /** - * Get all the event handlers. - * - * @return list of {@link EventHandler} - */ - List getEventHandlers(); - - /** - * Get event handlers for a given event. 
- * - * @param event Event Name - * @param activeOnly `true|false` for active only events - * @return list of {@link EventHandler} - */ - List getEventHandlersForEvent( - @NotEmpty(message = "Event cannot be null or empty.") String event, boolean activeOnly); -} diff --git a/core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java deleted file mode 100644 index b4b20bdf3..000000000 --- a/core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; - -import org.springframework.stereotype.Service; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.core.events.EventQueues; - -@Service -public class EventServiceImpl implements EventService { - - private final MetadataService metadataService; - - public EventServiceImpl(MetadataService metadataService, EventQueues eventQueues) { - this.metadataService = metadataService; - } - - /** - * Add a new event handler. - * - * @param eventHandler Instance of {@link EventHandler} - */ - public void addEventHandler(EventHandler eventHandler) { - metadataService.addEventHandler(eventHandler); - } - - /** - * Update an existing event handler. - * - * @param eventHandler Instance of {@link EventHandler} - */ - public void updateEventHandler(EventHandler eventHandler) { - metadataService.updateEventHandler(eventHandler); - } - - /** - * Remove an event handler. - * - * @param name Event name - */ - public void removeEventHandlerStatus(String name) { - metadataService.removeEventHandlerStatus(name); - } - - /** - * Get all the event handlers. - * - * @return list of {@link EventHandler} - */ - public List getEventHandlers() { - return metadataService.getAllEventHandlers(); - } - - /** - * Get event handlers for a given event. 
- * - * @param event Event Name - * @param activeOnly `true|false` for active only events - * @return list of {@link EventHandler} - */ - public List getEventHandlersForEvent(String event, boolean activeOnly) { - return metadataService.getEventHandlersForEvent(event, activeOnly); - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java b/core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java deleted file mode 100644 index e355dc336..000000000 --- a/core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.sync.Lock; -import com.netflix.conductor.metrics.Monitors; - -@Service -public class ExecutionLockService { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionLockService.class); - private final ConductorProperties properties; - private final Lock lock; - private final long lockLeaseTime; - private final long lockTimeToTry; - - @Autowired - public ExecutionLockService(ConductorProperties properties, Lock lock) { - this.properties = properties; - this.lock = lock; - this.lockLeaseTime = properties.getLockLeaseTime().toMillis(); - this.lockTimeToTry = properties.getLockTimeToTry().toMillis(); - } - - /** - * Tries to acquire lock with reasonable timeToTry duration and lease time. Exits if a lock - * cannot be acquired. Considering that the workflow decide can be triggered through multiple - * entry points, and periodically through the sweeper service, do not block on acquiring the - * lock, as the order of execution of decides on a workflow doesn't matter. 
- * - * @param lockId - * @return - */ - public boolean acquireLock(String lockId) { - return acquireLock(lockId, lockTimeToTry, lockLeaseTime); - } - - public boolean acquireLock(String lockId, long timeToTryMs) { - return acquireLock(lockId, timeToTryMs, lockLeaseTime); - } - - public boolean acquireLock(String lockId, long timeToTryMs, long leaseTimeMs) { - if (properties.isWorkflowExecutionLockEnabled()) { - if (!lock.acquireLock(lockId, timeToTryMs, leaseTimeMs, TimeUnit.MILLISECONDS)) { - LOGGER.debug( - "Thread {} failed to acquire lock to lockId {}.", - Thread.currentThread().getId(), - lockId); - Monitors.recordAcquireLockUnsuccessful(); - return false; - } - LOGGER.debug( - "Thread {} acquired lock to lockId {}.", - Thread.currentThread().getId(), - lockId); - } - return true; - } - - /** - * Blocks until it gets the lock for workflowId - * - * @param lockId - */ - public void waitForLock(String lockId) { - if (properties.isWorkflowExecutionLockEnabled()) { - lock.acquireLock(lockId); - LOGGER.debug( - "Thread {} acquired lock to lockId {}.", - Thread.currentThread().getId(), - lockId); - } - } - - public void releaseLock(String lockId) { - if (properties.isWorkflowExecutionLockEnabled()) { - lock.releaseLock(lockId); - LOGGER.debug( - "Thread {} released lock to lockId {}.", - Thread.currentThread().getId(), - lockId); - } - } - - public void deleteLock(String lockId) { - if (properties.isWorkflowExecutionLockEnabled()) { - lock.deleteLock(lockId); - LOGGER.debug("Thread {} deleted lockId {}.", Thread.currentThread().getId(), lockId); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java deleted file mode 100644 index 97f95dbee..000000000 --- a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java +++ /dev/null @@ -1,627 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.exception.ApplicationException; -import 
com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; - -@Trace -@Service -public class ExecutionService { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionService.class); - - private final WorkflowExecutor workflowExecutor; - private final ExecutionDAOFacade executionDAOFacade; - private final QueueDAO queueDAO; - private final ExternalPayloadStorage externalPayloadStorage; - private final SystemTaskRegistry systemTaskRegistry; - - private final long queueTaskMessagePostponeSecs; - - private static final int MAX_POLL_TIMEOUT_MS = 5000; - private static final int POLL_COUNT_ONE = 1; - private static final int POLLING_TIMEOUT_IN_MS = 100; - - public ExecutionService( - WorkflowExecutor workflowExecutor, - ExecutionDAOFacade executionDAOFacade, - QueueDAO queueDAO, - ConductorProperties properties, - ExternalPayloadStorage externalPayloadStorage, - SystemTaskRegistry systemTaskRegistry) { - this.workflowExecutor = workflowExecutor; - this.executionDAOFacade = executionDAOFacade; - this.queueDAO = queueDAO; - this.externalPayloadStorage = externalPayloadStorage; - - this.queueTaskMessagePostponeSecs = - properties.getTaskExecutionPostponeDuration().getSeconds(); - this.systemTaskRegistry = systemTaskRegistry; - } - - public Task poll(String taskType, String workerId) { - return poll(taskType, workerId, null); - } - - public Task poll(String taskType, String workerId, String domain) { - - List tasks = poll(taskType, workerId, domain, 1, 100); - if (tasks.isEmpty()) { - return null; - } - return tasks.get(0); - } - - public List poll(String taskType, String workerId, int count, int timeoutInMilliSecond) { - return poll(taskType, workerId, 
null, count, timeoutInMilliSecond); - } - - public List poll( - String taskType, String workerId, String domain, int count, int timeoutInMilliSecond) { - if (timeoutInMilliSecond > MAX_POLL_TIMEOUT_MS) { - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "Long Poll Timeout value cannot be more than 5 seconds"); - } - String queueName = QueueUtils.getQueueName(taskType, domain, null, null); - - List taskIds = new LinkedList<>(); - List tasks = new LinkedList<>(); - try { - taskIds = queueDAO.pop(queueName, count, timeoutInMilliSecond); - } catch (Exception e) { - LOGGER.error( - "Error polling for task: {} from worker: {} in domain: {}, count: {}", - taskType, - workerId, - domain, - count, - e); - Monitors.error(this.getClass().getCanonicalName(), "taskPoll"); - Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName()); - } - - for (String taskId : taskIds) { - try { - TaskModel taskModel = executionDAOFacade.getTaskModel(taskId); - if (taskModel == null || taskModel.getStatus().isTerminal()) { - // Remove taskId(s) without a valid Task/terminal state task from the queue - queueDAO.remove(queueName, taskId); - LOGGER.debug("Removed task: {} from the queue: {}", taskId, queueName); - continue; - } - - if (executionDAOFacade.exceedsInProgressLimit(taskModel)) { - // Postpone this message, so that it would be available for poll again. - queueDAO.postpone( - queueName, - taskId, - taskModel.getWorkflowPriority(), - queueTaskMessagePostponeSecs); - LOGGER.debug( - "Postponed task: {} in queue: {} by {} seconds", - taskId, - queueName, - queueTaskMessagePostponeSecs); - continue; - } - TaskDef taskDef = - taskModel.getTaskDefinition().isPresent() - ? taskModel.getTaskDefinition().get() - : null; - if (taskModel.getRateLimitPerFrequency() > 0 - && executionDAOFacade.exceedsRateLimitPerFrequency(taskModel, taskDef)) { - // Postpone this message, so that it would be available for poll again. 
- queueDAO.postpone( - queueName, - taskId, - taskModel.getWorkflowPriority(), - queueTaskMessagePostponeSecs); - LOGGER.debug( - "RateLimit Execution limited for {}:{}, limit:{}", - taskId, - taskModel.getTaskDefName(), - taskModel.getRateLimitPerFrequency()); - continue; - } - - taskModel.setStatus(TaskModel.Status.IN_PROGRESS); - if (taskModel.getStartTime() == 0) { - taskModel.setStartTime(System.currentTimeMillis()); - Monitors.recordQueueWaitTime( - taskModel.getTaskDefName(), taskModel.getQueueWaitTime()); - } - taskModel.setCallbackAfterSeconds( - 0); // reset callbackAfterSeconds when giving the task to the worker - taskModel.setWorkerId(workerId); - taskModel.incrementPollCount(); - executionDAOFacade.updateTask(taskModel); - tasks.add(taskModel.toTask()); - } catch (Exception e) { - // db operation failed for dequeued message, re-enqueue with a delay - LOGGER.warn( - "DB operation failed for task: {}, postponing task in queue", taskId, e); - Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName()); - queueDAO.postpone(queueName, taskId, 0, queueTaskMessagePostponeSecs); - } - } - executionDAOFacade.updateTaskLastPoll(taskType, domain, workerId); - Monitors.recordTaskPoll(queueName); - tasks.forEach(this::ackTaskReceived); - return tasks; - } - - public Task getLastPollTask(String taskType, String workerId, String domain) { - List tasks = poll(taskType, workerId, domain, POLL_COUNT_ONE, POLLING_TIMEOUT_IN_MS); - if (tasks.isEmpty()) { - LOGGER.debug( - "No Task available for the poll: /tasks/poll/{}?{}&{}", - taskType, - workerId, - domain); - return null; - } - Task task = tasks.get(0); - ackTaskReceived(task); - LOGGER.debug( - "The Task {} being returned for /tasks/poll/{}?{}&{}", - task, - taskType, - workerId, - domain); - return task; - } - - public List getPollData(String taskType) { - return executionDAOFacade.getTaskPollData(taskType); - } - - public List getAllPollData() { - try { - return executionDAOFacade.getAllPollData(); 
- } catch (UnsupportedOperationException uoe) { - List allPollData = new ArrayList<>(); - Map queueSizes = queueDAO.queuesDetail(); - queueSizes - .keySet() - .forEach( - queueName -> { - try { - if (!queueName.contains(QueueUtils.DOMAIN_SEPARATOR)) { - allPollData.addAll( - getPollData( - QueueUtils.getQueueNameWithoutDomain( - queueName))); - } - } catch (Exception e) { - LOGGER.error("Unable to fetch all poll data!", e); - } - }); - return allPollData; - } - } - - public void terminateWorkflow(String workflowId, String reason) { - workflowExecutor.terminateWorkflow(workflowId, reason); - } - - public void updateTask(TaskResult taskResult) { - workflowExecutor.updateTask(taskResult); - } - - public List getTasks(String taskType, String startKey, int count) { - return executionDAOFacade.getTasksByName(taskType, startKey, count); - } - - public Task getTask(String taskId) { - return executionDAOFacade.getTask(taskId); - } - - public Task getPendingTaskForWorkflow(String taskReferenceName, String workflowId) { - return executionDAOFacade.getTasksForWorkflow(workflowId).stream() - .filter(task -> !task.getStatus().isTerminal()) - .filter(task -> task.getReferenceTaskName().equals(taskReferenceName)) - .findFirst() // There can only be one task by a given reference name running at a - // time. 
- .orElse(null); - } - - /** - * This method removes the task from the un-acked Queue - * - * @param taskId: the taskId that needs to be updated and removed from the unacked queue - * @return True in case of successful removal of the taskId from the un-acked queue - */ - public boolean ackTaskReceived(String taskId) { - return Optional.ofNullable(getTask(taskId)).map(this::ackTaskReceived).orElse(false); - } - - public boolean ackTaskReceived(Task task) { - return queueDAO.ack(QueueUtils.getQueueName(task), task.getTaskId()); - } - - public Map getTaskQueueSizes(List taskDefNames) { - Map sizes = new HashMap<>(); - for (String taskDefName : taskDefNames) { - sizes.put(taskDefName, getTaskQueueSize(taskDefName)); - } - return sizes; - } - - public Integer getTaskQueueSize(String queueName) { - return queueDAO.getSize(queueName); - } - - public void removeTaskFromQueue(String taskId) { - Task task = getTask(taskId); - if (task == null) { - throw new ApplicationException( - ApplicationException.Code.NOT_FOUND, - String.format("No such task found by taskId: %s", taskId)); - } - queueDAO.remove(QueueUtils.getQueueName(task), taskId); - } - - public int requeuePendingTasks(String taskType) { - - int count = 0; - List tasks = getPendingTasksForTaskType(taskType); - - for (Task pending : tasks) { - - if (systemTaskRegistry.isSystemTask(pending.getTaskType())) { - continue; - } - if (pending.getStatus().isTerminal()) { - continue; - } - - LOGGER.debug( - "Requeuing Task: {} of taskType: {} in Workflow: {}", - pending.getTaskId(), - pending.getTaskType(), - pending.getWorkflowInstanceId()); - boolean pushed = requeue(pending); - if (pushed) { - count++; - } - } - return count; - } - - private boolean requeue(Task pending) { - long callback = pending.getCallbackAfterSeconds(); - if (callback < 0) { - callback = 0; - } - queueDAO.remove(QueueUtils.getQueueName(pending), pending.getTaskId()); - long now = System.currentTimeMillis(); - callback = callback - ((now - 
pending.getUpdateTime()) / 1000); - if (callback < 0) { - callback = 0; - } - return queueDAO.pushIfNotExists( - QueueUtils.getQueueName(pending), - pending.getTaskId(), - pending.getWorkflowPriority(), - callback); - } - - public List getWorkflowInstances( - String workflowName, - String correlationId, - boolean includeClosed, - boolean includeTasks) { - - List workflows = - executionDAOFacade.getWorkflowsByCorrelationId(workflowName, correlationId, false); - return workflows.stream() - .parallel() - .filter( - workflow -> { - if (includeClosed - || workflow.getStatus() - .equals(Workflow.WorkflowStatus.RUNNING)) { - // including tasks for subset of workflows to increase performance - if (includeTasks) { - List tasks = - executionDAOFacade.getTasksForWorkflow( - workflow.getWorkflowId()); - tasks.sort(Comparator.comparingInt(Task::getSeq)); - workflow.setTasks(tasks); - } - return true; - } else { - return false; - } - }) - .collect(Collectors.toList()); - } - - public Workflow getExecutionStatus(String workflowId, boolean includeTasks) { - return executionDAOFacade.getWorkflow(workflowId, includeTasks); - } - - public List getRunningWorkflows(String workflowName, int version) { - return executionDAOFacade.getRunningWorkflowIds(workflowName, version); - } - - public void removeWorkflow(String workflowId, boolean archiveWorkflow) { - executionDAOFacade.removeWorkflow(workflowId, archiveWorkflow); - } - - public SearchResult search( - String query, String freeText, int start, int size, List sortOptions) { - - SearchResult result = - executionDAOFacade.searchWorkflows(query, freeText, start, size, sortOptions); - List workflows = - result.getResults().stream() - .parallel() - .map( - workflowId -> { - try { - return new WorkflowSummary( - executionDAOFacade.getWorkflow(workflowId, false)); - } catch (Exception e) { - LOGGER.error( - "Error fetching workflow by id: {}", workflowId, e); - return null; - } - }) - .filter(Objects::nonNull) - 
.collect(Collectors.toList()); - int missing = result.getResults().size() - workflows.size(); - long totalHits = result.getTotalHits() - missing; - return new SearchResult<>(totalHits, workflows); - } - - public SearchResult searchV2( - String query, String freeText, int start, int size, List sortOptions) { - - SearchResult result = - executionDAOFacade.searchWorkflows(query, freeText, start, size, sortOptions); - List workflows = - result.getResults().stream() - .parallel() - .map( - workflowId -> { - try { - return executionDAOFacade.getWorkflow(workflowId, false); - } catch (Exception e) { - LOGGER.error( - "Error fetching workflow by id: {}", workflowId, e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - int missing = result.getResults().size() - workflows.size(); - long totalHits = result.getTotalHits() - missing; - return new SearchResult<>(totalHits, workflows); - } - - public SearchResult searchWorkflowByTasks( - String query, String freeText, int start, int size, List sortOptions) { - SearchResult taskSummarySearchResult = - searchTasks(query, freeText, start, size, sortOptions); - List workflowSummaries = - taskSummarySearchResult.getResults().stream() - .parallel() - .map( - taskSummary -> { - try { - String workflowId = taskSummary.getWorkflowId(); - return new WorkflowSummary( - executionDAOFacade.getWorkflow(workflowId, false)); - } catch (Exception e) { - LOGGER.error( - "Error fetching workflow by id: {}", - taskSummary.getWorkflowId(), - e); - return null; - } - }) - .filter(Objects::nonNull) - .distinct() - .collect(Collectors.toList()); - int missing = taskSummarySearchResult.getResults().size() - workflowSummaries.size(); - long totalHits = taskSummarySearchResult.getTotalHits() - missing; - return new SearchResult<>(totalHits, workflowSummaries); - } - - public SearchResult searchWorkflowByTasksV2( - String query, String freeText, int start, int size, List sortOptions) { - SearchResult 
taskSummarySearchResult = - searchTasks(query, freeText, start, size, sortOptions); - List workflows = - taskSummarySearchResult.getResults().stream() - .parallel() - .map( - taskSummary -> { - try { - String workflowId = taskSummary.getWorkflowId(); - return executionDAOFacade.getWorkflow(workflowId, false); - } catch (Exception e) { - LOGGER.error( - "Error fetching workflow by id: {}", - taskSummary.getWorkflowId(), - e); - return null; - } - }) - .filter(Objects::nonNull) - .distinct() - .collect(Collectors.toList()); - int missing = taskSummarySearchResult.getResults().size() - workflows.size(); - long totalHits = taskSummarySearchResult.getTotalHits() - missing; - return new SearchResult<>(totalHits, workflows); - } - - public SearchResult searchTasks( - String query, String freeText, int start, int size, List sortOptions) { - - SearchResult result = - executionDAOFacade.searchTasks(query, freeText, start, size, sortOptions); - List workflows = - result.getResults().stream() - .parallel() - .map( - task -> { - try { - return new TaskSummary(executionDAOFacade.getTask(task)); - } catch (Exception e) { - LOGGER.error("Error fetching task by id: {}", task, e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - int missing = result.getResults().size() - workflows.size(); - long totalHits = result.getTotalHits() - missing; - return new SearchResult<>(totalHits, workflows); - } - - public SearchResult getSearchTasks( - String query, - String freeText, - int start, - /*@Max(value = MAX_SEARCH_SIZE, message = "Cannot return more than {value} workflows." 
+ - " Please use pagination.")*/ int size, - String sortString) { - return searchTasks(query, freeText, start, size, Utils.convertStringToList(sortString)); - } - - public SearchResult getSearchTasksV2( - String query, String freeText, int start, int size, String sortString) { - SearchResult result = - executionDAOFacade.searchTasks( - query, freeText, start, size, Utils.convertStringToList(sortString)); - List tasks = - result.getResults().stream() - .parallel() - .map( - task -> { - try { - return executionDAOFacade.getTask(task); - } catch (Exception e) { - LOGGER.error("Error fetching task by id: {}", task, e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - int missing = result.getResults().size() - tasks.size(); - long totalHits = result.getTotalHits() - missing; - return new SearchResult<>(totalHits, tasks); - } - - public List getPendingTasksForTaskType(String taskType) { - return executionDAOFacade.getPendingTasksForTaskType(taskType); - } - - public boolean addEventExecution(EventExecution eventExecution) { - return executionDAOFacade.addEventExecution(eventExecution); - } - - public void removeEventExecution(EventExecution eventExecution) { - executionDAOFacade.removeEventExecution(eventExecution); - } - - public void updateEventExecution(EventExecution eventExecution) { - executionDAOFacade.updateEventExecution(eventExecution); - } - - /** - * @param queue Name of the registered queueDAO - * @param msg Message - */ - public void addMessage(String queue, Message msg) { - executionDAOFacade.addMessage(queue, msg); - } - - /** - * Adds task logs - * - * @param taskId Id of the task - * @param log logs - */ - public void log(String taskId, String log) { - TaskExecLog executionLog = new TaskExecLog(); - executionLog.setTaskId(taskId); - executionLog.setLog(log); - executionLog.setCreatedTime(System.currentTimeMillis()); - executionDAOFacade.addTaskExecLog(Collections.singletonList(executionLog)); - } - - /** - * @param 
taskId Id of the task for which to retrieve logs - * @return Execution Logs (logged by the worker) - */ - public List getTaskLogs(String taskId) { - return executionDAOFacade.getTaskExecutionLogs(taskId); - } - - /** - * Get external uri for the payload - * - * @param operation the type of {@link Operation} to be performed - * @param payloadType the {@link PayloadType} at the external uri - * @param path the path for which the external storage location is to be populated - * @return the external uri at which the payload is stored/to be stored - */ - public ExternalStorageLocation getExternalStorageLocation( - Operation operation, PayloadType payloadType, String path) { - return externalPayloadStorage.getLocation(operation, payloadType, path); - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java deleted file mode 100644 index bf690f809..000000000 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Optional; - -import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; - -@Validated -public interface MetadataService { - - /** - * @param taskDefinitions Task Definitions to register - */ - void registerTaskDef( - @NotNull(message = "TaskDefList cannot be empty or null") - @Size(min = 1, message = "TaskDefList is empty") - List<@Valid TaskDef> taskDefinitions); - - /** - * @param taskDefinition Task Definition to be updated - */ - void updateTaskDef(@NotNull(message = "TaskDef cannot be null") @Valid TaskDef taskDefinition); - - /** - * @param taskType Remove task definition - */ - void unregisterTaskDef(@NotEmpty(message = "TaskName cannot be null or empty") String taskType); - - /** - * @return List of all the registered tasks - */ - List getTaskDefs(); - - /** - * @param taskType Task to retrieve - * @return Task Definition - */ - TaskDef getTaskDef(@NotEmpty(message = "TaskType cannot be null or empty") String taskType); - - /** - * @param def Workflow definition to be updated - */ - void updateWorkflowDef(@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef def); - - /** - * @param workflowDefList Workflow definitions to be updated. 
- */ - void updateWorkflowDef( - @NotNull(message = "WorkflowDef list name cannot be null or empty") - @Size(min = 1, message = "WorkflowDefList is empty") - List<@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef> - workflowDefList); - - /** - * @param name Name of the workflow to retrieve - * @param version Optional. Version. If null, then retrieves the latest - * @return Workflow definition - */ - WorkflowDef getWorkflowDef( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - Integer version); - - /** - * @param name Name of the workflow to retrieve - * @return Latest version of the workflow definition - */ - Optional getLatestWorkflow( - @NotEmpty(message = "Workflow name cannot be null or empty") String name); - - List getWorkflowDefs(); - - void registerWorkflowDef( - @NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef workflowDef); - - /** - * @param name Name of the workflow definition to be removed - * @param version Version of the workflow definition to be removed - */ - void unregisterWorkflowDef( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - @NotNull(message = "Version cannot be null") Integer version); - - /** - * @param eventHandler Event handler to be added. Will throw an exception if an event handler - * already exists with the name - */ - void addEventHandler( - @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler); - - /** - * @param eventHandler Event handler to be updated. 
- */ - void updateEventHandler( - @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler); - - /** - * @param name Removes the event handler from the system - */ - void removeEventHandlerStatus( - @NotEmpty(message = "EventName cannot be null or empty") String name); - - /** - * @return All the event handlers registered in the system - */ - List getAllEventHandlers(); - - /** - * @param event name of the event - * @param activeOnly if true, returns only the active handlers - * @return Returns the list of all the event handlers for a given event - */ - List getEventHandlersForEvent( - @NotEmpty(message = "EventName cannot be null or empty") String event, - boolean activeOnly); -} diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java deleted file mode 100644 index 24ebd0308..000000000 --- a/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Optional; - -import org.springframework.stereotype.Service; - -import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.validations.ValidationContext; - -@Service -public class MetadataServiceImpl implements MetadataService { - - private final MetadataDAO metadataDAO; - private final EventHandlerDAO eventHandlerDAO; - - public MetadataServiceImpl( - MetadataDAO metadataDAO, - EventHandlerDAO eventHandlerDAO, - ConductorProperties properties) { - this.metadataDAO = metadataDAO; - this.eventHandlerDAO = eventHandlerDAO; - - ValidationContext.initialize(metadataDAO); - OwnerEmailMandatoryConstraint.WorkflowTaskValidValidator.setOwnerEmailMandatory( - properties.isOwnerEmailMandatory()); - } - - /** - * @param taskDefinitions Task Definitions to register - */ - public void registerTaskDef(List taskDefinitions) { - for (TaskDef taskDefinition : taskDefinitions) { - taskDefinition.setCreatedBy(WorkflowContext.get().getClientApp()); - 
taskDefinition.setCreateTime(System.currentTimeMillis()); - taskDefinition.setUpdatedBy(null); - taskDefinition.setUpdateTime(null); - - metadataDAO.createTaskDef(taskDefinition); - } - } - - /** - * @param taskDefinition Task Definition to be updated - */ - public void updateTaskDef(TaskDef taskDefinition) { - TaskDef existing = metadataDAO.getTaskDef(taskDefinition.getName()); - if (existing == null) { - throw new ApplicationException( - Code.NOT_FOUND, "No such task by name " + taskDefinition.getName()); - } - taskDefinition.setUpdatedBy(WorkflowContext.get().getClientApp()); - taskDefinition.setUpdateTime(System.currentTimeMillis()); - metadataDAO.updateTaskDef(taskDefinition); - } - - /** - * @param taskType Remove task definition - */ - public void unregisterTaskDef(String taskType) { - metadataDAO.removeTaskDef(taskType); - } - - /** - * @return List of all the registered tasks - */ - public List getTaskDefs() { - return metadataDAO.getAllTaskDefs(); - } - - /** - * @param taskType Task to retrieve - * @return Task Definition - */ - public TaskDef getTaskDef(String taskType) { - TaskDef taskDef = metadataDAO.getTaskDef(taskType); - if (taskDef == null) { - throw new ApplicationException( - Code.NOT_FOUND, String.format("No such taskType found by name: %s", taskType)); - } - return taskDef; - } - - /** - * @param workflowDef Workflow definition to be updated - */ - public void updateWorkflowDef(WorkflowDef workflowDef) { - workflowDef.setUpdateTime(System.currentTimeMillis()); - metadataDAO.updateWorkflowDef(workflowDef); - } - - /** - * @param workflowDefList Workflow definitions to be updated. - */ - public void updateWorkflowDef(List workflowDefList) { - for (WorkflowDef workflowDef : workflowDefList) { - workflowDef.setUpdateTime(System.currentTimeMillis()); - metadataDAO.updateWorkflowDef(workflowDef); - } - } - - /** - * @param name Name of the workflow to retrieve - * @param version Optional. Version. 
If null, then retrieves the latest - * @return Workflow definition - */ - public WorkflowDef getWorkflowDef(String name, Integer version) { - Optional workflowDef; - if (version == null) { - workflowDef = metadataDAO.getLatestWorkflowDef(name); - } else { - workflowDef = metadataDAO.getWorkflowDef(name, version); - } - - return workflowDef.orElseThrow( - () -> - new ApplicationException( - Code.NOT_FOUND, - String.format( - "No such workflow found by name: %s, version: %d", - name, version))); - } - - /** - * @param name Name of the workflow to retrieve - * @return Latest version of the workflow definition - */ - public Optional getLatestWorkflow(String name) { - return metadataDAO.getLatestWorkflowDef(name); - } - - public List getWorkflowDefs() { - return metadataDAO.getAllWorkflowDefs(); - } - - public void registerWorkflowDef(WorkflowDef workflowDef) { - if (workflowDef.getName().contains(":")) { - throw new ApplicationException( - Code.INVALID_INPUT, - "Workflow name cannot contain the following set of characters: ':'"); - } - if (workflowDef.getSchemaVersion() < 1 || workflowDef.getSchemaVersion() > 2) { - workflowDef.setSchemaVersion(2); - } - workflowDef.setCreateTime(System.currentTimeMillis()); - metadataDAO.createWorkflowDef(workflowDef); - } - - /** - * @param name Name of the workflow definition to be removed - * @param version Version of the workflow definition to be removed - */ - public void unregisterWorkflowDef(String name, Integer version) { - metadataDAO.removeWorkflowDef(name, version); - } - - /** - * @param eventHandler Event handler to be added. Will throw an exception if an event handler - * already exists with the name - */ - public void addEventHandler(EventHandler eventHandler) { - eventHandlerDAO.addEventHandler(eventHandler); - } - - /** - * @param eventHandler Event handler to be updated. 
- */ - public void updateEventHandler(EventHandler eventHandler) { - eventHandlerDAO.updateEventHandler(eventHandler); - } - - /** - * @param name Removes the event handler from the system - */ - public void removeEventHandlerStatus(String name) { - eventHandlerDAO.removeEventHandler(name); - } - - /** - * @return All the event handlers registered in the system - */ - public List getAllEventHandlers() { - return eventHandlerDAO.getAllEventHandlers(); - } - - /** - * @param event name of the event - * @param activeOnly if true, returns only the active handlers - * @return Returns the list of all the event handlers for a given event - */ - public List getEventHandlersForEvent(String event, boolean activeOnly) { - return eventHandlerDAO.getEventHandlersForEvent(event, activeOnly); - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/TaskService.java b/core/src/main/java/com/netflix/conductor/service/TaskService.java deleted file mode 100644 index 7f4f3d0a6..000000000 --- a/core/src/main/java/com/netflix/conductor/service/TaskService.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Map; - -import javax.validation.Valid; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; - -@Validated -public interface TaskService { - - /** - * Poll for a task of a certain type. - * - * @param taskType Task name - * @param workerId Id of the workflow - * @param domain Domain of the workflow - * @return polled {@link Task} - */ - Task poll( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType, - String workerId, - String domain); - - /** - * Batch Poll for a task of a certain type. - * - * @param taskType Task Name - * @param workerId Id of the workflow - * @param domain Domain of the workflow - * @param count Number of tasks - * @param timeout Timeout for polling in milliseconds - * @return list of {@link Task} - */ - List batchPoll( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType, - String workerId, - String domain, - Integer count, - Integer timeout); - - /** - * Get in progress tasks. The results are paginated. 
- * - * @param taskType Task Name - * @param startKey Start index of pagination - * @param count Number of entries - * @return list of {@link Task} - */ - List getTasks( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType, - String startKey, - Integer count); - - /** - * Get in progress task for a given workflow id. - * - * @param workflowId Id of the workflow - * @param taskReferenceName Task reference name. - * @return instance of {@link Task} - */ - Task getPendingTaskForWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - @NotEmpty(message = "TaskReferenceName cannot be null or empty.") - String taskReferenceName); - - /** - * Updates a task. - * - * @param taskResult Instance of {@link TaskResult} - * @return task Id of the updated task. - */ - String updateTask( - @NotNull(message = "TaskResult cannot be null or empty.") @Valid TaskResult taskResult); - - /** - * Ack Task is received. - * - * @param taskId Id of the task - * @param workerId Id of the worker - * @return `true|false` if task if received or not - */ - String ackTaskReceived( - @NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String workerId); - - /** - * Ack Task is received. - * - * @param taskId Id of the task - * @return `true|false` if task if received or not - */ - boolean ackTaskReceived(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId); - - /** - * Log Task Execution Details. - * - * @param taskId Id of the task - * @param log Details you want to log - */ - void log(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String log); - - /** - * Get Task Execution Logs. - * - * @param taskId Id of the task. - * @return list of {@link TaskExecLog} - */ - List getTaskLogs( - @NotEmpty(message = "TaskId cannot be null or empty.") String taskId); - - /** - * Get task by Id. - * - * @param taskId Id of the task. 
- * @return instance of {@link Task} - */ - Task getTask(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId); - - /** - * Remove Task from a Task type queue. - * - * @param taskType Task Name - * @param taskId ID of the task - */ - void removeTaskFromQueue( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType, - @NotEmpty(message = "TaskId cannot be null or empty.") String taskId); - - /** - * Remove Task from a Task type queue. - * - * @param taskId ID of the task - */ - void removeTaskFromQueue(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId); - - /** - * Get Task type queue sizes. - * - * @param taskTypes List of task types. - * @return map of task type as Key and queue size as value. - */ - Map getTaskQueueSizes(List taskTypes); - - /** - * Get the queue size for a Task Type. The input can optionally include domain, - * isolationGroupId and executionNamespace. - * - * @return - */ - Integer getTaskQueueSize( - String taskType, String domain, String isolationGroupId, String executionNamespace); - - /** - * Get the details about each queue. - * - * @return map of queue details. - */ - Map>> allVerbose(); - - /** - * Get the details about each queue. - * - * @return map of details about each queue. - */ - Map getAllQueueDetails(); - - /** - * Get the last poll data for a given task type. - * - * @param taskType Task Name - * @return list of {@link PollData} - */ - List getPollData( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType); - - /** - * Get the last poll data for all task types. - * - * @return list of {@link PollData} - */ - List getAllPollData(); - - /** - * Requeue pending tasks. - * - * @param taskType Task name. - * @return number of tasks requeued. - */ - String requeuePendingTask( - @NotEmpty(message = "TaskType cannot be null or empty.") String taskType); - - /** - * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. 
- * sort=name or sort=workflowId. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult search( - int start, int size, String sort, String freeText, String query); - - /** - * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. - * sort=name or sort=workflowId. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchV2(int start, int size, String sort, String freeText, String query); - - /** - * Get the external storage location where the task output payload is stored/to be stored - * - * @param path the path for which the external storage location is to be populated - * @param operation the operation to be performed (read or write) - * @param payloadType the type of payload (input or output) - * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is - * stored in external storage - */ - ExternalStorageLocation getExternalStorageLocation( - String path, String operation, String payloadType); -} diff --git a/core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java deleted file mode 100644 index 333b2e346..000000000 --- a/core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java +++ /dev/null @@ -1,382 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.Audit; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.utils.QueueUtils; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.metrics.Monitors; - -@Audit -@Trace -@Service -public class TaskServiceImpl implements TaskService { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); - private final ExecutionService executionService; - private final QueueDAO queueDAO; - - public TaskServiceImpl(ExecutionService executionService, QueueDAO queueDAO) { - this.executionService = executionService; - this.queueDAO = queueDAO; - } - - /** - * Poll for a task of a certain type. 
- * - * @param taskType Task name - * @param workerId id of the workflow - * @param domain Domain of the workflow - * @return polled {@link Task} - */ - public Task poll(String taskType, String workerId, String domain) { - LOGGER.debug("Task being polled: /tasks/poll/{}?{}&{}", taskType, workerId, domain); - Task task = executionService.getLastPollTask(taskType, workerId, domain); - if (task != null) { - LOGGER.debug( - "The Task {} being returned for /tasks/poll/{}?{}&{}", - task, - taskType, - workerId, - domain); - } - Monitors.recordTaskPollCount(taskType, domain, 1); - return task; - } - - /** - * Batch Poll for a task of a certain type. - * - * @param taskType Task Name - * @param workerId id of the workflow - * @param domain Domain of the workflow - * @param count Number of tasks - * @param timeout Timeout for polling in milliseconds - * @return list of {@link Task} - */ - public List batchPoll( - String taskType, String workerId, String domain, Integer count, Integer timeout) { - List polledTasks = executionService.poll(taskType, workerId, domain, count, timeout); - LOGGER.debug( - "The Tasks {} being returned for /tasks/poll/{}?{}&{}", - polledTasks.stream().map(Task::getTaskId).collect(Collectors.toList()), - taskType, - workerId, - domain); - Monitors.recordTaskPollCount(taskType, domain, polledTasks.size()); - return polledTasks; - } - - /** - * Get in progress tasks. The results are paginated. - * - * @param taskType Task Name - * @param startKey Start index of pagination - * @param count Number of entries - * @return list of {@link Task} - */ - public List getTasks(String taskType, String startKey, Integer count) { - return executionService.getTasks(taskType, startKey, count); - } - - /** - * Get in progress task for a given workflow id. - * - * @param workflowId id of the workflow - * @param taskReferenceName Task reference name. 
- * @return instance of {@link Task} - */ - public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceName) { - return executionService.getPendingTaskForWorkflow(taskReferenceName, workflowId); - } - - /** - * Updates a task. - * - * @param taskResult Instance of {@link TaskResult} - * @return task Id of the updated task. - */ - public String updateTask(TaskResult taskResult) { - LOGGER.debug( - "Update Task: {} with callback time: {}", - taskResult, - taskResult.getCallbackAfterSeconds()); - executionService.updateTask(taskResult); - LOGGER.debug( - "Task: {} updated successfully with callback time: {}", - taskResult, - taskResult.getCallbackAfterSeconds()); - return taskResult.getTaskId(); - } - - /** - * Ack Task is received. - * - * @param taskId id of the task - * @param workerId id of the worker - * @return `true|false` if task is received or not - */ - public String ackTaskReceived(String taskId, String workerId) { - LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId); - return String.valueOf(ackTaskReceived(taskId)); - } - - /** - * Ack Task is received. - * - * @param taskId id of the task - * @return `true|false` if task is received or not - */ - public boolean ackTaskReceived(String taskId) { - LOGGER.debug("Ack received for task: {}", taskId); - AtomicBoolean ackResult = new AtomicBoolean(false); - try { - ackResult.set(executionService.ackTaskReceived(taskId)); - } catch (Exception e) { - // Fail the task and let decide reevaluate the workflow, thereby preventing workflow - // being stuck from transient ack errors. - String errorMsg = String.format("Error when trying to ack task %s", taskId); - LOGGER.error(errorMsg, e); - Task task = executionService.getTask(taskId); - Monitors.recordAckTaskError(task.getTaskType()); - failTask(task, errorMsg); - ackResult.set(false); - } - return ackResult.get(); - } - - /** Updates the task with FAILED status; On exception, fails the workflow. 
*/ - private void failTask(Task task, String errorMsg) { - try { - TaskResult taskResult = new TaskResult(); - taskResult.setStatus(TaskResult.Status.FAILED); - taskResult.setTaskId(task.getTaskId()); - taskResult.setWorkflowInstanceId(task.getWorkflowInstanceId()); - taskResult.setReasonForIncompletion(errorMsg); - executionService.updateTask(taskResult); - } catch (Exception e) { - LOGGER.error( - "Unable to fail task: {} in workflow: {}", - task.getTaskId(), - task.getWorkflowInstanceId(), - e); - executionService.terminateWorkflow( - task.getWorkflowInstanceId(), "Failed to ack task: " + task.getTaskId()); - } - } - - /** - * Log Task Execution Details. - * - * @param taskId id of the task - * @param log Details you want to log - */ - public void log(String taskId, String log) { - executionService.log(taskId, log); - } - - /** - * Get Task Execution Logs. - * - * @param taskId id of the task. - * @return list of {@link TaskExecLog} - */ - public List getTaskLogs(String taskId) { - return executionService.getTaskLogs(taskId); - } - - /** - * Get task by Id. - * - * @param taskId id of the task. - * @return instance of {@link Task} - */ - public Task getTask(String taskId) { - return executionService.getTask(taskId); - } - - /** - * Remove Task from a Task type queue. - * - * @param taskType Task Name - * @param taskId ID of the task - */ - public void removeTaskFromQueue(String taskType, String taskId) { - executionService.removeTaskFromQueue(taskId); - } - - /** - * Remove Task from a Task type queue. - * - * @param taskId ID of the task - */ - public void removeTaskFromQueue(String taskId) { - executionService.removeTaskFromQueue(taskId); - } - - /** - * Get Task type queue sizes. - * - * @param taskTypes List of task types. - * @return map of task type as Key and queue size as value. 
- */ - public Map getTaskQueueSizes(List taskTypes) { - return executionService.getTaskQueueSizes(taskTypes); - } - - @Override - public Integer getTaskQueueSize( - String taskType, String domain, String isolationGroupId, String executionNamespace) { - String queueName = - QueueUtils.getQueueName(taskType, domain, isolationGroupId, executionNamespace); - - return executionService.getTaskQueueSize(queueName); - } - - /** - * Get the details about each queue. - * - * @return map of queue details. - */ - public Map>> allVerbose() { - return queueDAO.queuesDetailVerbose(); - } - - /** - * Get the details about each queue. - * - * @return map of details about each queue. - */ - public Map getAllQueueDetails() { - return queueDAO.queuesDetail().entrySet().stream() - .sorted(Entry.comparingByKey()) - .collect( - Collectors.toMap( - Entry::getKey, - Entry::getValue, - (v1, v2) -> v1, - LinkedHashMap::new)); - } - - /** - * Get the last poll data for a given task type. - * - * @param taskType Task Name - * @return list of {@link PollData} - */ - public List getPollData(String taskType) { - return executionService.getPollData(taskType); - } - - /** - * Get the last poll data for all task types. - * - * @return list of {@link PollData} - */ - public List getAllPollData() { - return executionService.getAllPollData(); - } - - /** - * Requeue pending tasks. - * - * @param taskType Task name. - * @return number of tasks requeued. - */ - public String requeuePendingTask(String taskType) { - return String.valueOf(executionService.requeuePendingTasks(taskType)); - } - - /** - * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. - * sort=name or sort=workflowId. If order is not specified, defaults to ASC. 
- * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult search( - int start, int size, String sort, String freeText, String query) { - return executionService.getSearchTasks(query, freeText, start, size, sort); - } - - /** - * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. - * sort=name or sort=workflowId. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchV2( - int start, int size, String sort, String freeText, String query) { - return executionService.getSearchTasksV2(query, freeText, start, size, sort); - } - - /** - * Get the external storage location where the task output payload is stored/to be stored - * - * @param path the path for which the external storage location is to be populated - * @param operation the operation to be performed (read or write) - * @param type the type of payload (input or output) - * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is - * stored in external storage - */ - public ExternalStorageLocation getExternalStorageLocation( - String path, String operation, String type) { - try { - ExternalPayloadStorage.Operation payloadOperation = - ExternalPayloadStorage.Operation.valueOf(StringUtils.upperCase(operation)); - ExternalPayloadStorage.PayloadType payloadType = - ExternalPayloadStorage.PayloadType.valueOf(StringUtils.upperCase(type)); - return executionService.getExternalStorageLocation(payloadOperation, payloadType, path); - } catch (Exception e) { - // 
FIXME: for backwards compatibility - LOGGER.error( - "Invalid input - Operation: {}, PayloadType: {}, defaulting to WRITE/TASK_OUTPUT", - operation, - type); - return executionService.getExternalStorageLocation( - ExternalPayloadStorage.Operation.WRITE, - ExternalPayloadStorage.PayloadType.TASK_OUTPUT, - path); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java deleted file mode 100644 index 2c1ef0f7f..000000000 --- a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; - -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.Size; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.model.BulkResponse; - -@Validated -public interface WorkflowBulkService { - - int MAX_REQUEST_ITEMS = 1000; - - BulkResponse pauseWorkflow( - @NotEmpty(message = "WorkflowIds list cannot be null.") - @Size( - max = MAX_REQUEST_ITEMS, - message = - "Cannot process more than {max} workflows. Please use multiple requests.") - List workflowIds); - - BulkResponse resumeWorkflow( - @NotEmpty(message = "WorkflowIds list cannot be null.") - @Size( - max = MAX_REQUEST_ITEMS, - message = - "Cannot process more than {max} workflows. Please use multiple requests.") - List workflowIds); - - BulkResponse restart( - @NotEmpty(message = "WorkflowIds list cannot be null.") - @Size( - max = MAX_REQUEST_ITEMS, - message = - "Cannot process more than {max} workflows. Please use multiple requests.") - List workflowIds, - boolean useLatestDefinitions); - - BulkResponse retry( - @NotEmpty(message = "WorkflowIds list cannot be null.") - @Size( - max = MAX_REQUEST_ITEMS, - message = - "Cannot process more than {max} workflows. Please use multiple requests.") - List workflowIds); - - BulkResponse terminate( - @NotEmpty(message = "WorkflowIds list cannot be null.") - @Size( - max = MAX_REQUEST_ITEMS, - message = - "Cannot process more than {max} workflows. 
Please use multiple requests.") - List workflowIds, - String reason); -} diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java deleted file mode 100644 index c637b8bd9..000000000 --- a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.Audit; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.model.BulkResponse; -import com.netflix.conductor.core.execution.WorkflowExecutor; - -@Audit -@Trace -@Service -public class WorkflowBulkServiceImpl implements WorkflowBulkService { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowBulkService.class); - private final WorkflowExecutor workflowExecutor; - - public WorkflowBulkServiceImpl(WorkflowExecutor workflowExecutor) { - this.workflowExecutor = workflowExecutor; - } - - /** - * Pause the list of workflows. - * - * @param workflowIds - list of workflow Ids to perform pause operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - public BulkResponse pauseWorkflow(List workflowIds) { - - BulkResponse bulkResponse = new BulkResponse(); - for (String workflowId : workflowIds) { - try { - workflowExecutor.pauseWorkflow(workflowId); - bulkResponse.appendSuccessResponse(workflowId); - } catch (Exception e) { - LOGGER.error( - "bulk pauseWorkflow exception, workflowId {}, message: {} ", - workflowId, - e.getMessage(), - e); - bulkResponse.appendFailedResponse(workflowId, e.getMessage()); - } - } - - return bulkResponse; - } - - /** - * Resume the list of workflows. 
- * - * @param workflowIds - list of workflow Ids to perform resume operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - public BulkResponse resumeWorkflow(List workflowIds) { - BulkResponse bulkResponse = new BulkResponse(); - for (String workflowId : workflowIds) { - try { - workflowExecutor.resumeWorkflow(workflowId); - bulkResponse.appendSuccessResponse(workflowId); - } catch (Exception e) { - LOGGER.error( - "bulk resumeWorkflow exception, workflowId {}, message: {} ", - workflowId, - e.getMessage(), - e); - bulkResponse.appendFailedResponse(workflowId, e.getMessage()); - } - } - return bulkResponse; - } - - /** - * Restart the list of workflows. - * - * @param workflowIds - list of workflow Ids to perform restart operation on - * @param useLatestDefinitions if true, use latest workflow and task definitions upon restart - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - public BulkResponse restart(List workflowIds, boolean useLatestDefinitions) { - BulkResponse bulkResponse = new BulkResponse(); - for (String workflowId : workflowIds) { - try { - workflowExecutor.restart(workflowId, useLatestDefinitions); - bulkResponse.appendSuccessResponse(workflowId); - } catch (Exception e) { - LOGGER.error( - "bulk restart exception, workflowId {}, message: {} ", - workflowId, - e.getMessage(), - e); - bulkResponse.appendFailedResponse(workflowId, e.getMessage()); - } - } - return bulkResponse; - } - - /** - * Retry the last failed task for each workflow from the list. 
- * - * @param workflowIds - list of workflow Ids to perform retry operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - public BulkResponse retry(List workflowIds) { - BulkResponse bulkResponse = new BulkResponse(); - for (String workflowId : workflowIds) { - try { - workflowExecutor.retry(workflowId, false); - bulkResponse.appendSuccessResponse(workflowId); - } catch (Exception e) { - LOGGER.error( - "bulk retry exception, workflowId {}, message: {} ", - workflowId, - e.getMessage(), - e); - bulkResponse.appendFailedResponse(workflowId, e.getMessage()); - } - } - return bulkResponse; - } - - /** - * Terminate workflows execution. - * - * @param workflowIds - list of workflow Ids to perform terminate operation on - * @param reason - description to be specified for the terminated workflow for future - * references. - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - public BulkResponse terminate(List workflowIds, String reason) { - BulkResponse bulkResponse = new BulkResponse(); - for (String workflowId : workflowIds) { - try { - workflowExecutor.terminateWorkflow(workflowId, reason); - bulkResponse.appendSuccessResponse(workflowId); - } catch (Exception e) { - LOGGER.error( - "bulk terminate exception, workflowId {}, message: {} ", - workflowId, - e.getMessage(), - e); - bulkResponse.appendFailedResponse(workflowId, e.getMessage()); - } - } - return bulkResponse; - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowService.java b/core/src/main/java/com/netflix/conductor/service/WorkflowService.java deleted file mode 100644 index 10f9e1842..000000000 --- a/core/src/main/java/com/netflix/conductor/service/WorkflowService.java +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Map; - -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import org.springframework.validation.annotation.Validated; - -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; - -@Validated -public interface WorkflowService { - - /** - * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. - * - * @param startWorkflowRequest StartWorkflow request for the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - String startWorkflow( - @NotNull(message = "StartWorkflowRequest cannot be null") @Valid - StartWorkflowRequest startWorkflowRequest); - - /** - * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. 
- * @param input Input to the workflow you want to start. - * @param externalInputPayloadStoragePath - * @param taskToDomain - * @param workflowDef - workflow definition - * @return the id of the workflow instance that can be use for tracking. - */ - String startWorkflow( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - Integer version, - String correlationId, - Map input, - String externalInputPayloadStoragePath, - Map taskToDomain, - WorkflowDef workflowDef); - - /** - * Start a new workflow. Returns the ID of the workflow instance that can be later used for - * tracking. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - String startWorkflow( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - Integer version, - String correlationId, - Map input); - - /** - * Start a new workflow. Returns the ID of the workflow instance that can be later used for - * tracking. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. - * @param priority Priority of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - String startWorkflow( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - Integer version, - String correlationId, - @Min(value = 0, message = "0 is the minimum priority value") - @Max(value = 99, message = "99 is the maximum priority value") - Integer priority, - Map input); - - /** - * Start a new workflow. 
Returns the ID of the workflow instance that can be later used for - * tracking. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. - * @param priority Priority of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @param externalInputPayloadStoragePath - * @param taskToDomain - * @param workflowDef - workflow definition - * @return the id of the workflow instance that can be use for tracking. - */ - String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input, - String externalInputPayloadStoragePath, - Map taskToDomain, - WorkflowDef workflowDef); - - /** - * Lists workflows for the given correlation id. - * - * @param name Name of the workflow. - * @param correlationId CorrelationID of the workflow you want to list. - * @param includeClosed IncludeClosed workflow which are not running. - * @param includeTasks Includes tasks associated with workflows. - * @return a list of {@link Workflow} - */ - List getWorkflows( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - String correlationId, - boolean includeClosed, - boolean includeTasks); - - /** - * Lists workflows for the given correlation id. - * - * @param name Name of the workflow. - * @param includeClosed CorrelationID of the workflow you want to start. - * @param includeTasks IncludeClosed workflow which are not running. - * @param correlationIds Includes tasks associated with workflows. - * @return a {@link Map} of {@link String} as key and a list of {@link Workflow} as value - */ - Map> getWorkflows( - @NotEmpty(message = "Workflow name cannot be null or empty") String name, - boolean includeClosed, - boolean includeTasks, - List correlationIds); - - /** - * Gets the workflow by workflow Id. - * - * @param workflowId Id of the workflow. 
- * @param includeTasks Includes tasks associated with workflow. - * @return an instance of {@link Workflow} - */ - Workflow getExecutionStatus( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - boolean includeTasks); - - /** - * Removes the workflow from the system. - * - * @param workflowId WorkflowID of the workflow you want to remove from system. - * @param archiveWorkflow Archives the workflow. - */ - void deleteWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - boolean archiveWorkflow); - - /** - * Retrieves all the running workflows. - * - * @param workflowName Name of the workflow. - * @param version Version of the workflow. - * @param startTime Starttime of the workflow. - * @param endTime EndTime of the workflow - * @return a list of workflow Ids. - */ - List getRunningWorkflows( - @NotEmpty(message = "Workflow name cannot be null or empty.") String workflowName, - Integer version, - Long startTime, - Long endTime); - - /** - * Starts the decision task for a workflow. - * - * @param workflowId WorkflowId of the workflow. - */ - void decideWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Pauses the workflow given a worklfowId. - * - * @param workflowId WorkflowId of the workflow. - */ - void pauseWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Resumes the workflow. - * - * @param workflowId WorkflowId of the workflow. - */ - void resumeWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Skips a given task from a current running workflow. - * - * @param workflowId WorkflowId of the workflow. - * @param taskReferenceName The task reference name. - * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip. 
- */ - void skipTaskFromWorkflow( - @NotEmpty(message = "WorkflowId name cannot be null or empty.") String workflowId, - @NotEmpty(message = "TaskReferenceName cannot be null or empty.") - String taskReferenceName, - SkipTaskRequest skipTaskRequest); - - /** - * Reruns the workflow from a specific task. - * - * @param workflowId WorkflowId of the workflow you want to rerun. - * @param request (@link RerunWorkflowRequest) for the workflow. - * @return WorkflowId of the rerun workflow. - */ - String rerunWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - @NotNull(message = "RerunWorkflowRequest cannot be null.") - RerunWorkflowRequest request); - - /** - * Restarts a completed workflow. - * - * @param workflowId WorkflowId of the workflow. - * @param useLatestDefinitions if true, use the latest workflow and task definitions upon - * restart - */ - void restartWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - boolean useLatestDefinitions); - - /** - * Retries the last failed task. - * - * @param workflowId WorkflowId of the workflow. - */ - void retryWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - boolean resumeSubworkflowTasks); - - /** - * Resets callback times of all non-terminal SIMPLE tasks to 0. - * - * @param workflowId WorkflowId of the workflow. - */ - void resetWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); - - /** - * Terminate workflow execution. - * - * @param workflowId WorkflowId of the workflow. - * @param reason Reason for terminating the workflow. - */ - void terminateWorkflow( - @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, - String reason); - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
- * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflows( - int start, - @Max( - value = 5_000, - message = - "Cannot return more than {value} workflows. Please use pagination.") - int size, - String sort, - String freeText, - String query); - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsV2( - int start, - @Max( - value = 5_000, - message = - "Cannot return more than {value} workflows. Please use pagination.") - int size, - String sort, - String freeText, - String query); - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflows( - int start, - @Max( - value = 5_000, - message = - "Cannot return more than {value} workflows. Please use pagination.") - int size, - List sort, - String freeText, - String query); - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. 
If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsV2( - int start, - @Max( - value = 5_000, - message = - "Cannot return more than {value} workflows. Please use pagination.") - int size, - List sort, - String freeText, - String query); - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsByTasks( - int start, int size, String sort, String freeText, String query); - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsByTasksV2( - int start, int size, String sort, String freeText, String query); - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
- * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsByTasks( - int start, int size, List sort, String freeText, String query); - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - SearchResult searchWorkflowsByTasksV2( - int start, int size, List sort, String freeText, String query); - - /** - * Get the external storage location where the workflow input payload is stored/to be stored - * - * @param path the path for which the external storage location is to be populated - * @param operation the operation to be performed (read or write) - * @param payloadType the type of payload (input or output) - * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is - * stored in external storage - */ - ExternalStorageLocation getExternalStorageLocation( - String path, String operation, String payloadType); -} diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java deleted file mode 100644 index f1a84a13d..000000000 --- a/core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java +++ /dev/null @@ -1,545 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.annotations.Audit; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.utils.Utils; - -@Audit -@Trace -@Service -public class WorkflowServiceImpl implements WorkflowService { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowServiceImpl.class); - private final WorkflowExecutor workflowExecutor; - private final ExecutionService executionService; - private final MetadataService metadataService; - - public WorkflowServiceImpl( - WorkflowExecutor workflowExecutor, - ExecutionService executionService, - MetadataService metadataService) { - this.workflowExecutor = 
workflowExecutor; - this.executionService = executionService; - this.metadataService = metadataService; - } - - /** - * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. - * - * @param startWorkflowRequest StartWorkflow request for the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { - return startWorkflow( - startWorkflowRequest.getName(), - startWorkflowRequest.getVersion(), - startWorkflowRequest.getCorrelationId(), - startWorkflowRequest.getPriority(), - startWorkflowRequest.getInput(), - startWorkflowRequest.getExternalInputPayloadStoragePath(), - startWorkflowRequest.getTaskToDomain(), - startWorkflowRequest.getWorkflowDef()); - } - - /** - * Start a new workflow. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @param externalInputPayloadStoragePath the relative path in external storage where input - * payload is located - * @param taskToDomain the task to domain mapping - * @param workflowDef - workflow definition - * @return the id of the workflow instance that can be use for tracking. - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Map input, - String externalInputPayloadStoragePath, - Map taskToDomain, - WorkflowDef workflowDef) { - return startWorkflow( - name, - version, - correlationId, - 0, - input, - externalInputPayloadStoragePath, - taskToDomain, - workflowDef); - } - - /** - * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. 
- * @param correlationId CorrelationID of the workflow you want to start. - * @param priority Priority of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @param externalInputPayloadStoragePath the relative path in external storage where input * - * payload is located - * @param taskToDomain the task to domain mapping - * @param workflowDef - workflow definition - * @return the id of the workflow instance that can be use for tracking. - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input, - String externalInputPayloadStoragePath, - Map taskToDomain, - WorkflowDef workflowDef) { - - if (workflowDef == null) { - return workflowExecutor.startWorkflow( - name, - version, - correlationId, - priority, - input, - externalInputPayloadStoragePath, - null, - taskToDomain); - } else { - return workflowExecutor.startWorkflow( - workflowDef, - input, - externalInputPayloadStoragePath, - correlationId, - priority, - null, - taskToDomain); - } - } - - /** - * Start a new workflow. Returns the ID of the workflow instance that can be later used for - * tracking. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. - * @param correlationId CorrelationID of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - public String startWorkflow( - String name, Integer version, String correlationId, Map input) { - metadataService.getWorkflowDef(name, version); - return startWorkflow(name, version, correlationId, 0, input); - } - - /** - * Start a new workflow. Returns the ID of the workflow instance that can be later used for - * tracking. - * - * @param name Name of the workflow you want to start. - * @param version Version of the workflow you want to start. 
- * @param correlationId CorrelationID of the workflow you want to start. - * @param priority Priority of the workflow you want to start. - * @param input Input to the workflow you want to start. - * @return the id of the workflow instance that can be use for tracking. - */ - public String startWorkflow( - String name, - Integer version, - String correlationId, - Integer priority, - Map input) { - WorkflowDef workflowDef = metadataService.getWorkflowDef(name, version); - if (workflowDef == null) { - throw new ApplicationException( - ApplicationException.Code.NOT_FOUND, - String.format( - "No such workflow found by name: %s, version: %d", name, version)); - } - return workflowExecutor.startWorkflow( - workflowDef.getName(), - workflowDef.getVersion(), - correlationId, - priority, - input, - null); - } - - /** - * Lists workflows for the given correlation id. - * - * @param name Name of the workflow. - * @param correlationId CorrelationID of the workflow you want to start. - * @param includeClosed IncludeClosed workflow which are not running. - * @param includeTasks Includes tasks associated with workflows. - * @return a list of {@link Workflow} - */ - public List getWorkflows( - String name, String correlationId, boolean includeClosed, boolean includeTasks) { - return executionService.getWorkflowInstances( - name, correlationId, includeClosed, includeTasks); - } - - /** - * Lists workflows for the given correlation id. - * - * @param name Name of the workflow. - * @param includeClosed CorrelationID of the workflow you want to start. - * @param includeTasks IncludeClosed workflow which are not running. - * @param correlationIds Includes tasks associated with workflows. 
- * @return a {@link Map} of {@link String} as key and a list of {@link Workflow} as value - */ - public Map> getWorkflows( - String name, boolean includeClosed, boolean includeTasks, List correlationIds) { - Map> workflowMap = new HashMap<>(); - for (String correlationId : correlationIds) { - List workflows = - executionService.getWorkflowInstances( - name, correlationId, includeClosed, includeTasks); - workflowMap.put(correlationId, workflows); - } - return workflowMap; - } - - /** - * Gets the workflow by workflow id. - * - * @param workflowId id of the workflow. - * @param includeTasks Includes tasks associated with workflow. - * @return an instance of {@link Workflow} - */ - public Workflow getExecutionStatus(String workflowId, boolean includeTasks) { - Workflow workflow = executionService.getExecutionStatus(workflowId, includeTasks); - if (workflow == null) { - throw new ApplicationException( - ApplicationException.Code.NOT_FOUND, - String.format("Workflow with Id: %s not found.", workflowId)); - } - return workflow; - } - - /** - * Removes the workflow from the system. - * - * @param workflowId WorkflowID of the workflow you want to remove from system. - * @param archiveWorkflow Archives the workflow. - */ - public void deleteWorkflow(String workflowId, boolean archiveWorkflow) { - executionService.removeWorkflow(workflowId, archiveWorkflow); - } - - /** - * Retrieves all the running workflows. - * - * @param workflowName Name of the workflow. - * @param version Version of the workflow. - * @param startTime start time of the workflow. - * @param endTime EndTime of the workflow - * @return a list of workflow Ids. 
- */ - public List getRunningWorkflows( - String workflowName, Integer version, Long startTime, Long endTime) { - if (Optional.ofNullable(startTime).orElse(0L) != 0 - && Optional.ofNullable(endTime).orElse(0L) != 0) { - return workflowExecutor.getWorkflows(workflowName, version, startTime, endTime); - } else { - version = - Optional.ofNullable(version) - .orElseGet( - () -> { - WorkflowDef workflowDef = - metadataService.getWorkflowDef(workflowName, null); - return workflowDef.getVersion(); - }); - return workflowExecutor.getRunningWorkflowIds(workflowName, version); - } - } - - /** - * Starts the decision task for a workflow. - * - * @param workflowId WorkflowId of the workflow. - */ - public void decideWorkflow(String workflowId) { - workflowExecutor.decide(workflowId); - } - - /** - * Pauses the workflow given a workflowId. - * - * @param workflowId WorkflowId of the workflow. - */ - public void pauseWorkflow(String workflowId) { - workflowExecutor.pauseWorkflow(workflowId); - } - - /** - * Resumes the workflow. - * - * @param workflowId WorkflowId of the workflow. - */ - public void resumeWorkflow(String workflowId) { - workflowExecutor.resumeWorkflow(workflowId); - } - - /** - * Skips a given task from a current running workflow. - * - * @param workflowId WorkflowId of the workflow. - * @param taskReferenceName The task reference name. - * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip. - */ - public void skipTaskFromWorkflow( - String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { - workflowExecutor.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest); - } - - /** - * Reruns the workflow from a specific task. - * - * @param workflowId WorkflowId of the workflow you want to rerun. - * @param request (@link RerunWorkflowRequest) for the workflow. - * @return WorkflowId of the rerun workflow. 
- */ - public String rerunWorkflow(String workflowId, RerunWorkflowRequest request) { - request.setReRunFromWorkflowId(workflowId); - return workflowExecutor.rerun(request); - } - - /** - * Restarts a completed workflow. - * - * @param workflowId WorkflowId of the workflow. - * @param useLatestDefinitions if true, use the latest workflow and task definitions upon - * restart - */ - public void restartWorkflow(String workflowId, boolean useLatestDefinitions) { - workflowExecutor.restart(workflowId, useLatestDefinitions); - } - - /** - * Retries the last failed task. - * - * @param workflowId WorkflowId of the workflow. - */ - public void retryWorkflow(String workflowId, boolean resumeSubworkflowTasks) { - workflowExecutor.retry(workflowId, resumeSubworkflowTasks); - } - - /** - * Resets callback times of all non-terminal SIMPLE tasks to 0. - * - * @param workflowId WorkflowId of the workflow. - */ - public void resetWorkflow(String workflowId) { - workflowExecutor.resetCallbacksForWorkflow(workflowId); - } - - /** - * Terminate workflow execution. - * - * @param workflowId WorkflowId of the workflow. - * @param reason Reason for terminating the workflow. - */ - public void terminateWorkflow(String workflowId, String reason) { - workflowExecutor.terminateWorkflow(workflowId, reason); - } - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
- * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflows( - int start, int size, String sort, String freeText, String query) { - return executionService.search( - query, freeText, start, size, Utils.convertStringToList(sort)); - } - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsV2( - int start, int size, String sort, String freeText, String query) { - return executionService.searchV2( - query, freeText, start, size, Utils.convertStringToList(sort)); - } - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflows( - int start, int size, List sort, String freeText, String query) { - return executionService.search(query, freeText, start, size, sort); - } - - /** - * Search for workflows based on payload and given parameters. Use sort options as sort ASCor - * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
- * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsV2( - int start, int size, List sort, String freeText, String query) { - return executionService.searchV2(query, freeText, start, size, sort); - } - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsByTasks( - int start, int size, String sort, String freeText, String query) { - return executionService.searchWorkflowByTasks( - query, freeText, start, size, Utils.convertStringToList(sort)); - } - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort Sorting type ASC|DESC - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsByTasksV2( - int start, int size, String sort, String freeText, String query) { - return executionService.searchWorkflowByTasksV2( - query, freeText, start, size, Utils.convertStringToList(sort)); - } - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. 
If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsByTasks( - int start, int size, List sort, String freeText, String query) { - return executionService.searchWorkflowByTasks(query, freeText, start, size, sort); - } - - /** - * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. - * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. - * - * @param start Start index of pagination - * @param size Number of entries - * @param sort list of sorting options, separated by "|" delimiter - * @param freeText Text you want to search - * @param query Query you want to search - * @return instance of {@link SearchResult} - */ - public SearchResult searchWorkflowsByTasksV2( - int start, int size, List sort, String freeText, String query) { - return executionService.searchWorkflowByTasksV2(query, freeText, start, size, sort); - } - - /** - * Get the external storage location where the workflow input payload is stored/to be stored - * - * @param path the path for which the external storage location is to be populated - * @param operation the operation to be performed (read or write) - * @param type the type of payload (input or output) - * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is - * stored in external storage - */ - public ExternalStorageLocation getExternalStorageLocation( - String path, String operation, String type) { - try { - ExternalPayloadStorage.Operation payloadOperation = - ExternalPayloadStorage.Operation.valueOf(StringUtils.upperCase(operation)); - ExternalPayloadStorage.PayloadType payloadType = - 
ExternalPayloadStorage.PayloadType.valueOf(StringUtils.upperCase(type)); - return executionService.getExternalStorageLocation(payloadOperation, payloadType, path); - } catch (Exception e) { - // FIXME: for backwards compatibility - LOGGER.error( - "Invalid input - Operation: {}, PayloadType: {}, defaulting to WRITE/WORKFLOW_INPUT", - operation, - type); - return executionService.getExternalStorageLocation( - ExternalPayloadStorage.Operation.WRITE, - ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT, - path); - } - } -} diff --git a/core/src/main/java/com/netflix/conductor/validations/ValidationContext.java b/core/src/main/java/com/netflix/conductor/validations/ValidationContext.java deleted file mode 100644 index 56fbb22d3..000000000 --- a/core/src/main/java/com/netflix/conductor/validations/ValidationContext.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.validations; - -import com.netflix.conductor.dao.MetadataDAO; - -/** - * This context is defined to get access to {@link MetadataDAO} inside {@link - * WorkflowTaskTypeConstraint} constraint validator to validate {@link - * com.netflix.conductor.common.metadata.workflow.WorkflowTask}. - */ -public class ValidationContext { - - private static MetadataDAO metadataDAO; - - public static void initialize(MetadataDAO metadataDAO) { - ValidationContext.metadataDAO = metadataDAO; - } - - public static MetadataDAO getMetadataDAO() { - return metadataDAO; - } -} diff --git a/core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java b/core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java deleted file mode 100644 index 48a23c8d3..000000000 --- a/core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.validations; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.util.Optional; - -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.DateTimeUtils; - -import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter; -import static com.netflix.conductor.core.execution.tasks.Terminate.validateInputStatus; -import static com.netflix.conductor.core.execution.tasks.Wait.DURATION_INPUT; -import static com.netflix.conductor.core.execution.tasks.Wait.UNTIL_INPUT; - -import static java.lang.annotation.ElementType.ANNOTATION_TYPE; -import static java.lang.annotation.ElementType.TYPE; - -/** - * This constraint class validates following things. 1. Correct parameters are set depending on task - * type. 
- */ -@Documented -@Constraint(validatedBy = WorkflowTaskTypeConstraint.WorkflowTaskValidator.class) -@Target({TYPE, ANNOTATION_TYPE}) -@Retention(RetentionPolicy.RUNTIME) -public @interface WorkflowTaskTypeConstraint { - - String message() default ""; - - Class[] groups() default {}; - - Class[] payload() default {}; - - class WorkflowTaskValidator - implements ConstraintValidator { - - final String PARAM_REQUIRED_STRING_FORMAT = - "%s field is required for taskType: %s taskName: %s"; - - @Override - public void initialize(WorkflowTaskTypeConstraint constraintAnnotation) {} - - @Override - public boolean isValid(WorkflowTask workflowTask, ConstraintValidatorContext context) { - context.disableDefaultConstraintViolation(); - - boolean valid = true; - - // depending on task type check if required parameters are set or not - switch (workflowTask.getType()) { - case TaskType.TASK_TYPE_EVENT: - valid = isEventTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_DECISION: - valid = isDecisionTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_SWITCH: - valid = isSwitchTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_DYNAMIC: - valid = isDynamicTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC: - valid = isDynamicForkJoinValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_HTTP: - valid = isHttpTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_FORK_JOIN: - valid = isForkJoinTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_TERMINATE: - valid = isTerminateTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_KAFKA_PUBLISH: - valid = isKafkaPublishTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_DO_WHILE: - valid = isDoWhileTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_SUB_WORKFLOW: - valid = isSubWorkflowTaskValid(workflowTask, context); - break; - case 
TaskType.TASK_TYPE_JSON_JQ_TRANSFORM: - valid = isJSONJQTransformTaskValid(workflowTask, context); - break; - case TaskType.TASK_TYPE_WAIT: - valid = isWaitTaskValid(workflowTask, context); - break; - } - - return valid; - } - - private boolean isEventTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if (workflowTask.getSink() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "sink", - TaskType.TASK_TYPE_EVENT, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private boolean isDecisionTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if (workflowTask.getCaseValueParam() == null - && workflowTask.getCaseExpression() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "caseValueParam or caseExpression", - TaskType.DECISION, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - if (workflowTask.getDecisionCases() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "decisionCases", - TaskType.DECISION, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } else if ((workflowTask.getDecisionCases() != null - || workflowTask.getCaseExpression() != null) - && (workflowTask.getDecisionCases().size() == 0)) { - String message = - String.format( - "decisionCases should have atleast one task for taskType: %s taskName: %s", - TaskType.DECISION, workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private boolean isSwitchTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if 
(workflowTask.getEvaluatorType() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "evaluatorType", - TaskType.SWITCH, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } else if (workflowTask.getExpression() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "expression", - TaskType.SWITCH, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - if (workflowTask.getDecisionCases() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "decisionCases", - TaskType.SWITCH, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } else if (workflowTask.getDecisionCases() != null - && workflowTask.getDecisionCases().size() == 0) { - String message = - String.format( - "decisionCases should have atleast one task for taskType: %s taskName: %s", - TaskType.SWITCH, workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private boolean isDoWhileTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if (workflowTask.getLoopCondition() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "loopExpression", - TaskType.DO_WHILE, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - if (workflowTask.getLoopOver() == null || workflowTask.getLoopOver().size() == 0) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "loopover", - TaskType.DO_WHILE, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private 
boolean isDynamicTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if (workflowTask.getDynamicTaskNameParam() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "dynamicTaskNameParam", - TaskType.DYNAMIC, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - - private boolean isWaitTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - String duration = - Optional.ofNullable(workflowTask.getInputParameters().get(DURATION_INPUT)) - .orElse("") - .toString(); - String until = - Optional.ofNullable(workflowTask.getInputParameters().get(UNTIL_INPUT)) - .orElse("") - .toString(); - - if (StringUtils.isNotBlank(duration) && StringUtils.isNotBlank(until)) { - String message = - "Both 'duration' and 'until' specified. Please provide only one input"; - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - try { - if (StringUtils.isNotBlank(duration)) { - DateTimeUtils.parseDuration(duration); - } else if (StringUtils.isNotBlank(until)) { - DateTimeUtils.parseDate(until); - } - } catch (Exception e) { - String message = "Wait time specified is invalid. The duration must be in "; - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - - private boolean isDynamicForkJoinValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - - // For DYNAMIC_FORK_JOIN_TASK support dynamicForkJoinTasksParam or combination of - // dynamicForkTasksParam and dynamicForkTasksInputParamName. - // Both are not allowed. 
- if (workflowTask.getDynamicForkJoinTasksParam() != null - && (workflowTask.getDynamicForkTasksParam() != null - || workflowTask.getDynamicForkTasksInputParamName() != null)) { - String message = - String.format( - "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: %s taskName: %s", - TaskType.FORK_JOIN_DYNAMIC, workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - return false; - } - - if (workflowTask.getDynamicForkJoinTasksParam() != null) { - return valid; - } else { - if (workflowTask.getDynamicForkTasksParam() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "dynamicForkTasksParam", - TaskType.FORK_JOIN_DYNAMIC, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - if (workflowTask.getDynamicForkTasksInputParamName() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "dynamicForkTasksInputParamName", - TaskType.FORK_JOIN_DYNAMIC, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - } - - return valid; - } - - private boolean isHttpTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - boolean isInputParameterSet = false; - boolean isInputTemplateSet = false; - - // Either http_request in WorkflowTask inputParam should be set or in inputTemplate - // Taskdef should be set - if (workflowTask.getInputParameters() != null - && workflowTask.getInputParameters().containsKey("http_request")) { - isInputParameterSet = true; - } - - TaskDef taskDef = - Optional.ofNullable(workflowTask.getTaskDefinition()) - .orElse( - ValidationContext.getMetadataDAO() - .getTaskDef(workflowTask.getName())); - - if (taskDef != null - && taskDef.getInputTemplate() != null - && 
taskDef.getInputTemplate().containsKey("http_request")) { - isInputTemplateSet = true; - } - - if (!(isInputParameterSet || isInputTemplateSet)) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "inputParameters.http_request", - TaskType.HTTP, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - - private boolean isForkJoinTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - - if (workflowTask.getForkTasks() != null && (workflowTask.getForkTasks().size() == 0)) { - String message = - String.format( - "forkTasks should have atleast one task for taskType: %s taskName: %s", - TaskType.FORK_JOIN, workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - - private boolean isTerminateTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - Object inputStatusParam = - workflowTask.getInputParameters().get(getTerminationStatusParameter()); - if (workflowTask.isOptional()) { - String message = - String.format( - "terminate task cannot be optional, taskName: %s", - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - if (inputStatusParam == null || !validateInputStatus(inputStatusParam.toString())) { - String message = - String.format( - "terminate task must have an %s parameter and must be set to COMPLETED or FAILED, taskName: %s", - getTerminationStatusParameter(), workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private boolean isKafkaPublishTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - boolean isInputParameterSet = false; - 
boolean isInputTemplateSet = false; - - // Either kafka_request in WorkflowTask inputParam should be set or in inputTemplate - // Taskdef should be set - if (workflowTask.getInputParameters() != null - && workflowTask.getInputParameters().containsKey("kafka_request")) { - isInputParameterSet = true; - } - - TaskDef taskDef = - Optional.ofNullable(workflowTask.getTaskDefinition()) - .orElse( - ValidationContext.getMetadataDAO() - .getTaskDef(workflowTask.getName())); - - if (taskDef != null - && taskDef.getInputTemplate() != null - && taskDef.getInputTemplate().containsKey("kafka_request")) { - isInputTemplateSet = true; - } - - if (!(isInputParameterSet || isInputTemplateSet)) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "inputParameters.kafka_request", - TaskType.KAFKA_PUBLISH, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - - private boolean isSubWorkflowTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - if (workflowTask.getSubWorkflowParam() == null) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "subWorkflowParam", - TaskType.SUB_WORKFLOW, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - return valid; - } - - private boolean isJSONJQTransformTaskValid( - WorkflowTask workflowTask, ConstraintValidatorContext context) { - boolean valid = true; - boolean isInputParameterSet = false; - boolean isInputTemplateSet = false; - - // Either queryExpression in WorkflowTask inputParam should be set or in inputTemplate - // Taskdef should be set - if (workflowTask.getInputParameters() != null - && workflowTask.getInputParameters().containsKey("queryExpression")) { - isInputParameterSet = true; - } - - TaskDef taskDef = - Optional.ofNullable(workflowTask.getTaskDefinition()) - .orElse( 
- ValidationContext.getMetadataDAO() - .getTaskDef(workflowTask.getName())); - - if (taskDef != null - && taskDef.getInputTemplate() != null - && taskDef.getInputTemplate().containsKey("queryExpression")) { - isInputTemplateSet = true; - } - - if (!(isInputParameterSet || isInputTemplateSet)) { - String message = - String.format( - PARAM_REQUIRED_STRING_FORMAT, - "inputParameters.queryExpression", - TaskType.JSON_JQ_TRANSFORM, - workflowTask.getName()); - context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); - valid = false; - } - - return valid; - } - } -} diff --git a/core/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/core/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index b8d811422..000000000 --- a/core/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,139 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.workflow-reconciler.enabled", - "type": "java.lang.Boolean", - "description": "Enables the workflow reconciliation mechanism.", - "sourceType": "com.netflix.conductor.core.reconciliation.WorkflowReconciler", - "defaultValue": true - }, - { - "name": "conductor.sweep-frequency.millis", - "type": "java.lang.Integer", - "description": "The frequency in milliseconds, at which the workflow sweeper should evaluate active workflows.", - "sourceType": "com.netflix.conductor.core.reconciliation.WorkflowReconciler", - "defaultValue": 500 - }, - { - "name": "conductor.workflow-repair-service.enabled", - "type": "java.lang.Boolean", - "description": "Configuration to enable WorkflowRepairService, that tries to keep ExecutionDAO and QueueDAO in sync, based on the task or workflow state. 
This is disabled by default; To enable, the Queueing layer must implement QueueDAO.containsMessage method.", - "sourceType": "com.netflix.conductor.core.reconciliation.WorkflowRepairService" - }, - { - "name": "conductor.system-task-workers.enabled", - "type": "java.lang.Boolean", - "description": "Configuration to enable SystemTaskWorkerCoordinator, that polls and executes the asynchronous system tasks.", - "sourceType": "com.netflix.conductor.core.execution.tasks.SystemTaskWorkerCoordinator", - "defaultValue": true - }, - { - "name": "conductor.app.isolated-system-task-enabled", - "type": "java.lang.Boolean", - "description": "Used to enable/disable use of isolation groups for system task workers." - }, - { - "name": "conductor.app.isolatedSystemTaskPollIntervalSecs", - "type": "java.lang.Integer", - "description": "The time interval (in seconds) at which new isolated task queues will be polled and added to the system task queue repository." - }, - { - "name": "conductor.app.taskPendingTimeThresholdMins", - "type": "java.lang.Long", - "description": "The time threshold (in minutes) beyond which a warning log will be emitted for a task if it stays in the same state for this duration." - }, - { - "name": "conductor.workflow-monitor.enabled", - "type": "java.lang.Boolean", - "description": "Enables the workflow monitor that publishes workflow and task metrics.", - "defaultValue": "true", - "sourceType": "com.netflix.conductor.metrics.WorkflowMonitor" - }, - { - "name": "conductor.workflow-monitor.stats.initial-delay", - "type": "java.lang.Integer", - "description": "The initial delay (in milliseconds) at which the workflow monitor publishes workflow and task metrics." 
- }, - { - "name": "conductor.workflow-monitor.metadata-refresh-interval", - "type": "java.lang.Integer", - "description": "The interval (counter) after which the workflow monitor refreshes the metadata definitions from the datastore.", - "defaultValue": "10" - }, - { - "name": "conductor.workflow-monitor.stats.delay", - "type": "java.lang.Integer", - "description": "The delay (in milliseconds) at which the workflow monitor publishes workflow and task metrics." - }, - { - "name": "conductor.external-payload-storage.type", - "type": "java.lang.String", - "description": "The type of payload storage to be used for externalizing large payloads." - }, - { - "name": "conductor.default-event-processor.enabled", - "type": "java.lang.Boolean", - "description": "Enables the default event processor for handling events.", - "sourceType": "com.netflix.conductor.core.events.DefaultEventProcessor", - "defaultValue": "true" - }, - { - "name": "conductor.event-queues.default.enabled", - "type": "java.lang.Boolean", - "description": "Enables the use of the underlying queue implementation to provide queues for consuming events.", - "sourceType": "com.netflix.conductor.core.events.queue.ConductorEventQueueProvider", - "defaultValue": "true" - }, - { - "name": "conductor.default-event-queue-processor.enabled", - "type": "java.lang.Boolean", - "description": "Enables the processor for the default event queues that conductor is configured to listen on.", - "sourceType": "com.netflix.conductor.core.events.queue.DefaultEventQueueProcessor", - "defaultValue": "true" - }, - { - "name": "conductor.workflow-status-listener.type", - "type": "java.lang.String", - "description": "The implementation of the workflow status listener to be used." 
- }, - { - "name": "conductor.workflow-execution-lock.type", - "type": "java.lang.String", - "description": "The implementation of the workflow execution lock to be used.", - "defaultValue": "noop_lock" - } - ], - "hints": [ - { - "name": "conductor.external-payload-storage.type", - "values": [ - { - "value": "dummy", - "description": "Use the dummy no-op implementation as the external payload storage." - } - ] - }, - { - "name": "conductor.workflow-status-listener.type", - "values": [ - { - "value": "stub", - "description": "Use the no-op implementation of the workflow status listener." - } - ] - }, - { - "name": "conductor.workflow-execution-lock.type", - "values": [ - { - "value": "noop_lock", - "description": "Use the no-op implementation as the lock provider." - }, - { - "value": "local_only", - "description": "Use the local in-memory cache based implementation as the lock provider." - } - ] - } - ] -} diff --git a/core/src/main/resources/META-INF/validation.xml b/core/src/main/resources/META-INF/validation.xml deleted file mode 100644 index 4c8ec2ce9..000000000 --- a/core/src/main/resources/META-INF/validation.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - META-INF/validation/constraints.xml - - \ No newline at end of file diff --git a/core/src/main/resources/META-INF/validation/constraints.xml b/core/src/main/resources/META-INF/validation/constraints.xml deleted file mode 100644 index e23c210e9..000000000 --- a/core/src/main/resources/META-INF/validation/constraints.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - com.netflix.conductor.common.metadata.workflow - - - - - - - \ No newline at end of file diff --git a/core/src/test/groovy/com/netflix/conductor/core/execution/AsyncSystemTaskExecutorTest.groovy b/core/src/test/groovy/com/netflix/conductor/core/execution/AsyncSystemTaskExecutorTest.groovy deleted file mode 100644 index 2cc4a00bb..000000000 --- a/core/src/test/groovy/com/netflix/conductor/core/execution/AsyncSystemTaskExecutorTest.groovy +++ /dev/null @@ 
-1,392 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution - -import java.time.Duration - -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.core.config.ConductorProperties -import com.netflix.conductor.core.dal.ExecutionDAOFacade -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask -import com.netflix.conductor.core.utils.IDGenerator -import com.netflix.conductor.core.utils.QueueUtils -import com.netflix.conductor.dao.MetadataDAO -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.model.WorkflowModel - -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification -import spock.lang.Subject - -import static com.netflix.conductor.common.metadata.tasks.TaskType.SUB_WORKFLOW - -class AsyncSystemTaskExecutorTest extends Specification { - - ExecutionDAOFacade executionDAOFacade - QueueDAO queueDAO - MetadataDAO metadataDAO - WorkflowExecutor workflowExecutor - - @Subject - AsyncSystemTaskExecutor executor - - WorkflowSystemTask workflowSystemTask - ConductorProperties properties = new ConductorProperties() - - def setup() { - executionDAOFacade = Mock(ExecutionDAOFacade.class) - queueDAO = Mock(QueueDAO.class) - metadataDAO = Mock(MetadataDAO.class) - workflowExecutor = Mock(WorkflowExecutor.class) - - workflowSystemTask = Mock(WorkflowSystemTask.class) - - properties.taskExecutionPostponeDuration = Duration.ofSeconds(1) - properties.systemTaskWorkerCallbackDuration = Duration.ofSeconds(1) - - executor = new AsyncSystemTaskExecutor(executionDAOFacade, 
queueDAO, metadataDAO, properties, workflowExecutor) - } - - // this is not strictly a unit test, but its essential to test AsyncSystemTaskExecutor with SubWorkflow - def "Execute SubWorkflow task"() { - given: - String workflowId = "workflowId" - String subWorkflowId = "subWorkflowId" - SubWorkflow subWorkflowTask = new SubWorkflow(new ObjectMapper()) - - String task1Id = new IDGenerator().generate() - TaskModel task1 = new TaskModel() - task1.setTaskType(SUB_WORKFLOW.name()) - task1.setReferenceTaskName("waitTask") - task1.setWorkflowInstanceId(workflowId) - task1.setScheduledTime(System.currentTimeMillis()) - task1.setTaskId(task1Id) - task1.getInputData().put("asyncComplete", true) - task1.getInputData().put("subWorkflowName", "junit1") - task1.getInputData().put("subWorkflowVersion", 1) - task1.setStatus(TaskModel.Status.SCHEDULED) - - String queueName = QueueUtils.getQueueName(task1) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - WorkflowModel subWorkflow = new WorkflowModel(workflowId: subWorkflowId, status: WorkflowModel.Status.RUNNING) - - when: - executor.execute(subWorkflowTask, task1Id) - - then: - 1 * executionDAOFacade.getTaskModel(task1Id) >> task1 - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * workflowExecutor.startWorkflow(*_) >> subWorkflowId - 1 * workflowExecutor.getWorkflow(subWorkflowId, false) >> subWorkflow - - // SUB_WORKFLOW is asyncComplete so its removed from the queue - 1 * queueDAO.remove(queueName, task1Id) - - task1.status == TaskModel.Status.IN_PROGRESS - task1.subWorkflowId == subWorkflowId - task1.startTime != 0 - } - - def "Execute with a non-existing task id"() { - given: - String taskId = "taskId" - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> null - 0 * workflowSystemTask.start(*_) - 0 * executionDAOFacade.updateTask(_) - } - - def "Execute with a task id that fails 
to load"() { - given: - String taskId = "taskId" - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> { throw new RuntimeException("datastore unavailable") } - 0 * workflowSystemTask.start(*_) - 0 * executionDAOFacade.updateTask(_) - } - - def "Execute with a task id that is in terminal state"() { - given: - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.COMPLETED, taskId: taskId) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * queueDAO.remove(task.taskType, taskId) - 0 * workflowSystemTask.start(*_) - 0 * executionDAOFacade.updateTask(_) - } - - def "Execute with a task id that is part of a workflow in terminal state"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.COMPLETED) - String queueName = QueueUtils.getQueueName(task) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * queueDAO.remove(queueName, taskId) - - task.status == TaskModel.Status.CANCELED - task.startTime == 0 - } - - def "Execute with a task id that exceeds in-progress limit"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - workflowPriority: 10) - String queueName = QueueUtils.getQueueName(task) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * 
executionDAOFacade.exceedsInProgressLimit(task) >> true - 1 * queueDAO.postpone(queueName, taskId, task.workflowPriority, properties.taskExecutionPostponeDuration.seconds) - - task.status == TaskModel.Status.SCHEDULED - task.startTime == 0 - } - - def "Execute with a task id that is rate limited"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - rateLimitPerFrequency: 1, taskDefName: "taskDefName", workflowPriority: 10) - String queueName = QueueUtils.getQueueName(task) - TaskDef taskDef = new TaskDef() - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * metadataDAO.getTaskDef(task.taskDefName) >> taskDef - 1 * executionDAOFacade.exceedsRateLimitPerFrequency(task, taskDef) >> taskDef - 1 * queueDAO.postpone(queueName, taskId, task.workflowPriority, properties.taskExecutionPostponeDuration.seconds) - - task.status == TaskModel.Status.SCHEDULED - task.startTime == 0 - } - - def "Execute with a task id that is rate limited but postpone fails"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - rateLimitPerFrequency: 1, taskDefName: "taskDefName", workflowPriority: 10) - String queueName = QueueUtils.getQueueName(task) - TaskDef taskDef = new TaskDef() - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * metadataDAO.getTaskDef(task.taskDefName) >> taskDef - 1 * executionDAOFacade.exceedsRateLimitPerFrequency(task, taskDef) >> taskDef - 1 * queueDAO.postpone(queueName, taskId, task.workflowPriority, properties.taskExecutionPostponeDuration.seconds) >> { throw new RuntimeException("queue unavailable") } - - 
task.status == TaskModel.Status.SCHEDULED - task.startTime == 0 - } - - def "Execute with a task id that is in SCHEDULED state"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - taskDefName: "taskDefName", workflowPriority: 10) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - String queueName = QueueUtils.getQueueName(task) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) - 1 * queueDAO.postpone(queueName, taskId, task.workflowPriority, properties.systemTaskWorkerCallbackDuration.seconds) - 1 * workflowSystemTask.start(workflow, task, workflowExecutor) >> { task.status = TaskModel.Status.IN_PROGRESS } - - 0 * workflowExecutor.decide(workflowId) // verify that workflow is NOT decided - - task.status == TaskModel.Status.IN_PROGRESS - task.startTime != 0 // verify that startTime is set - task.endTime == 0 // verify that endTime is not set - task.pollCount == 1 // verify that poll count is incremented - task.callbackAfterSeconds == properties.systemTaskWorkerCallbackDuration.seconds - } - - def "Execute with a task id that is in SCHEDULED state and WorkflowSystemTask.start sets the task in a terminal state"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - taskDefName: "taskDefName", workflowPriority: 10) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - String queueName = QueueUtils.getQueueName(task) - - when: - executor.execute(workflowSystemTask, 
taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) - - 1 * workflowSystemTask.start(workflow, task, workflowExecutor) >> { task.status = TaskModel.Status.COMPLETED } - 1 * queueDAO.remove(queueName, taskId) - 1 * workflowExecutor.decide(workflowId) // verify that workflow is decided - - task.status == TaskModel.Status.COMPLETED - task.startTime != 0 // verify that startTime is set - task.endTime != 0 // verify that endTime is set - task.pollCount == 1 // verify that poll count is incremented - } - - def "Execute with a task id that is in SCHEDULED state but WorkflowSystemTask.start fails"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - taskDefName: "taskDefName", workflowPriority: 10) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) - - // simulating a "start" failure that happens after the Task object is modified - // the modification will be persisted - 1 * workflowSystemTask.start(workflow, task, workflowExecutor) >> { - task.status = TaskModel.Status.IN_PROGRESS - throw new RuntimeException("unknown system task failure") - } - - 0 * workflowExecutor.decide(workflowId) // verify that workflow is NOT decided - - task.status == TaskModel.Status.IN_PROGRESS - task.startTime != 0 // verify that startTime is set - task.endTime == 0 // verify that endTime is not set - task.pollCount == 1 // verify that poll count is incremented - } - - def "Execute with a task id that is in SCHEDULED 
state and is set to asyncComplete"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.SCHEDULED, taskId: taskId, workflowInstanceId: workflowId, - taskDefName: "taskDefName", workflowPriority: 10) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - String queueName = QueueUtils.getQueueName(task) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) // 1st call for pollCount, 2nd call for status update - - 1 * workflowSystemTask.isAsyncComplete(task) >> true - 1 * workflowSystemTask.start(workflow, task, workflowExecutor) >> { task.status = TaskModel.Status.IN_PROGRESS } - 1 * queueDAO.remove(queueName, taskId) - - 1 * workflowExecutor.decide(workflowId) // verify that workflow is decided - - task.status == TaskModel.Status.IN_PROGRESS - task.startTime != 0 // verify that startTime is set - task.endTime == 0 // verify that endTime is not set - task.pollCount == 1 // verify that poll count is incremented - } - - def "Execute with a task id that is in IN_PROGRESS state"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.IN_PROGRESS, taskId: taskId, workflowInstanceId: workflowId, - rateLimitPerFrequency: 1, taskDefName: "taskDefName", workflowPriority: 10, pollCount: 1) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) // 1st call for pollCount, 2nd 
call for status update - - 0 * workflowSystemTask.start(workflow, task, workflowExecutor) - 1 * workflowSystemTask.execute(workflow, task, workflowExecutor) - - task.status == TaskModel.Status.IN_PROGRESS - task.endTime == 0 // verify that endTime is not set - task.pollCount == 2 // verify that poll count is incremented - } - - def "Execute with a task id that is in IN_PROGRESS state and is set to asyncComplete"() { - given: - String workflowId = "workflowId" - String taskId = "taskId" - TaskModel task = new TaskModel(taskType: "type1", status: TaskModel.Status.IN_PROGRESS, taskId: taskId, workflowInstanceId: workflowId, - rateLimitPerFrequency: 1, taskDefName: "taskDefName", workflowPriority: 10, pollCount: 1) - WorkflowModel workflow = new WorkflowModel(workflowId: workflowId, status: WorkflowModel.Status.RUNNING) - - when: - executor.execute(workflowSystemTask, taskId) - - then: - 1 * executionDAOFacade.getTaskModel(taskId) >> task - 1 * executionDAOFacade.getWorkflowModel(workflowId, true) >> workflow - 1 * executionDAOFacade.updateTask(task) // only one call since pollCount is not incremented - - 1 * workflowSystemTask.isAsyncComplete(task) >> true - 0 * workflowSystemTask.start(workflow, task, workflowExecutor) - 1 * workflowSystemTask.execute(workflow, task, workflowExecutor) - - task.status == TaskModel.Status.IN_PROGRESS - task.endTime == 0 // verify that endTime is not set - task.pollCount == 1 // verify that poll count is NOT incremented - } - -} diff --git a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/DoWhileSpec.groovy b/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/DoWhileSpec.groovy deleted file mode 100644 index cc4d658d5..000000000 --- a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/DoWhileSpec.groovy +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.common.utils.TaskUtils -import com.netflix.conductor.core.exception.TerminateWorkflowException -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.core.utils.ParametersUtils -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.model.WorkflowModel - -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification -import spock.lang.Subject - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HTTP - -class DoWhileSpec extends Specification { - - @Subject - DoWhile doWhile - - ParametersUtils parametersUtils - WorkflowExecutor workflowExecutor - TaskModel doWhileTaskModel - - WorkflowTask task1, task2 - TaskModel taskModel1, taskModel2 - - def setup() { - workflowExecutor = Mock(WorkflowExecutor.class) - parametersUtils = new ParametersUtils(new ObjectMapper()) - - task1 = new WorkflowTask(name: 'task1', taskReferenceName: 'task1') - task2 = new WorkflowTask(name: 'task2', taskReferenceName: 'task2') - - doWhile = new DoWhile(parametersUtils) - } - - def "first iteration"() { - given: - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - doWhileWorkflowTask.loopCondition = "if (\$.doWhileTask['iteration'] < 1) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, 
task2] - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - - def workflowModel = new WorkflowModel() - workflowModel.tasks = [doWhileTaskModel] - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that return value is true, iteration value is updated in DO_WHILE TaskModel" - retVal - - and: "verify the iteration value" - doWhileTaskModel.iteration == 1 - doWhileTaskModel.outputData['iteration'] == 1 - - and: "verify whether the first task is scheduled" - 1 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - } - - def "next iteration - one iteration of all tasks inside DO_WHILE are complete"() { - given: "WorkflowModel consists of one iteration of tasks inside DO_WHILE already completed" - taskModel1 = createTaskModel(task1) - taskModel2 = createTaskModel(task2) - - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - doWhileWorkflowTask.loopCondition = "if (\$.doWhileTask['iteration'] < 2) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, task2] - - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - def workflowModel = new WorkflowModel(workflowDefinition: new WorkflowDef(name: 'test_workflow')) - // setup the WorkflowModel - workflowModel.tasks = [doWhileTaskModel, taskModel1, taskModel2] - - // this is the expected format of iteration 1's output data - def iteration1OutputData = [:] - iteration1OutputData[task1.taskReferenceName] = taskModel1.outputData - 
iteration1OutputData[task2.taskReferenceName] = taskModel2.outputData - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that the return value is true, since the iteration is updated" - retVal - - and: "verify that the DO_WHILE TaskModel is correct" - doWhileTaskModel.iteration == 2 - doWhileTaskModel.outputData['iteration'] == 2 - doWhileTaskModel.outputData['1'] == iteration1OutputData - doWhileTaskModel.status == TaskModel.Status.IN_PROGRESS - - and: "verify whether the first task in the next iteration is scheduled" - 1 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - - and: "verify that WorkflowExecutor.getTaskDefinition throws TerminateWorkflowException, execute method is not impacted" - 1 * workflowExecutor.getTaskDefinition(doWhileTaskModel) >> { throw new TerminateWorkflowException("") } - } - - def "next iteration - a task failed in the previous iteration"() { - given: "WorkflowModel consists of one iteration of tasks one of which is FAILED" - taskModel1 = createTaskModel(task1) - - taskModel2 = createTaskModel(task2, TaskModel.Status.FAILED) - taskModel2.reasonForIncompletion = 'no specific reason, i am tired of success' - - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - doWhileWorkflowTask.loopCondition = "if (\$.doWhileTask['iteration'] < 2) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, task2] - - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - def workflowModel = new WorkflowModel(workflowDefinition: new WorkflowDef(name: 'test_workflow')) - // setup the WorkflowModel - workflowModel.tasks = 
[doWhileTaskModel, taskModel1, taskModel2] - - // this is the expected format of iteration 1's output data - def iteration1OutputData = [:] - iteration1OutputData[task1.taskReferenceName] = taskModel1.outputData - iteration1OutputData[task2.taskReferenceName] = taskModel2.outputData - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that return value is true, status is updated" - retVal - - and: "verify the status and reasonForIncompletion fields" - doWhileTaskModel.iteration == 1 - doWhileTaskModel.outputData['iteration'] == 1 - doWhileTaskModel.outputData['1'] == iteration1OutputData - doWhileTaskModel.status == TaskModel.Status.FAILED - doWhileTaskModel.reasonForIncompletion && doWhileTaskModel.reasonForIncompletion.contains(taskModel2.reasonForIncompletion) - - and: "verify that next iteration is NOT scheduled" - 0 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - } - - def "next iteration - a task is in progress in the previous iteration"() { - given: "WorkflowModel consists of one iteration of tasks inside DO_WHILE already completed" - taskModel1 = createTaskModel(task1) - taskModel2 = createTaskModel(task2, TaskModel.Status.IN_PROGRESS) - taskModel2.outputData = [:] // no output data, task is in progress - - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - doWhileWorkflowTask.loopCondition = "if (\$.doWhileTask['iteration'] < 2) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, task2] - - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - def workflowModel = new WorkflowModel(workflowDefinition: new 
WorkflowDef(name: 'test_workflow')) - // setup the WorkflowModel - workflowModel.tasks = [doWhileTaskModel, taskModel1, taskModel2] - - // this is the expected format of iteration 1's output data - def iteration1OutputData = [:] - iteration1OutputData[task1.taskReferenceName] = taskModel1.outputData - iteration1OutputData[task2.taskReferenceName] = [:] - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that return value is false, since the DO_WHILE task model is not updated" - !retVal - - and: "verify that DO_WHILE task model is not modified" - doWhileTaskModel.iteration == 1 - doWhileTaskModel.outputData['iteration'] == 1 - doWhileTaskModel.outputData['1'] == iteration1OutputData - doWhileTaskModel.status == TaskModel.Status.IN_PROGRESS - - and: "verify that next iteration is NOT scheduled" - 0 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - } - - def "final step - all iterations are complete and all tasks in them are successful"() { - given: "WorkflowModel consists of one iteration of tasks inside DO_WHILE already completed" - taskModel1 = createTaskModel(task1) - taskModel2 = createTaskModel(task2) - - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - doWhileWorkflowTask.loopCondition = "if (\$.doWhileTask['iteration'] < 1) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, task2] - - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - def workflowModel = new WorkflowModel(workflowDefinition: new WorkflowDef(name: 'test_workflow')) - // setup the WorkflowModel - workflowModel.tasks = [doWhileTaskModel, taskModel1, 
taskModel2] - - // this is the expected format of iteration 1's output data - def iteration1OutputData = [:] - iteration1OutputData[task1.taskReferenceName] = taskModel1.outputData - iteration1OutputData[task2.taskReferenceName] = taskModel2.outputData - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that the return value is true, DO_WHILE TaskModel is updated" - retVal - - and: "verify the status and other fields are set correctly" - doWhileTaskModel.iteration == 1 - doWhileTaskModel.outputData['iteration'] == 1 - doWhileTaskModel.outputData['1'] == iteration1OutputData - doWhileTaskModel.status == TaskModel.Status.COMPLETED - - and: "verify that next iteration is not scheduled" - 0 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - } - - def "next iteration - one iteration of all tasks inside DO_WHILE are complete, but the condition is incorrect"() { - given: "WorkflowModel consists of one iteration of tasks inside DO_WHILE already completed" - taskModel1 = createTaskModel(task1) - taskModel2 = createTaskModel(task2) - - WorkflowTask doWhileWorkflowTask = new WorkflowTask(taskReferenceName: 'doWhileTask', type: TASK_TYPE_DO_WHILE) - // condition will produce a ScriptException - doWhileWorkflowTask.loopCondition = "if (dollar_sign_goes_here.doWhileTask['iteration'] < 2) { true; } else { false; }" - doWhileWorkflowTask.loopOver = [task1, task2] - - doWhileTaskModel = new TaskModel(workflowTask: doWhileWorkflowTask, taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE, referenceTaskName: doWhileWorkflowTask.taskReferenceName) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - def workflowModel = new WorkflowModel(workflowDefinition: new WorkflowDef(name: 'test_workflow')) - // setup the WorkflowModel - workflowModel.tasks = [doWhileTaskModel, taskModel1, taskModel2] - - // this 
is the expected format of iteration 1's output data - def iteration1OutputData = [:] - iteration1OutputData[task1.taskReferenceName] = taskModel1.outputData - iteration1OutputData[task2.taskReferenceName] = taskModel2.outputData - - when: - def retVal = doWhile.execute(workflowModel, doWhileTaskModel, workflowExecutor) - - then: "verify that the return value is true since DO_WHILE TaskModel is updated" - retVal - - and: "verify the status of DO_WHILE TaskModel" - doWhileTaskModel.iteration == 1 - doWhileTaskModel.outputData['iteration'] == 1 - doWhileTaskModel.outputData['1'] == iteration1OutputData - doWhileTaskModel.status == TaskModel.Status.FAILED_WITH_TERMINAL_ERROR - doWhileTaskModel.reasonForIncompletion != null - - and: "verify that next iteration is not scheduled" - 0 * workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflowModel) - } - - def "cancel sets the status as CANCELED"() { - given: - doWhileTaskModel = new TaskModel(taskId: UUID.randomUUID().toString(), - taskType: TASK_TYPE_DO_WHILE) - doWhileTaskModel.iteration = 1 - doWhileTaskModel.outputData['iteration'] = 1 - doWhileTaskModel.status = TaskModel.Status.IN_PROGRESS - - when: "cancel is called with null for WorkflowModel and WorkflowExecutor" - // null is used to note that those arguments are not intended to be used by this method - doWhile.cancel(null, doWhileTaskModel, null) - - then: - doWhileTaskModel.status == TaskModel.Status.CANCELED - } - - private static createTaskModel(WorkflowTask workflowTask, TaskModel.Status status = TaskModel.Status.COMPLETED, int iteration = 1) { - TaskModel taskModel1 = new TaskModel(workflowTask: workflowTask, taskType: TASK_TYPE_HTTP) - - taskModel1.status = status - taskModel1.outputData = ['k1': 'v1'] - taskModel1.iteration = iteration - taskModel1.referenceTaskName = TaskUtils.appendIteration(workflowTask.taskReferenceName, iteration) - - return taskModel1 - } -} diff --git 
a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/EventSpec.groovy b/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/EventSpec.groovy deleted file mode 100644 index a8a4451e6..000000000 --- a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/EventSpec.groovy +++ /dev/null @@ -1,300 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.core.events.EventQueues -import com.netflix.conductor.core.events.queue.Message -import com.netflix.conductor.core.events.queue.ObservableQueue -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.core.utils.ParametersUtils -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.model.WorkflowModel - -import com.fasterxml.jackson.core.JsonParseException -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification -import spock.lang.Subject - -class EventSpec extends Specification { - - EventQueues eventQueues - ParametersUtils parametersUtils - ObjectMapper objectMapper - ObservableQueue observableQueue - - String payloadJSON = "payloadJSON" - WorkflowDef testWorkflowDefinition - WorkflowModel workflow - - @Subject - Event event - - def setup() { - parametersUtils = Mock(ParametersUtils.class) - eventQueues = Mock(EventQueues.class) - observableQueue = Mock(ObservableQueue.class) - objectMapper = Mock(ObjectMapper.class) { - writeValueAsString(_) >> payloadJSON - } - - testWorkflowDefinition = new WorkflowDef(name: "testWorkflow", version: 2) - workflow = new WorkflowModel(workflowDefinition: testWorkflowDefinition, workflowId: 'workflowId', correlationId: 'corrId') - - event = new Event(eventQueues, parametersUtils, objectMapper) - } - - def "verify that event task is NOT async"() { - when: - def async = event.isAsync() - - then: - !async - } - - def "event cancel calls ack on the queue"() { - given: - // 
status is intentionally left as null - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': 'conductor']) - - String queueName = "conductor:${workflow.workflowName}:${task.referenceTaskName}" - - when: - event.cancel(workflow, task, null) - - then: - task.status == null // task status is NOT updated by the cancel method - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': 'conductor'] - 1 * eventQueues.getQueue(queueName) >> observableQueue - // Event.cancel sends a list with one Message object to ack - 1 * observableQueue.ack({it.size() == 1}) - } - - def "event task with 'conductor' as sink"() { - given: - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': 'conductor']) - - String queueName = "conductor:${workflow.workflowName}:${task.referenceTaskName}" - Message expectedMessage - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.COMPLETED - verifyOutputData(task, queueName) - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': 'conductor'] - 1 * eventQueues.getQueue(queueName) >> observableQueue - // capture the Message object sent to the publish method. 
Event.start sends a list with one Message object - 1 * observableQueue.publish({ it.size() == 1 }) >> { it -> expectedMessage = it[0][0] as Message } - - verifyMessage(expectedMessage, task) - } - - def "event task with 'conductor:' as sink"() { - given: - String eventName = 'testEvent' - String sinkValue = "conductor:$eventName".toString() - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': sinkValue]) - - String queueName = "conductor:${workflow.workflowName}:$eventName" - Message expectedMessage - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.COMPLETED - verifyOutputData(task, queueName) - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(queueName) >> observableQueue - // capture the Message object sent to the publish method. Event.start sends a list with one Message object - 1 * observableQueue.publish({ it.size() == 1 }) >> { it -> expectedMessage = it[0][0] as Message } - - verifyMessage(expectedMessage, task) - } - - def "event task with 'sqs' as sink"() { - given: - String eventName = 'testEvent' - String sinkValue = "sqs:$eventName".toString() - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': sinkValue]) - - // for non conductor queues, queueName is the same as the value of the 'sink' field in the inputData - String queueName = sinkValue - Message expectedMessage - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.COMPLETED - verifyOutputData(task, queueName) - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(queueName) >> observableQueue - // capture the Message object sent to the publish method. 
Event.start sends a list with one Message object - 1 * observableQueue.publish({ it.size() == 1 }) >> { it -> expectedMessage = it[0][0] as Message } - - verifyMessage(expectedMessage, task) - } - - def "event task with 'conductor' as sink and async complete"() { - given: - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': 'conductor', 'asyncComplete': true]) - - String queueName = "conductor:${workflow.workflowName}:${task.referenceTaskName}" - Message expectedMessage - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.IN_PROGRESS - verifyOutputData(task, queueName) - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': 'conductor'] - 1 * eventQueues.getQueue(queueName) >> observableQueue - // capture the Message object sent to the publish method. Event.start sends a list with one Message object - 1 * observableQueue.publish({ it.size() == 1 }) >> { args -> expectedMessage = args[0][0] as Message } - - verifyMessage(expectedMessage, task) - } - - def "event task with incorrect 'conductor' sink value"() { - given: - String sinkValue = 'conductorinvalidsink' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': sinkValue]) - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.FAILED - task.reasonForIncompletion != null - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - } - - def "event task with sink value that does not resolve to a queue"() { - given: - String sinkValue = 'rabbitmq:abc_123' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', inputData: ['sink': sinkValue]) - - // for non conductor queues, queueName is the same as the value of the 'sink' field in the inputData - String queueName = sinkValue - - when: - event.start(workflow, task, null) - - then: - task.status == 
TaskModel.Status.FAILED - task.reasonForIncompletion != null - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(queueName) >> {throw new IllegalArgumentException() } - } - - def "publishing to a queue throws a retryable ApplicationException"() { - given: - String sinkValue = 'conductor' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', status: TaskModel.Status.SCHEDULED, inputData: ['sink': sinkValue]) - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.SCHEDULED - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(_) >> observableQueue - // capture the Message object sent to the publish method. Event.start sends a list with one Message object - 1 * observableQueue.publish(_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "transient error") } - } - - def "publishing to a queue throws a non-retryable ApplicationException"() { - given: - String sinkValue = 'conductor' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', status: TaskModel.Status.SCHEDULED, inputData: ['sink': sinkValue]) - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.FAILED - task.reasonForIncompletion != null - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(_) >> observableQueue - // capture the Message object sent to the publish method. 
Event.start sends a list with one Message object - 1 * observableQueue.publish(_) >> { throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, "fatal error") } - } - - def "event task fails to convert the payload to json"() { - given: - String sinkValue = 'conductor' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', status: TaskModel.Status.SCHEDULED, inputData: ['sink': sinkValue]) - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.FAILED - task.reasonForIncompletion != null - - 1 * objectMapper.writeValueAsString(_ as Map) >> { throw new JsonParseException(null, "invalid json") } - } - - def "event task fails with an unexpected exception"() { - given: - String sinkValue = 'conductor' - - TaskModel task = new TaskModel(referenceTaskName: 'task0', taskId: 'task_id_0', status: TaskModel.Status.SCHEDULED, inputData: ['sink': sinkValue]) - - when: - event.start(workflow, task, null) - - then: - task.status == TaskModel.Status.FAILED - task.reasonForIncompletion != null - - 1 * parametersUtils.getTaskInputV2(_, workflow, task.taskId, _) >> ['sink': sinkValue] - 1 * eventQueues.getQueue(_) >> { throw new NullPointerException("some object is null") } - } - - private void verifyOutputData(TaskModel task, String queueName) { - assert task.outputData != null - assert task.outputData['event_produced'] == queueName - assert task.outputData['workflowInstanceId'] == workflow.workflowId - assert task.outputData['workflowVersion'] == workflow.workflowVersion - assert task.outputData['workflowType'] == workflow.workflowName - assert task.outputData['correlationId'] == workflow.correlationId - } - - private void verifyMessage(Message expectedMessage, TaskModel task) { - assert expectedMessage != null - assert expectedMessage.id == task.taskId - assert expectedMessage.receipt == task.taskId - assert expectedMessage.payload == payloadJSON - } -} diff --git 
a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducerSpec.groovy b/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducerSpec.groovy deleted file mode 100644 index 3673c0091..000000000 --- a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducerSpec.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks - -import java.time.Duration - -import org.junit.Test - -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.service.MetadataService - -import spock.lang.Specification -import spock.lang.Subject - -class IsolatedTaskQueueProducerSpec extends Specification { - - SystemTaskWorker systemTaskWorker - MetadataService metadataService - - @Subject - IsolatedTaskQueueProducer isolatedTaskQueueProducer - - def asyncSystemTask = new WorkflowSystemTask("asyncTask") { - @Override - boolean isAsync() { - return true - } - } - - def setup() { - systemTaskWorker = Mock(SystemTaskWorker.class) - metadataService = Mock(MetadataService.class) - - isolatedTaskQueueProducer = new IsolatedTaskQueueProducer(metadataService, [asyncSystemTask] as Set, systemTaskWorker, false, - Duration.ofSeconds(10)) - } - - @Test - def "addTaskQueuesAddsElementToQueue"() { - given: - TaskDef taskDef = new TaskDef(isolationGroupId: "isolated") - - when: - isolatedTaskQueueProducer.addTaskQueues() - - then: - 1 * systemTaskWorker.startPolling(asyncSystemTask, "${asyncSystemTask.taskType}-${taskDef.isolationGroupId}") - 1 * metadataService.getTaskDefs() >> Collections.singletonList(taskDef) - } -} diff --git a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/StartWorkflowSpec.groovy b/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/StartWorkflowSpec.groovy deleted file mode 100644 index 7ef18baae..000000000 --- a/core/src/test/groovy/com/netflix/conductor/core/execution/tasks/StartWorkflowSpec.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. 
- *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks - - -import javax.validation.ConstraintViolation -import javax.validation.Validator - -import com.netflix.conductor.common.config.ObjectMapperProvider -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.model.WorkflowModel - -import spock.lang.Specification -import spock.lang.Subject - -import static com.netflix.conductor.core.execution.tasks.StartWorkflow.START_WORKFLOW_PARAMETER -import static com.netflix.conductor.model.TaskModel.Status.FAILED -import static com.netflix.conductor.model.TaskModel.Status.SCHEDULED - -/** - * Unit test for StartWorkflow. Success and Javax validation cases are covered by the StartWorkflowSpec in test-harness module. 
- */ -class StartWorkflowSpec extends Specification { - - @Subject - StartWorkflow startWorkflow - - WorkflowExecutor workflowExecutor - Validator validator - WorkflowModel workflowModel - TaskModel taskModel - - def setup() { - workflowExecutor = Mock(WorkflowExecutor.class) - validator = Mock(Validator.class) { - validate(_) >> new HashSet>() - } - - def inputData = [:] - inputData[START_WORKFLOW_PARAMETER] = ['name': 'some_workflow'] - taskModel = new TaskModel(status: SCHEDULED, inputData: inputData) - workflowModel = new WorkflowModel() - - startWorkflow = new StartWorkflow(new ObjectMapperProvider().getObjectMapper(), validator) - } - - def "StartWorkflow task is asynchronous"() { - expect: - startWorkflow.isAsync() - } - - def "startWorkflow parameter is missing"() { - given: "a task with no start_workflow in input" - taskModel.inputData = [:] - - when: - startWorkflow.start(workflowModel, taskModel, workflowExecutor) - - then: - taskModel.status == FAILED - taskModel.reasonForIncompletion != null - } - - def "ObjectMapper throws an IllegalArgumentException"() { - given: "a task with no start_workflow in input" - taskModel.inputData[START_WORKFLOW_PARAMETER] = "I can't be converted to StartWorkflowRequest" - - when: - startWorkflow.start(workflowModel, taskModel, workflowExecutor) - - then: - taskModel.status == FAILED - taskModel.reasonForIncompletion != null - } - - def "WorkflowExecutor throws a retryable exception"() { - when: - startWorkflow.start(workflowModel, taskModel, workflowExecutor) - - then: - taskModel.status == SCHEDULED - 1 * workflowExecutor.startWorkflow(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "") } - } - - def "WorkflowExecutor throws a non-retryable ApplicationException"() { - when: - startWorkflow.start(workflowModel, taskModel, workflowExecutor) - - then: - taskModel.status == FAILED - taskModel.reasonForIncompletion != null - 1 * workflowExecutor.startWorkflow(*_) >> { throw new 
ApplicationException(ApplicationException.Code.NOT_FOUND, "") } - } - - def "WorkflowExecutor throws a RuntimeException"() { - when: - startWorkflow.start(workflowModel, taskModel, workflowExecutor) - - then: - taskModel.status == FAILED - taskModel.reasonForIncompletion != null - 1 * workflowExecutor.startWorkflow(*_) >> { throw new RuntimeException("I am an unexpected exception") } - } -} diff --git a/core/src/test/groovy/com/netflix/conductor/model/TaskModelSpec.groovy b/core/src/test/groovy/com/netflix/conductor/model/TaskModelSpec.groovy deleted file mode 100644 index c95f8edb6..000000000 --- a/core/src/test/groovy/com/netflix/conductor/model/TaskModelSpec.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.model - -import com.netflix.conductor.common.config.ObjectMapperProvider - -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification -import spock.lang.Subject - -class TaskModelSpec extends Specification { - - @Subject - TaskModel taskModel - - private static final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper() - - def setup() { - taskModel = new TaskModel() - } - - def "check inputData serialization"() { - given: - String path = "task/input/${UUID.randomUUID()}.json" - taskModel.addInput(['key1': 'value1', 'key2': 'value2']) - taskModel.externalizeInput(path) - - when: - def json = objectMapper.writeValueAsString(taskModel) - println(json) - - then: - json != null - JsonNode node = objectMapper.readTree(json) - node.path("inputData").isEmpty() - node.path("externalInputPayloadStoragePath").isTextual() - } - - def "check outputData serialization"() { - given: - String path = "task/output/${UUID.randomUUID()}.json" - taskModel.addOutput(['key1': 'value1', 'key2': 'value2']) - taskModel.externalizeOutput(path) - - when: - def json = objectMapper.writeValueAsString(taskModel) - println(json) - - then: - json != null - JsonNode node = objectMapper.readTree(json) - node.path("outputData").isEmpty() - node.path("externalOutputPayloadStoragePath").isTextual() - } -} diff --git a/core/src/test/groovy/com/netflix/conductor/model/WorkflowModelSpec.groovy b/core/src/test/groovy/com/netflix/conductor/model/WorkflowModelSpec.groovy deleted file mode 100644 index 3b7763a23..000000000 --- 
a/core/src/test/groovy/com/netflix/conductor/model/WorkflowModelSpec.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.model - -import com.netflix.conductor.common.config.ObjectMapperProvider -import com.netflix.conductor.common.metadata.workflow.WorkflowDef - -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper -import spock.lang.Specification -import spock.lang.Subject - -class WorkflowModelSpec extends Specification { - - @Subject - WorkflowModel workflowModel - - private static final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper() - - def setup() { - def workflowDef = new WorkflowDef(name: "test def name", version: 1) - workflowModel = new WorkflowModel(workflowDefinition: workflowDef) - } - - def "check input serialization"() { - given: - String path = "task/input/${UUID.randomUUID()}.json" - workflowModel.input = ['key1': 'value1', 'key2': 'value2'] - workflowModel.externalizeInput(path) - - when: - def json = objectMapper.writeValueAsString(workflowModel) - println(json) - - then: - json != null - JsonNode node = objectMapper.readTree(json) - node.path("input").isEmpty() - node.path("externalInputPayloadStoragePath").isTextual() - } - - def "check output serialization"() { - given: - String path = "task/output/${UUID.randomUUID()}.json" - workflowModel.output = ['key1': 'value1', 'key2': 'value2'] - workflowModel.externalizeOutput(path) - - when: - def json = objectMapper.writeValueAsString(workflowModel) - println(json) - - then: - json != null - JsonNode node = objectMapper.readTree(json) - node.path("output").isEmpty() - node.path("externalOutputPayloadStoragePath").isTextual() - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/TestUtils.java b/core/src/test/java/com/netflix/conductor/TestUtils.java deleted file mode 100644 index 41f1377e4..000000000 --- a/core/src/test/java/com/netflix/conductor/TestUtils.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor; - -import java.util.HashSet; -import java.util.Set; -import java.util.stream.Collectors; - -import javax.validation.ConstraintViolation; - -public class TestUtils { - - public static Set getConstraintViolationMessages( - Set> constraintViolations) { - Set messages = new HashSet<>(constraintViolations.size()); - messages.addAll( - constraintViolations.stream() - .map(ConstraintViolation::getMessage) - .collect(Collectors.toList())); - return messages; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/dal/ExecutionDAOFacadeTest.java b/core/src/test/java/com/netflix/conductor/core/dal/ExecutionDAOFacadeTest.java deleted file mode 100644 index 80a266acc..000000000 --- a/core/src/test/java/com/netflix/conductor/core/dal/ExecutionDAOFacadeTest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.dal; - -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.TestDeciderService; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.dao.*; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class ExecutionDAOFacadeTest { - - private ExecutionDAO executionDAO; - private IndexDAO indexDAO; - private ExecutionDAOFacade executionDAOFacade; - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setUp() { - executionDAO = mock(ExecutionDAO.class); - QueueDAO queueDAO = 
mock(QueueDAO.class); - indexDAO = mock(IndexDAO.class); - RateLimitingDAO rateLimitingDao = mock(RateLimitingDAO.class); - ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO = - mock(ConcurrentExecutionLimitDAO.class); - PollDataDAO pollDataDAO = mock(PollDataDAO.class); - ConductorProperties properties = mock(ConductorProperties.class); - when(properties.isEventExecutionIndexingEnabled()).thenReturn(true); - when(properties.isAsyncIndexingEnabled()).thenReturn(true); - executionDAOFacade = - new ExecutionDAOFacade( - executionDAO, - queueDAO, - indexDAO, - rateLimitingDao, - concurrentExecutionLimitDAO, - pollDataDAO, - objectMapper, - properties, - externalPayloadStorageUtils); - } - - @Test - public void testGetWorkflow() throws Exception { - when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(new WorkflowModel()); - Workflow workflow = executionDAOFacade.getWorkflow("workflowId", true); - assertNotNull(workflow); - verify(indexDAO, never()).get(any(), any()); - } - - @Test - public void testGetWorkflowModel() throws Exception { - when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(new WorkflowModel()); - WorkflowModel workflowModel = executionDAOFacade.getWorkflowModel("workflowId", true); - assertNotNull(workflowModel); - verify(indexDAO, never()).get(any(), any()); - - when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(null); - InputStream stream = ExecutionDAOFacadeTest.class.getResourceAsStream("/test.json"); - byte[] bytes = IOUtils.toByteArray(stream); - String jsonString = new String(bytes); - when(indexDAO.get(any(), any())).thenReturn(jsonString); - workflowModel = executionDAOFacade.getWorkflowModel("wokflowId", true); - assertNotNull(workflowModel); - verify(indexDAO, times(1)).get(any(), any()); - } - - @Test - public void testGetWorkflowsByCorrelationId() { - when(executionDAO.canSearchAcrossWorkflows()).thenReturn(true); - when(executionDAO.getWorkflowsByCorrelationId(any(), any(), anyBoolean())) - 
.thenReturn(Collections.singletonList(new WorkflowModel())); - List workflows = - executionDAOFacade.getWorkflowsByCorrelationId( - "workflowName", "correlationId", true); - - assertNotNull(workflows); - assertEquals(1, workflows.size()); - verify(indexDAO, never()) - .searchWorkflows(anyString(), anyString(), anyInt(), anyInt(), any()); - - when(executionDAO.canSearchAcrossWorkflows()).thenReturn(false); - List workflowIds = new ArrayList<>(); - workflowIds.add("workflowId"); - SearchResult searchResult = new SearchResult<>(); - searchResult.setResults(workflowIds); - when(indexDAO.searchWorkflows(anyString(), anyString(), anyInt(), anyInt(), any())) - .thenReturn(searchResult); - when(executionDAO.getWorkflow("workflowId", true)).thenReturn(new WorkflowModel()); - workflows = - executionDAOFacade.getWorkflowsByCorrelationId( - "workflowName", "correlationId", true); - assertNotNull(workflows); - assertEquals(1, workflows.size()); - } - - @Test - public void testRemoveWorkflow() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - when(executionDAO.getWorkflow(anyString(), anyBoolean())).thenReturn(workflow); - executionDAOFacade.removeWorkflow("workflowId", false); - verify(indexDAO, never()).updateWorkflow(any(), any(), any()); - verify(indexDAO, times(1)).asyncRemoveWorkflow(workflow.getWorkflowId()); - } - - @Test - public void testArchiveWorkflow() throws Exception { - InputStream stream = TestDeciderService.class.getResourceAsStream("/completed.json"); - WorkflowModel workflow = objectMapper.readValue(stream, WorkflowModel.class); - - when(executionDAO.getWorkflow(anyString(), anyBoolean())).thenReturn(workflow); - executionDAOFacade.removeWorkflow("workflowId", true); - verify(indexDAO, times(1)).updateWorkflow(any(), any(), any()); - verify(indexDAO, never()).removeWorkflow(any()); - } - - @Test - public void testAddEventExecution() { - when(executionDAO.addEventExecution(any())).thenReturn(false); - 
boolean added = executionDAOFacade.addEventExecution(new EventExecution()); - assertFalse(added); - verify(indexDAO, never()).addEventExecution(any()); - - when(executionDAO.addEventExecution(any())).thenReturn(true); - added = executionDAOFacade.addEventExecution(new EventExecution()); - assertTrue(added); - verify(indexDAO, times(1)).asyncAddEventExecution(any()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java b/core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java deleted file mode 100644 index 9391aea63..000000000 --- a/core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.Comparator; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; -import java.util.stream.Collectors; - -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; - -import rx.Observable; - -public class MockObservableQueue implements ObservableQueue { - - private final String uri; - private final String name; - private final String type; - private final Set messages = new TreeSet<>(Comparator.comparing(Message::getId)); - - public MockObservableQueue(String uri, String name, String type) { - this.uri = uri; - this.name = name; - this.type = type; - } - - @Override - public Observable observe() { - return Observable.from(messages); - } - - public String getType() { - return type; - } - - @Override - public String getName() { - return name; - } - - @Override - public String getURI() { - return uri; - } - - @Override - public List ack(List msgs) { - messages.removeAll(msgs); - return msgs.stream().map(Message::getId).collect(Collectors.toList()); - } - - @Override - public void publish(List messages) { - this.messages.addAll(messages); - } - - @Override - public void setUnackTimeout(Message message, long unackTimeout) {} - - @Override - public long size() { - return messages.size(); - } - - @Override - public String toString() { - return "MockObservableQueue [uri=" + uri + ", name=" + name + ", type=" + type + "]"; - } - - @Override - public void start() {} - - @Override - public void stop() {} - - @Override - public boolean isRunning() { - return false; - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java b/core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java deleted file mode 100644 index cad61a9bd..000000000 --- a/core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import org.springframework.lang.NonNull; - -import com.netflix.conductor.core.events.queue.ObservableQueue; - -public class MockQueueProvider implements EventQueueProvider { - - private final String type; - - public MockQueueProvider(String type) { - this.type = type; - } - - @Override - public String getQueueType() { - return "mock"; - } - - @Override - @NonNull - public ObservableQueue getQueue(String queueURI) { - return new MockObservableQueue(queueURI, queueURI, type); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java b/core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java deleted file mode 100644 index 855890402..000000000 --- a/core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java +++ /dev/null @@ -1,526 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.*; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.stubbing.Answer; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.events.EventHandler.Action; -import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; -import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow; -import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails; -import com.netflix.conductor.core.config.ConductorCoreConfiguration; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import 
com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.JsonUtils; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.MetadataService; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@ContextConfiguration( - classes = { - TestObjectMapperConfiguration.class, - TestDefaultEventProcessor.TestConfiguration.class, - ConductorCoreConfiguration.class - }) -@RunWith(SpringRunner.class) -public class TestDefaultEventProcessor { - - private String event; - private ObservableQueue queue; - private MetadataService metadataService; - private ExecutionService executionService; - private WorkflowExecutor workflowExecutor; - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - private SimpleActionProcessor actionProcessor; - private ParametersUtils parametersUtils; - private JsonUtils jsonUtils; - private ConductorProperties properties; - private Message message; - - @Autowired private Map evaluators; - - @Autowired private ObjectMapper objectMapper; - - @Autowired - private @Qualifier("onTransientErrorRetryTemplate") RetryTemplate retryTemplate; - - @Configuration - @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans - public static class TestConfiguration {} - - @Before - public void setup() { - event = "sqs:arn:account090:sqstest1"; - String queueURI = "arn:account090:sqstest1"; - - metadataService = mock(MetadataService.class); - executionService = mock(ExecutionService.class); - workflowExecutor = 
mock(WorkflowExecutor.class); - externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class); - actionProcessor = mock(SimpleActionProcessor.class); - parametersUtils = new ParametersUtils(objectMapper); - jsonUtils = new JsonUtils(objectMapper); - - queue = mock(ObservableQueue.class); - message = - new Message( - "t0", - "{\"Type\":\"Notification\",\"MessageId\":\"7e4e6415-01e9-5caf-abaa-37fd05d446ff\",\"Message\":\"{\\n \\\"testKey1\\\": \\\"level1\\\",\\n \\\"metadata\\\": {\\n \\\"testKey2\\\": 123456 }\\n }\",\"Timestamp\":\"2018-08-10T21:22:05.029Z\",\"SignatureVersion\":\"1\"}", - "t0"); - - when(queue.getURI()).thenReturn(queueURI); - when(queue.getName()).thenReturn(queueURI); - when(queue.getType()).thenReturn("sqs"); - - properties = mock(ConductorProperties.class); - when(properties.isEventMessageIndexingEnabled()).thenReturn(true); - when(properties.getEventProcessorThreadCount()).thenReturn(2); - } - - @Test - public void testEventProcessor() { - // setup event handler - EventHandler eventHandler = new EventHandler(); - eventHandler.setName(UUID.randomUUID().toString()); - eventHandler.setActive(true); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "dev"); - - Action startWorkflowAction = new Action(); - startWorkflowAction.setAction(Type.start_workflow); - startWorkflowAction.setStart_workflow(new StartWorkflow()); - startWorkflowAction.getStart_workflow().setName("workflow_x"); - startWorkflowAction.getStart_workflow().setVersion(1); - startWorkflowAction.getStart_workflow().setTaskToDomain(taskToDomain); - eventHandler.getActions().add(startWorkflowAction); - - Action completeTaskAction = new Action(); - completeTaskAction.setAction(Type.complete_task); - completeTaskAction.setComplete_task(new TaskDetails()); - completeTaskAction.getComplete_task().setTaskRefName("task_x"); - completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString()); - completeTaskAction.getComplete_task().setOutput(new 
HashMap<>()); - eventHandler.getActions().add(completeTaskAction); - - eventHandler.setEvent(event); - - when(metadataService.getEventHandlersForEvent(event, true)) - .thenReturn(Collections.singletonList(eventHandler)); - when(executionService.addEventExecution(any())).thenReturn(true); - when(queue.rePublishIfNoAck()).thenReturn(false); - - String id = UUID.randomUUID().toString(); - AtomicBoolean started = new AtomicBoolean(false); - doAnswer( - (Answer) - invocation -> { - started.set(true); - return id; - }) - .when(workflowExecutor) - .startWorkflow( - eq(startWorkflowAction.getStart_workflow().getName()), - eq(startWorkflowAction.getStart_workflow().getVersion()), - eq(startWorkflowAction.getStart_workflow().getCorrelationId()), - anyMap(), - eq(null), - eq(event), - anyMap()); - - AtomicBoolean completed = new AtomicBoolean(false); - doAnswer( - (Answer) - invocation -> { - completed.set(true); - return null; - }) - .when(workflowExecutor) - .updateTask(any()); - - TaskModel task = new TaskModel(); - task.setReferenceTaskName(completeTaskAction.getComplete_task().getTaskRefName()); - WorkflowModel workflow = new WorkflowModel(); - workflow.setTasks(Collections.singletonList(task)); - when(workflowExecutor.getWorkflow( - completeTaskAction.getComplete_task().getWorkflowId(), true)) - .thenReturn(workflow); - doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any()); - - SimpleActionProcessor actionProcessor = - new SimpleActionProcessor(workflowExecutor, parametersUtils, jsonUtils); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - eventProcessor.handle(queue, message); - assertTrue(started.get()); - assertTrue(completed.get()); - verify(queue, atMost(1)).ack(any()); - verify(queue, never()).publish(any()); - } - - @Test - public void testEventHandlerWithCondition() { - EventHandler 
eventHandler = new EventHandler(); - eventHandler.setName("cms_intermediate_video_ingest_handler"); - eventHandler.setActive(true); - eventHandler.setEvent("sqs:dev_cms_asset_ingest_queue"); - eventHandler.setCondition( - "$.Message.testKey1 == 'level1' && $.Message.metadata.testKey2 == 123456"); - - Map startWorkflowInput = new LinkedHashMap<>(); - startWorkflowInput.put("param1", "${Message.metadata.testKey2}"); - startWorkflowInput.put("param2", "SQS-${MessageId}"); - - Action startWorkflowAction = new Action(); - startWorkflowAction.setAction(Type.start_workflow); - startWorkflowAction.setStart_workflow(new StartWorkflow()); - startWorkflowAction.getStart_workflow().setName("cms_artwork_automation"); - startWorkflowAction.getStart_workflow().setVersion(1); - startWorkflowAction.getStart_workflow().setInput(startWorkflowInput); - startWorkflowAction.setExpandInlineJSON(true); - eventHandler.getActions().add(startWorkflowAction); - - eventHandler.setEvent(event); - - when(metadataService.getEventHandlersForEvent(event, true)) - .thenReturn(Collections.singletonList(eventHandler)); - when(executionService.addEventExecution(any())).thenReturn(true); - when(queue.rePublishIfNoAck()).thenReturn(false); - - String id = UUID.randomUUID().toString(); - AtomicBoolean started = new AtomicBoolean(false); - doAnswer( - (Answer) - invocation -> { - started.set(true); - return id; - }) - .when(workflowExecutor) - .startWorkflow( - eq(startWorkflowAction.getStart_workflow().getName()), - eq(startWorkflowAction.getStart_workflow().getVersion()), - eq(startWorkflowAction.getStart_workflow().getCorrelationId()), - anyMap(), - eq(null), - eq(event), - eq(null)); - - SimpleActionProcessor actionProcessor = - new SimpleActionProcessor(workflowExecutor, parametersUtils, jsonUtils); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); 
- eventProcessor.handle(queue, message); - assertTrue(started.get()); - } - - @Test - public void testEventHandlerWithConditionEvaluator() { - EventHandler eventHandler = new EventHandler(); - eventHandler.setName("cms_intermediate_video_ingest_handler"); - eventHandler.setActive(true); - eventHandler.setEvent("sqs:dev_cms_asset_ingest_queue"); - eventHandler.setEvaluatorType(JavascriptEvaluator.NAME); - eventHandler.setCondition( - "$.Message.testKey1 == 'level1' && $.Message.metadata.testKey2 == 123456"); - - Map startWorkflowInput = new LinkedHashMap<>(); - startWorkflowInput.put("param1", "${Message.metadata.testKey2}"); - startWorkflowInput.put("param2", "SQS-${MessageId}"); - - Action startWorkflowAction = new Action(); - startWorkflowAction.setAction(Type.start_workflow); - startWorkflowAction.setStart_workflow(new StartWorkflow()); - startWorkflowAction.getStart_workflow().setName("cms_artwork_automation"); - startWorkflowAction.getStart_workflow().setVersion(1); - startWorkflowAction.getStart_workflow().setInput(startWorkflowInput); - startWorkflowAction.setExpandInlineJSON(true); - eventHandler.getActions().add(startWorkflowAction); - - eventHandler.setEvent(event); - - when(metadataService.getEventHandlersForEvent(event, true)) - .thenReturn(Collections.singletonList(eventHandler)); - when(executionService.addEventExecution(any())).thenReturn(true); - when(queue.rePublishIfNoAck()).thenReturn(false); - - String id = UUID.randomUUID().toString(); - AtomicBoolean started = new AtomicBoolean(false); - doAnswer( - (Answer) - invocation -> { - started.set(true); - return id; - }) - .when(workflowExecutor) - .startWorkflow( - eq(startWorkflowAction.getStart_workflow().getName()), - eq(startWorkflowAction.getStart_workflow().getVersion()), - eq(startWorkflowAction.getStart_workflow().getCorrelationId()), - anyMap(), - eq(null), - eq(event), - eq(null)); - - SimpleActionProcessor actionProcessor = - new SimpleActionProcessor(workflowExecutor, parametersUtils, 
jsonUtils); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - eventProcessor.handle(queue, message); - assertTrue(started.get()); - } - - @Test - public void testEventProcessorWithRetriableError() { - EventHandler eventHandler = new EventHandler(); - eventHandler.setName(UUID.randomUUID().toString()); - eventHandler.setActive(true); - eventHandler.setEvent(event); - - Action completeTaskAction = new Action(); - completeTaskAction.setAction(Type.complete_task); - completeTaskAction.setComplete_task(new TaskDetails()); - completeTaskAction.getComplete_task().setTaskRefName("task_x"); - completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString()); - completeTaskAction.getComplete_task().setOutput(new HashMap<>()); - eventHandler.getActions().add(completeTaskAction); - - when(queue.rePublishIfNoAck()).thenReturn(false); - when(metadataService.getEventHandlersForEvent(event, true)) - .thenReturn(Collections.singletonList(eventHandler)); - when(executionService.addEventExecution(any())).thenReturn(true); - when(actionProcessor.execute(any(), any(), any(), any())) - .thenThrow( - new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, "some retriable error")); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - eventProcessor.handle(queue, message); - verify(queue, never()).ack(any()); - verify(queue, never()).publish(any()); - } - - @Test - public void testEventProcessorWithNonRetriableError() { - EventHandler eventHandler = new EventHandler(); - eventHandler.setName(UUID.randomUUID().toString()); - eventHandler.setActive(true); - eventHandler.setEvent(event); - - Action completeTaskAction = new Action(); - 
completeTaskAction.setAction(Type.complete_task); - completeTaskAction.setComplete_task(new TaskDetails()); - completeTaskAction.getComplete_task().setTaskRefName("task_x"); - completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString()); - completeTaskAction.getComplete_task().setOutput(new HashMap<>()); - eventHandler.getActions().add(completeTaskAction); - - when(metadataService.getEventHandlersForEvent(event, true)) - .thenReturn(Collections.singletonList(eventHandler)); - when(executionService.addEventExecution(any())).thenReturn(true); - - when(actionProcessor.execute(any(), any(), any(), any())) - .thenThrow( - new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "some non-retriable error")); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - eventProcessor.handle(queue, message); - verify(queue, atMost(1)).ack(any()); - verify(queue, never()).publish(any()); - } - - @Test - public void testExecuteInvalidAction() { - AtomicInteger executeInvoked = new AtomicInteger(0); - doAnswer( - (Answer>) - invocation -> { - executeInvoked.incrementAndGet(); - throw new UnsupportedOperationException("error"); - }) - .when(actionProcessor) - .execute(any(), any(), any(), any()); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - EventExecution eventExecution = new EventExecution("id", "messageId"); - eventExecution.setName("handler"); - eventExecution.setStatus(EventExecution.Status.IN_PROGRESS); - eventExecution.setEvent("event"); - Action action = new Action(); - eventExecution.setAction(Type.start_workflow); - - eventProcessor.execute(eventExecution, action, "payload"); - assertEquals(1, executeInvoked.get()); - 
assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus()); - assertNotNull(eventExecution.getOutput().get("exception")); - } - - @Test - public void testExecuteNonRetriableApplicationException() { - AtomicInteger executeInvoked = new AtomicInteger(0); - doAnswer( - (Answer>) - invocation -> { - executeInvoked.incrementAndGet(); - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "some non-retriable error"); - }) - .when(actionProcessor) - .execute(any(), any(), any(), any()); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - EventExecution eventExecution = new EventExecution("id", "messageId"); - eventExecution.setStatus(EventExecution.Status.IN_PROGRESS); - eventExecution.setEvent("event"); - eventExecution.setName("handler"); - Action action = new Action(); - action.setAction(Type.start_workflow); - eventExecution.setAction(Type.start_workflow); - - eventProcessor.execute(eventExecution, action, "payload"); - assertEquals(1, executeInvoked.get()); - assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus()); - assertNotNull(eventExecution.getOutput().get("exception")); - } - - @Test - public void testExecuteRetriableApplicationException() { - AtomicInteger executeInvoked = new AtomicInteger(0); - doAnswer( - (Answer>) - invocation -> { - executeInvoked.incrementAndGet(); - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, - "some retriable error"); - }) - .when(actionProcessor) - .execute(any(), any(), any(), any()); - - DefaultEventProcessor eventProcessor = - new DefaultEventProcessor( - executionService, - metadataService, - actionProcessor, - jsonUtils, - properties, - objectMapper, - evaluators, - retryTemplate); - EventExecution eventExecution = new EventExecution("id", "messageId"); - 
eventExecution.setStatus(EventExecution.Status.IN_PROGRESS); - eventExecution.setEvent("event"); - Action action = new Action(); - action.setAction(Type.start_workflow); - - eventProcessor.execute(eventExecution, action, "payload"); - assertEquals(3, executeInvoked.get()); - assertNull(eventExecution.getOutput().get("exception")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/events/TestScriptEval.java b/core/src/test/java/com/netflix/conductor/core/events/TestScriptEval.java deleted file mode 100644 index 2078b7c99..000000000 --- a/core/src/test/java/com/netflix/conductor/core/events/TestScriptEval.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class TestScriptEval { - - @Test - public void testScript() throws Exception { - Map payload = new HashMap<>(); - Map app = new HashMap<>(); - app.put("name", "conductor"); - app.put("version", 2.0); - app.put("license", "Apache 2.0"); - - payload.put("app", app); - payload.put("author", "Netflix"); - payload.put("oss", true); - - String script1 = "$.app.name == 'conductor'"; // true - String script2 = "$.version > 3"; // false - String script3 = "$.oss"; // true - String script4 = "$.author == 'me'"; // false - - assertTrue(ScriptEvaluator.evalBool(script1, payload)); - assertFalse(ScriptEvaluator.evalBool(script2, payload)); - assertTrue(ScriptEvaluator.evalBool(script3, payload)); - assertFalse(ScriptEvaluator.evalBool(script4, payload)); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java b/core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java deleted file mode 100644 index 6e9d8ca9f..000000000 --- a/core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.events; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.events.EventHandler.Action; -import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; -import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow; -import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskResult.Status; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.JsonUtils; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static 
org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class TestSimpleActionProcessor { - - private WorkflowExecutor workflowExecutor; - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - private SimpleActionProcessor actionProcessor; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setup() { - externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class); - - workflowExecutor = mock(WorkflowExecutor.class); - - actionProcessor = - new SimpleActionProcessor( - workflowExecutor, - new ParametersUtils(objectMapper), - new JsonUtils(objectMapper)); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - @Test - public void testStartWorkflow_correlationId() throws Exception { - StartWorkflow startWorkflow = new StartWorkflow(); - startWorkflow.setName("testWorkflow"); - startWorkflow.getInput().put("testInput", "${testId}"); - startWorkflow.setCorrelationId("${correlationId}"); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "dev"); - startWorkflow.setTaskToDomain(taskToDomain); - - Action action = new Action(); - action.setAction(Type.start_workflow); - action.setStart_workflow(startWorkflow); - - Object payload = - objectMapper.readValue( - "{\"correlationId\":\"test-id\", \"testId\":\"test_1\"}", Object.class); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testWorkflow"); - workflowDef.setVersion(1); - - when(workflowExecutor.startWorkflow( - eq("testWorkflow"), - eq(null), - any(), - any(), - any(), - eq("testEvent"), - anyMap())) - .thenReturn("workflow_1"); - - Map output = - actionProcessor.execute(action, 
payload, "testEvent", "testMessage"); - - assertNotNull(output); - assertEquals("workflow_1", output.get("workflowId")); - - ArgumentCaptor correlationIdCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor inputParamCaptor = ArgumentCaptor.forClass(Map.class); - ArgumentCaptor taskToDomainCaptor = ArgumentCaptor.forClass(Map.class); - verify(workflowExecutor) - .startWorkflow( - eq("testWorkflow"), - eq(null), - correlationIdCaptor.capture(), - inputParamCaptor.capture(), - any(), - eq("testEvent"), - taskToDomainCaptor.capture()); - assertEquals("test_1", inputParamCaptor.getValue().get("testInput")); - assertEquals("test-id", correlationIdCaptor.getValue()); - assertEquals("testMessage", inputParamCaptor.getValue().get("conductor.event.messageId")); - assertEquals("testEvent", inputParamCaptor.getValue().get("conductor.event.name")); - assertEquals(taskToDomain, taskToDomainCaptor.getValue()); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - @Test - public void testStartWorkflow() throws Exception { - StartWorkflow startWorkflow = new StartWorkflow(); - startWorkflow.setName("testWorkflow"); - startWorkflow.getInput().put("testInput", "${testId}"); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "dev"); - startWorkflow.setTaskToDomain(taskToDomain); - - Action action = new Action(); - action.setAction(Type.start_workflow); - action.setStart_workflow(startWorkflow); - - Object payload = objectMapper.readValue("{\"testId\":\"test_1\"}", Object.class); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testWorkflow"); - workflowDef.setVersion(1); - - when(workflowExecutor.startWorkflow( - eq("testWorkflow"), - eq(null), - any(), - any(), - any(), - eq("testEvent"), - anyMap())) - .thenReturn("workflow_1"); - - Map output = - actionProcessor.execute(action, payload, "testEvent", "testMessage"); - - assertNotNull(output); - assertEquals("workflow_1", output.get("workflowId")); - - ArgumentCaptor 
correlationIdCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor inputParamCaptor = ArgumentCaptor.forClass(Map.class); - ArgumentCaptor taskToDomainCaptor = ArgumentCaptor.forClass(Map.class); - verify(workflowExecutor) - .startWorkflow( - eq("testWorkflow"), - eq(null), - correlationIdCaptor.capture(), - inputParamCaptor.capture(), - any(), - eq("testEvent"), - taskToDomainCaptor.capture()); - assertEquals("test_1", inputParamCaptor.getValue().get("testInput")); - assertNull(correlationIdCaptor.getValue()); - assertEquals("testMessage", inputParamCaptor.getValue().get("conductor.event.messageId")); - assertEquals("testEvent", inputParamCaptor.getValue().get("conductor.event.name")); - assertEquals(taskToDomain, taskToDomainCaptor.getValue()); - } - - @Test - public void testCompleteTask() throws Exception { - TaskDetails taskDetails = new TaskDetails(); - taskDetails.setWorkflowId("${workflowId}"); - taskDetails.setTaskRefName("testTask"); - taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}"); - taskDetails.getOutput().put("someKey", "${Message.someKey}"); - taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}"); - - Action action = new Action(); - action.setAction(Type.complete_task); - action.setComplete_task(taskDetails); - - String payloadJson = - "{\"workflowId\":\"workflow_1\",\"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}"; - Object payload = objectMapper.readValue(payloadJson, Object.class); - - TaskModel task = new TaskModel(); - task.setReferenceTaskName("testTask"); - WorkflowModel workflow = new WorkflowModel(); - workflow.getTasks().add(task); - - when(workflowExecutor.getWorkflow(eq("workflow_1"), anyBoolean())).thenReturn(workflow); - doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any()); - - actionProcessor.execute(action, payload, "testEvent", "testMessage"); - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskResult.class); - 
verify(workflowExecutor).updateTask(argumentCaptor.capture()); - assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus()); - assertEquals( - "testMessage", - argumentCaptor.getValue().getOutputData().get("conductor.event.messageId")); - assertEquals( - "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name")); - assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId")); - assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName")); - assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey")); - // Assert values not in message are evaluated to null - assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey")); - // Assert null values from message are kept - assertTrue( - "testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey")); - assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey")); - } - - @Test - public void testCompleteLoopOverTask() throws Exception { - TaskDetails taskDetails = new TaskDetails(); - taskDetails.setWorkflowId("${workflowId}"); - taskDetails.setTaskRefName("testTask"); - taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}"); - taskDetails.getOutput().put("someKey", "${Message.someKey}"); - taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}"); - - Action action = new Action(); - action.setAction(Type.complete_task); - action.setComplete_task(taskDetails); - - String payloadJson = - "{\"workflowId\":\"workflow_1\", \"taskRefName\":\"testTask\", \"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}"; - Object payload = objectMapper.readValue(payloadJson, Object.class); - - TaskModel task = new TaskModel(); - task.setIteration(1); - task.setReferenceTaskName("testTask__1"); - WorkflowModel workflow = new WorkflowModel(); - workflow.getTasks().add(task); - - when(workflowExecutor.getWorkflow(eq("workflow_1"), 
anyBoolean())).thenReturn(workflow); - doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any()); - - actionProcessor.execute(action, payload, "testEvent", "testMessage"); - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskResult.class); - verify(workflowExecutor).updateTask(argumentCaptor.capture()); - assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus()); - assertEquals( - "testMessage", - argumentCaptor.getValue().getOutputData().get("conductor.event.messageId")); - assertEquals( - "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name")); - assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId")); - assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName")); - assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey")); - // Assert values not in message are evaluated to null - assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey")); - // Assert null values from message are kept - assertTrue( - "testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey")); - assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey")); - } - - @Test - public void testCompleteTaskByTaskId() throws Exception { - TaskDetails taskDetails = new TaskDetails(); - taskDetails.setWorkflowId("${workflowId}"); - taskDetails.setTaskId("${taskId}"); - - Action action = new Action(); - action.setAction(Type.complete_task); - action.setComplete_task(taskDetails); - - Object payload = - objectMapper.readValue( - "{\"workflowId\":\"workflow_1\", \"taskId\":\"task_1\"}", Object.class); - - TaskModel task = new TaskModel(); - task.setTaskId("task_1"); - task.setReferenceTaskName("testTask"); - - when(workflowExecutor.getTask(eq("task_1"))).thenReturn(task); - doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any()); - - 
actionProcessor.execute(action, payload, "testEvent", "testMessage"); - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskResult.class); - verify(workflowExecutor).updateTask(argumentCaptor.capture()); - assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus()); - assertEquals( - "testMessage", - argumentCaptor.getValue().getOutputData().get("conductor.event.messageId")); - assertEquals( - "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name")); - assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId")); - assertEquals("task_1", argumentCaptor.getValue().getOutputData().get("taskId")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java deleted file mode 100644 index 8512fef3a..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ /dev/null @@ -1,608 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.io.InputStream; -import java.time.Duration; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.util.unit.DataSize; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; -import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper; -import com.netflix.conductor.core.execution.mapper.EventTaskMapper; -import 
com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper; -import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper; -import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper; -import com.netflix.conductor.core.execution.mapper.JoinTaskMapper; -import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper; -import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper; -import com.netflix.conductor.core.execution.mapper.SwitchTaskMapper; -import com.netflix.conductor.core.execution.mapper.TaskMapper; -import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; -import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.core.execution.tasks.Decision; -import com.netflix.conductor.core.execution.tasks.Join; -import com.netflix.conductor.core.execution.tasks.Switch; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.DECISION; -import static com.netflix.conductor.common.metadata.tasks.TaskType.DYNAMIC; -import static com.netflix.conductor.common.metadata.tasks.TaskType.EVENT; -import static com.netflix.conductor.common.metadata.tasks.TaskType.FORK_JOIN; -import static com.netflix.conductor.common.metadata.tasks.TaskType.FORK_JOIN_DYNAMIC; -import static com.netflix.conductor.common.metadata.tasks.TaskType.HTTP; -import static com.netflix.conductor.common.metadata.tasks.TaskType.JOIN; -import static 
com.netflix.conductor.common.metadata.tasks.TaskType.SIMPLE; -import static com.netflix.conductor.common.metadata.tasks.TaskType.SUB_WORKFLOW; -import static com.netflix.conductor.common.metadata.tasks.TaskType.SWITCH; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DECISION; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SWITCH; -import static com.netflix.conductor.common.metadata.tasks.TaskType.USER_DEFINED; -import static com.netflix.conductor.common.metadata.tasks.TaskType.WAIT; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@ContextConfiguration( - classes = { - TestObjectMapperConfiguration.class, - TestDeciderOutcomes.TestConfiguration.class - }) -@RunWith(SpringRunner.class) -public class TestDeciderOutcomes { - - private DeciderService deciderService; - - @Autowired private Map evaluators; - - @Autowired private ObjectMapper objectMapper; - - @Autowired private SystemTaskRegistry systemTaskRegistry; - - @Configuration - @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans. 
- public static class TestConfiguration { - - @Bean(TASK_TYPE_DECISION) - public Decision decision() { - return new Decision(); - } - - @Bean(TASK_TYPE_SWITCH) - public Switch switchTask() { - return new Switch(); - } - - @Bean(TASK_TYPE_JOIN) - public Join join() { - return new Join(); - } - - @Bean - public SystemTaskRegistry systemTaskRegistry(Set tasks) { - return new SystemTaskRegistry(tasks); - } - } - - @Before - public void init() { - MetadataDAO metadataDAO = mock(MetadataDAO.class); - - ExternalPayloadStorageUtils externalPayloadStorageUtils = - mock(ExternalPayloadStorageUtils.class); - ConductorProperties properties = mock(ConductorProperties.class); - when(properties.getTaskInputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L)); - when(properties.getMaxTaskInputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10240L)); - - TaskDef taskDef = new TaskDef(); - taskDef.setRetryCount(1); - taskDef.setName("mockTaskDef"); - taskDef.setResponseTimeoutSeconds(60 * 60); - when(metadataDAO.getTaskDef(anyString())).thenReturn(taskDef); - ParametersUtils parametersUtils = new ParametersUtils(objectMapper); - Map taskMappers = new HashMap<>(); - taskMappers.put(DECISION, new DecisionTaskMapper()); - taskMappers.put(SWITCH, new SwitchTaskMapper(evaluators)); - taskMappers.put(DYNAMIC, new DynamicTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(FORK_JOIN, new ForkJoinTaskMapper()); - taskMappers.put(JOIN, new JoinTaskMapper()); - taskMappers.put( - FORK_JOIN_DYNAMIC, - new ForkJoinDynamicTaskMapper( - new IDGenerator(), parametersUtils, objectMapper, metadataDAO)); - taskMappers.put(USER_DEFINED, new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(SIMPLE, new SimpleTaskMapper(parametersUtils)); - taskMappers.put(SUB_WORKFLOW, new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(EVENT, new EventTaskMapper(parametersUtils)); - taskMappers.put(WAIT, new WaitTaskMapper(parametersUtils)); - 
taskMappers.put(HTTP, new HTTPTaskMapper(parametersUtils, metadataDAO)); - - this.deciderService = - new DeciderService( - new IDGenerator(), - parametersUtils, - metadataDAO, - externalPayloadStorageUtils, - systemTaskRegistry, - taskMappers, - Duration.ofMinutes(60)); - } - - @Test - public void testWorkflowWithNoTasks() throws Exception { - InputStream stream = new ClassPathResource("./conditional_flow.json").getInputStream(); - WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class); - assertNotNull(def); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCreateTime(0L); - workflow.getInput().put("param1", "nested"); - workflow.getInput().put("param2", "one"); - - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - assertFalse(outcome.isComplete); - assertTrue(outcome.tasksToBeUpdated.isEmpty()); - assertEquals(3, outcome.tasksToBeScheduled.size()); - - outcome.tasksToBeScheduled.forEach(t -> t.setStatus(TaskModel.Status.COMPLETED)); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - outcome = deciderService.decide(workflow); - assertFalse(outcome.isComplete); - assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName()); - } - - @Test - public void testWorkflowWithNoTasksWithSwitch() throws Exception { - InputStream stream = - new ClassPathResource("./conditional_flow_with_switch.json").getInputStream(); - WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class); - assertNotNull(def); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCreateTime(0L); - workflow.getInput().put("param1", "nested"); - workflow.getInput().put("param2", "one"); - - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - 
assertFalse(outcome.isComplete); - assertTrue(outcome.tasksToBeUpdated.isEmpty()); - assertEquals(3, outcome.tasksToBeScheduled.size()); - - outcome.tasksToBeScheduled.forEach(t -> t.setStatus(TaskModel.Status.COMPLETED)); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - outcome = deciderService.decide(workflow); - assertFalse(outcome.isComplete); - assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName()); - } - - @Test - public void testRetries() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("test_task"); - workflowTask.setType("USER_TASK"); - workflowTask.setTaskReferenceName("t0"); - workflowTask.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - workflowTask.getInputParameters().put("requestId", "${workflow.input.requestId}"); - workflowTask.setTaskDefinition(new TaskDef("test_task")); - - def.getTasks().add(workflowTask); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.getInput().put("requestId", 123); - workflow.setCreateTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals( - workflowTask.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - - String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); - - outcome.tasksToBeScheduled.get(0).setStatus(TaskModel.Status.FAILED); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - - outcome = 
deciderService.decide(workflow); - assertNotNull(outcome); - - assertEquals(1, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); - assertNotSame(task1Id, outcome.tasksToBeScheduled.get(0).getTaskId()); - assertEquals( - outcome.tasksToBeScheduled.get(0).getTaskId(), - outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getRetriedTaskId()); - assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); - - WorkflowTask fork = new WorkflowTask(); - fork.setName("fork0"); - fork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); - fork.setTaskReferenceName("fork0"); - fork.setDynamicForkTasksInputParamName("forkedInputs"); - fork.setDynamicForkTasksParam("forks"); - fork.getInputParameters().put("forks", "${workflow.input.forks}"); - fork.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}"); - - WorkflowTask join = new WorkflowTask(); - join.setName("join0"); - join.setType("JOIN"); - join.setTaskReferenceName("join0"); - - def.getTasks().clear(); - def.getTasks().add(fork); - def.getTasks().add(join); - - List forks = new LinkedList<>(); - Map> forkedInputs = new HashMap<>(); - - for (int i = 0; i < 1; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("f" + i); - wft.setTaskReferenceName("f" + i); - wft.setWorkflowTaskType(TaskType.SIMPLE); - wft.getInputParameters().put("requestId", "${workflow.input.requestId}"); - wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - wft.setTaskDefinition(new TaskDef("f" + i)); - forks.add(wft); - Map input = new HashMap<>(); - input.put("k", "v"); - input.put("k1", 1); - forkedInputs.put(wft.getTaskReferenceName(), input); - } - workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.getInput().put("requestId", 123); - 
workflow.setCreateTime(System.currentTimeMillis()); - - workflow.getInput().put("forks", forks); - workflow.getInput().put("forkedInputs", forkedInputs); - - outcome = deciderService.decide(workflow); - assertNotNull(outcome); - assertEquals(3, outcome.tasksToBeScheduled.size()); - assertEquals(0, outcome.tasksToBeUpdated.size()); - - assertEquals("v", outcome.tasksToBeScheduled.get(1).getInputData().get("k")); - assertEquals(1, outcome.tasksToBeScheduled.get(1).getInputData().get("k1")); - assertEquals( - outcome.tasksToBeScheduled.get(1).getTaskId(), - outcome.tasksToBeScheduled.get(1).getInputData().get("taskId")); - task1Id = outcome.tasksToBeScheduled.get(1).getTaskId(); - - outcome.tasksToBeScheduled.get(1).setStatus(TaskModel.Status.FAILED); - for (TaskModel taskToBeScheduled : outcome.tasksToBeScheduled) { - taskToBeScheduled.setUpdateTime(System.currentTimeMillis()); - } - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - - outcome = deciderService.decide(workflow); - assertTrue( - outcome.tasksToBeScheduled.stream() - .anyMatch(task1 -> task1.getReferenceTaskName().equals("f0"))); - - Optional optionalTask = - outcome.tasksToBeScheduled.stream() - .filter(t -> t.getReferenceTaskName().equals("f0")) - .findFirst(); - assertTrue(optionalTask.isPresent()); - TaskModel task = optionalTask.get(); - assertEquals("v", task.getInputData().get("k")); - assertEquals(1, task.getInputData().get("k1")); - assertEquals(task.getTaskId(), task.getInputData().get("taskId")); - assertNotSame(task1Id, task.getTaskId()); - assertEquals(task1Id, task.getRetriedTaskId()); - } - - @Test - public void testOptional() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask task1 = new WorkflowTask(); - task1.setName("task0"); - task1.setType("SIMPLE"); - task1.setTaskReferenceName("t0"); - task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - task1.setOptional(true); - task1.setTaskDefinition(new TaskDef("task0")); - - WorkflowTask task2 
= new WorkflowTask(); - task2.setName("task1"); - task2.setType("SIMPLE"); - task2.setTaskReferenceName("t1"); - task2.setTaskDefinition(new TaskDef("task1")); - - def.getTasks().add(task1); - def.getTasks().add(task2); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCreateTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals( - task1.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - - for (int i = 0; i < 3; i++) { - String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - - workflow.getTasks().clear(); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - workflow.getTasks().get(0).setStatus(TaskModel.Status.FAILED); - - outcome = deciderService.decide(workflow); - - assertNotNull(outcome); - assertEquals(1, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - - assertEquals(TaskModel.Status.FAILED, workflow.getTasks().get(0).getStatus()); - assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); - assertEquals( - task1.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals(i + 1, outcome.tasksToBeScheduled.get(0).getRetryCount()); - } - - String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); - - workflow.getTasks().clear(); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - workflow.getTasks().get(0).setStatus(TaskModel.Status.FAILED); - - outcome = deciderService.decide(workflow); - - assertNotNull(outcome); - assertEquals(1, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - - assertEquals( - TaskModel.Status.COMPLETED_WITH_ERRORS, 
workflow.getTasks().get(0).getStatus()); - assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); - assertEquals( - task2.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - } - - @Test - public void testOptionalWithDynamicFork() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask task1 = new WorkflowTask(); - task1.setName("fork0"); - task1.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); - task1.setTaskReferenceName("fork0"); - task1.setDynamicForkTasksInputParamName("forkedInputs"); - task1.setDynamicForkTasksParam("forks"); - task1.getInputParameters().put("forks", "${workflow.input.forks}"); - task1.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}"); - - WorkflowTask task2 = new WorkflowTask(); - task2.setName("join0"); - task2.setType("JOIN"); - task2.setTaskReferenceName("join0"); - - def.getTasks().add(task1); - def.getTasks().add(task2); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - List forks = new LinkedList<>(); - Map> forkedInputs = new HashMap<>(); - - for (int i = 0; i < 3; i++) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("f" + i); - workflowTask.setTaskReferenceName("f" + i); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setOptional(true); - workflowTask.setTaskDefinition(new TaskDef("f" + i)); - forks.add(workflowTask); - - forkedInputs.put(workflowTask.getTaskReferenceName(), new HashMap<>()); - } - workflow.getInput().put("forks", forks); - workflow.getInput().put("forkedInputs", forkedInputs); - - workflow.setCreateTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - assertEquals(5, outcome.tasksToBeScheduled.size()); - assertEquals(0, outcome.tasksToBeUpdated.size()); - assertEquals(TASK_TYPE_FORK, outcome.tasksToBeScheduled.get(0).getTaskType()); - 
assertEquals(TaskModel.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus()); - - for (int retryCount = 0; retryCount < 4; retryCount++) { - - for (TaskModel taskToBeScheduled : outcome.tasksToBeScheduled) { - if (taskToBeScheduled.getTaskDefName().equals("join0")) { - assertEquals(TaskModel.Status.IN_PROGRESS, taskToBeScheduled.getStatus()); - } else if (taskToBeScheduled.getTaskType().matches("(f0|f1|f2)")) { - assertEquals(TaskModel.Status.SCHEDULED, taskToBeScheduled.getStatus()); - taskToBeScheduled.setStatus(TaskModel.Status.FAILED); - } - - taskToBeScheduled.setUpdateTime(System.currentTimeMillis()); - } - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - outcome = deciderService.decide(workflow); - assertNotNull(outcome); - } - assertEquals(TASK_TYPE_JOIN, outcome.tasksToBeScheduled.get(0).getTaskType()); - - for (int i = 0; i < 3; i++) { - assertEquals( - TaskModel.Status.COMPLETED_WITH_ERRORS, - outcome.tasksToBeUpdated.get(i).getStatus()); - assertEquals("f" + (i), outcome.tasksToBeUpdated.get(i).getTaskDefName()); - } - - assertEquals(TaskModel.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(0).getStatus()); - new Join().execute(workflow, outcome.tasksToBeScheduled.get(0), null); - assertEquals( - TaskModel.Status.COMPLETED_WITH_ERRORS, - outcome.tasksToBeScheduled.get(0).getStatus()); - } - - @Test - public void testDecisionCases() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask even = new WorkflowTask(); - even.setName("even"); - even.setType("SIMPLE"); - even.setTaskReferenceName("even"); - even.setTaskDefinition(new TaskDef("even")); - - WorkflowTask odd = new WorkflowTask(); - odd.setName("odd"); - odd.setType("SIMPLE"); - odd.setTaskReferenceName("odd"); - odd.setTaskDefinition(new TaskDef("odd")); - - WorkflowTask defaultt = new WorkflowTask(); - defaultt.setName("defaultt"); - defaultt.setType("SIMPLE"); - defaultt.setTaskReferenceName("defaultt"); - defaultt.setTaskDefinition(new 
TaskDef("defaultt")); - - WorkflowTask decide = new WorkflowTask(); - decide.setName("decide"); - decide.setWorkflowTaskType(TaskType.DECISION); - decide.setTaskReferenceName("d0"); - decide.getInputParameters().put("Id", "${workflow.input.Id}"); - decide.getInputParameters().put("location", "${workflow.input.location}"); - decide.setCaseExpression( - "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0) || $.location == 'usa') 'even'; else 'odd'; "); - - decide.getDecisionCases().put("even", Collections.singletonList(even)); - decide.getDecisionCases().put("odd", Collections.singletonList(odd)); - decide.setDefaultCase(Collections.singletonList(defaultt)); - - def.getTasks().add(decide); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCreateTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow); - assertNotNull(outcome); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals( - decide.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals( - defaultt.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); // default - assertEquals( - Collections.singletonList("bad input"), - outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - - workflow.getInput().put("Id", 9); - workflow.getInput().put("location", "usa"); - outcome = deciderService.decide(workflow); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals( - decide.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals( - even.getTaskReferenceName(), - outcome.tasksToBeScheduled - .get(1) - .getReferenceTaskName()); // even because of location == usa - assertEquals( - Collections.singletonList("even"), - outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - - workflow.getInput().put("Id", 9); - 
workflow.getInput().put("location", "canada"); - outcome = deciderService.decide(workflow); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals( - decide.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals( - odd.getTaskReferenceName(), - outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); // odd - assertEquals( - Collections.singletonList("odd"), - outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java deleted file mode 100644 index b9b921a49..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ /dev/null @@ -1,1465 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.io.IOException; -import java.io.InputStream; -import java.time.Duration; -import java.util.*; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; - -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import 
com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; -import com.netflix.conductor.core.execution.mapper.TaskMapper; -import com.netflix.conductor.core.execution.tasks.SubWorkflow; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.spectator.api.Counter; -import com.netflix.spectator.api.DefaultRegistry; -import com.netflix.spectator.api.Registry; -import com.netflix.spectator.api.Spectator; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.*; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@ContextConfiguration( - classes = {TestObjectMapperConfiguration.class, TestDeciderService.TestConfiguration.class}) -@RunWith(SpringRunner.class) -public class TestDeciderService { - - @Configuration - @ComponentScan(basePackageClasses = TaskMapper.class) // loads all TaskMapper beans - public static class TestConfiguration { - - @Bean(TASK_TYPE_SUB_WORKFLOW) - public SubWorkflow subWorkflow(ObjectMapper objectMapper) { - return new SubWorkflow(objectMapper); - } - - @Bean("asyncCompleteSystemTask") - public WorkflowSystemTaskStub asyncCompleteSystemTask() { - return new WorkflowSystemTaskStub("asyncCompleteSystemTask") { - @Override - public boolean isAsyncComplete(TaskModel task) { - return true; - } - }; - } - - @Bean - public SystemTaskRegistry systemTaskRegistry(Set tasks) { - return new SystemTaskRegistry(tasks); - } - - @Bean - public MetadataDAO mockMetadataDAO() { - return 
mock(MetadataDAO.class); - } - - @Bean - public Map taskMapperMap(Collection taskMappers) { - return taskMappers.stream() - .collect(Collectors.toMap(TaskMapper::getTaskType, Function.identity())); - } - - @Bean - public ParametersUtils parametersUtils(ObjectMapper mapper) { - return new ParametersUtils(mapper); - } - - @Bean - public IDGenerator idGenerator() { - return new IDGenerator(); - } - } - - private DeciderService deciderService; - - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - private static Registry registry; - - @Autowired private ObjectMapper objectMapper; - - @Autowired private SystemTaskRegistry systemTaskRegistry; - - @Autowired - @Qualifier("taskMapperMap") - private Map taskMappers; - - @Autowired private ParametersUtils parametersUtils; - - @Autowired private MetadataDAO metadataDAO; - - @Rule public ExpectedException exception = ExpectedException.none(); - - @BeforeClass - public static void init() { - registry = new DefaultRegistry(); - Spectator.globalRegistry().add(registry); - } - - @Before - public void setup() { - externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("TestDeciderService"); - workflowDef.setVersion(1); - TaskDef taskDef = new TaskDef(); - when(metadataDAO.getTaskDef(any())).thenReturn(taskDef); - when(metadataDAO.getLatestWorkflowDef(any())).thenReturn(Optional.of(workflowDef)); - - deciderService = - new DeciderService( - new IDGenerator(), - parametersUtils, - metadataDAO, - externalPayloadStorageUtils, - systemTaskRegistry, - taskMappers, - Duration.ofMinutes(60)); - } - - @Test - public void testGetTaskInputV2() { - WorkflowModel workflow = createDefaultWorkflow(); - - workflow.getWorkflowDefinition().setSchemaVersion(2); - - Map inputParams = new HashMap<>(); - inputParams.put("workflowInputParam", "${workflow.input.requestId}"); - inputParams.put("taskOutputParam", "${task2.output.location}"); - 
inputParams.put("taskOutputParam2", "${task2.output.locationBad}"); - inputParams.put("taskOutputParam3", "${task3.output.location}"); - inputParams.put("constParam", "Some String value"); - inputParams.put("nullValue", null); - inputParams.put("task2Status", "${task2.status}"); - inputParams.put("channelMap", "${workflow.input.channelMapping}"); - Map taskInput = - parametersUtils.getTaskInput(inputParams, workflow, null, null); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("workflowInputParam")); - assertTrue(taskInput.containsKey("taskOutputParam")); - assertTrue(taskInput.containsKey("taskOutputParam2")); - assertTrue(taskInput.containsKey("taskOutputParam3")); - assertNull(taskInput.get("taskOutputParam2")); - - assertNotNull(taskInput.get("channelMap")); - assertEquals(5, taskInput.get("channelMap")); - - assertEquals("request id 001", taskInput.get("workflowInputParam")); - assertEquals("http://location", taskInput.get("taskOutputParam")); - assertNull(taskInput.get("taskOutputParam3")); - assertNull(taskInput.get("nullValue")); - assertEquals( - workflow.getTasks().get(0).getStatus().name(), - taskInput.get("task2Status")); // task2 and task3 are the tasks respectively - } - - @Test - public void testGetTaskInputV2Partial() { - WorkflowModel workflow = createDefaultWorkflow(); - System.setProperty("EC2_INSTANCE", "i-123abcdef990"); - workflow.getWorkflowDefinition().setSchemaVersion(2); - - Map inputParams = new HashMap<>(); - inputParams.put("workflowInputParam", "${workflow.input.requestId}"); - inputParams.put("workfowOutputParam", "${workflow.output.name}"); - inputParams.put("taskOutputParam", "${task2.output.location}"); - inputParams.put("taskOutputParam2", "${task2.output.locationBad}"); - inputParams.put("taskOutputParam3", "${task3.output.location}"); - inputParams.put("constParam", "Some String value &"); - inputParams.put("partial", "${task2.output.location}/something?host=${EC2_INSTANCE}"); - 
inputParams.put("jsonPathExtracted", "${workflow.output.names[*].year}"); - inputParams.put("secondName", "${workflow.output.names[1].name}"); - inputParams.put( - "concatenatedName", - "The Band is: ${workflow.output.names[1].name}-\t${EC2_INSTANCE}"); - - TaskDef taskDef = new TaskDef(); - taskDef.getInputTemplate().put("opname", "${workflow.output.name}"); - List listParams = new LinkedList<>(); - List listParams2 = new LinkedList<>(); - listParams2.add("${workflow.input.requestId}-10-${EC2_INSTANCE}"); - listParams.add(listParams2); - Map map = new HashMap<>(); - map.put("name", "${workflow.output.names[0].name}"); - map.put("hasAwards", "${workflow.input.hasAwards}"); - listParams.add(map); - taskDef.getInputTemplate().put("listValues", listParams); - - Map taskInput = - parametersUtils.getTaskInput(inputParams, workflow, taskDef, null); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("workflowInputParam")); - assertTrue(taskInput.containsKey("taskOutputParam")); - assertTrue(taskInput.containsKey("taskOutputParam2")); - assertTrue(taskInput.containsKey("taskOutputParam3")); - assertNull(taskInput.get("taskOutputParam2")); - assertNotNull(taskInput.get("jsonPathExtracted")); - assertTrue(taskInput.get("jsonPathExtracted") instanceof List); - assertNotNull(taskInput.get("secondName")); - assertTrue(taskInput.get("secondName") instanceof String); - assertEquals("The Doors", taskInput.get("secondName")); - assertEquals("The Band is: The Doors-\ti-123abcdef990", taskInput.get("concatenatedName")); - - assertEquals("request id 001", taskInput.get("workflowInputParam")); - assertEquals("http://location", taskInput.get("taskOutputParam")); - assertNull(taskInput.get("taskOutputParam3")); - assertNotNull(taskInput.get("partial")); - assertEquals("http://location/something?host=i-123abcdef990", taskInput.get("partial")); - } - - @SuppressWarnings("unchecked") - @Test - public void testGetTaskInput() { - Map ip = new HashMap<>(); - 
ip.put("workflowInputParam", "${workflow.input.requestId}"); - ip.put("taskOutputParam", "${task2.output.location}"); - List> json = new LinkedList<>(); - Map m1 = new HashMap<>(); - m1.put("name", "person name"); - m1.put("city", "New York"); - m1.put("phone", 2120001234); - m1.put("status", "${task2.output.isPersonActive}"); - - Map m2 = new HashMap<>(); - m2.put("employer", "City Of New York"); - m2.put("color", "purple"); - m2.put("requestId", "${workflow.input.requestId}"); - - json.add(m1); - json.add(m2); - ip.put("complexJson", json); - - WorkflowDef def = new WorkflowDef(); - def.setName("testGetTaskInput"); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.getInput().put("requestId", "request id 001"); - TaskModel task = new TaskModel(); - task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.getOutputData().put("isPersonActive", true); - workflow.getTasks().add(task); - Map taskInput = parametersUtils.getTaskInput(ip, workflow, null, null); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("workflowInputParam")); - assertTrue(taskInput.containsKey("taskOutputParam")); - assertEquals("request id 001", taskInput.get("workflowInputParam")); - assertEquals("http://location", taskInput.get("taskOutputParam")); - assertNotNull(taskInput.get("complexJson")); - assertTrue(taskInput.get("complexJson") instanceof List); - - List> resolvedInput = - (List>) taskInput.get("complexJson"); - assertEquals(2, resolvedInput.size()); - } - - @Test - public void testGetTaskInputV1() { - Map ip = new HashMap<>(); - ip.put("workflowInputParam", "workflow.input.requestId"); - ip.put("taskOutputParam", "task2.output.location"); - - WorkflowDef def = new WorkflowDef(); - def.setSchemaVersion(1); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - - workflow.getInput().put("requestId", "request id 001"); 
- TaskModel task = new TaskModel(); - task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.getOutputData().put("isPersonActive", true); - workflow.getTasks().add(task); - Map taskInput = parametersUtils.getTaskInput(ip, workflow, null, null); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("workflowInputParam")); - assertTrue(taskInput.containsKey("taskOutputParam")); - assertEquals("request id 001", taskInput.get("workflowInputParam")); - assertEquals("http://location", taskInput.get("taskOutputParam")); - } - - @Test - public void testGetTaskInputV2WithInputTemplate() { - TaskDef def = new TaskDef(); - Map inputTemplate = new HashMap<>(); - inputTemplate.put("url", "https://some_url:7004"); - inputTemplate.put("default_url", "https://default_url:7004"); - inputTemplate.put("someKey", "someValue"); - - def.getInputTemplate().putAll(inputTemplate); - - Map workflowInput = new HashMap<>(); - workflowInput.put("some_new_url", "https://some_new_url:7004"); - workflowInput.put("workflow_input_url", "https://workflow_input_url:7004"); - workflowInput.put("some_other_key", "some_other_value"); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testGetTaskInputV2WithInputTemplate"); - workflowDef.setVersion(1); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setInput(workflowInput); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.getInputParameters().put("url", "${workflow.input.some_new_url}"); - workflowTask - .getInputParameters() - .put("workflow_input_url", "${workflow.input.workflow_input_url}"); - workflowTask.getInputParameters().put("someKey", "${workflow.input.someKey}"); - workflowTask.getInputParameters().put("someOtherKey", "${workflow.input.some_other_key}"); - workflowTask - .getInputParameters() - .put("someNowhereToBeFoundKey", "${workflow.input.some_ne_key}"); - - Map taskInput = - 
parametersUtils.getTaskInputV2( - workflowTask.getInputParameters(), workflow, null, def); - assertTrue(taskInput.containsKey("url")); - assertTrue(taskInput.containsKey("default_url")); - assertEquals(taskInput.get("url"), "https://some_new_url:7004"); - assertEquals(taskInput.get("default_url"), "https://default_url:7004"); - assertEquals(taskInput.get("workflow_input_url"), "https://workflow_input_url:7004"); - assertEquals("some_other_value", taskInput.get("someOtherKey")); - assertEquals("someValue", taskInput.get("someKey")); - assertNull(taskInput.get("someNowhereToBeFoundKey")); - } - - @Test - public void testGetNextTask() { - - WorkflowDef def = createNestedWorkflow(); - WorkflowTask firstTask = def.getTasks().get(0); - assertNotNull(firstTask); - assertEquals("fork1", firstTask.getTaskReferenceName()); - WorkflowTask nextAfterFirst = def.getNextTask(firstTask.getTaskReferenceName()); - assertNotNull(nextAfterFirst); - assertEquals("join1", nextAfterFirst.getTaskReferenceName()); - - WorkflowTask fork2 = def.getTaskByRefName("fork2"); - assertNotNull(fork2); - assertEquals("fork2", fork2.getTaskReferenceName()); - - WorkflowTask taskAfterFork2 = def.getNextTask("fork2"); - assertNotNull(taskAfterFork2); - assertEquals("join2", taskAfterFork2.getTaskReferenceName()); - - WorkflowTask t2 = def.getTaskByRefName("t2"); - assertNotNull(t2); - assertEquals("t2", t2.getTaskReferenceName()); - - WorkflowTask taskAfterT2 = def.getNextTask("t2"); - assertNotNull(taskAfterT2); - assertEquals("t4", taskAfterT2.getTaskReferenceName()); - - WorkflowTask taskAfterT3 = def.getNextTask("t3"); - assertNotNull(taskAfterT3); - assertEquals(DECISION.name(), taskAfterT3.getType()); - assertEquals("d1", taskAfterT3.getTaskReferenceName()); - - WorkflowTask taskAfterT4 = def.getNextTask("t4"); - assertNotNull(taskAfterT4); - assertEquals("join2", taskAfterT4.getTaskReferenceName()); - - WorkflowTask taskAfterT6 = def.getNextTask("t6"); - assertNotNull(taskAfterT6); - 
assertEquals("t9", taskAfterT6.getTaskReferenceName()); - - WorkflowTask taskAfterJoin2 = def.getNextTask("join2"); - assertNotNull(taskAfterJoin2); - assertEquals("join1", taskAfterJoin2.getTaskReferenceName()); - - WorkflowTask taskAfterJoin1 = def.getNextTask("join1"); - assertNotNull(taskAfterJoin1); - assertEquals("t5", taskAfterJoin1.getTaskReferenceName()); - - WorkflowTask taskAfterSubWF = def.getNextTask("sw1"); - assertNotNull(taskAfterSubWF); - assertEquals("join1", taskAfterSubWF.getTaskReferenceName()); - - WorkflowTask taskAfterT9 = def.getNextTask("t9"); - assertNotNull(taskAfterT9); - assertEquals("join2", taskAfterT9.getTaskReferenceName()); - } - - @Test - public void testCaseStatement() { - - WorkflowDef def = createConditionalWF(); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCreateTime(0L); - workflow.setWorkflowId("a"); - workflow.setCorrelationId("b"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - DeciderOutcome outcome = deciderService.decide(workflow); - List scheduledTasks = outcome.tasksToBeScheduled; - assertNotNull(scheduledTasks); - assertEquals(2, scheduledTasks.size()); - assertEquals(TaskModel.Status.IN_PROGRESS, scheduledTasks.get(0).getStatus()); - assertEquals(TaskModel.Status.SCHEDULED, scheduledTasks.get(1).getStatus()); - } - - @Test - public void testGetTaskByRef() { - WorkflowModel workflow = new WorkflowModel(); - TaskModel t1 = new TaskModel(); - t1.setReferenceTaskName("ref"); - t1.setSeq(0); - t1.setStatus(TaskModel.Status.TIMED_OUT); - - TaskModel t2 = new TaskModel(); - t2.setReferenceTaskName("ref"); - t2.setSeq(1); - t2.setStatus(TaskModel.Status.FAILED); - - TaskModel t3 = new TaskModel(); - t3.setReferenceTaskName("ref"); - t3.setSeq(2); - t3.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().add(t1); - workflow.getTasks().add(t2); - workflow.getTasks().add(t3); - - TaskModel task = workflow.getTaskByRefName("ref"); - 
assertNotNull(task); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(t3.getSeq(), task.getSeq()); - } - - @Test - public void testTaskTimeout() { - Counter counter = - registry.counter("task_timeout", "class", "WorkflowMonitor", "taskType", "test"); - long counterCount = counter.count(); - - TaskDef taskType = new TaskDef(); - taskType.setName("test"); - taskType.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskType.setTimeoutSeconds(1); - - TaskModel task = new TaskModel(); - task.setTaskType(taskType.getName()); - task.setStartTime(System.currentTimeMillis() - 2_000); // 2 seconds ago! - task.setStatus(TaskModel.Status.IN_PROGRESS); - deciderService.checkTaskTimeout(taskType, task); - - // Task should be marked as timed out - assertEquals(TaskModel.Status.TIMED_OUT, task.getStatus()); - assertNotNull(task.getReasonForIncompletion()); - assertEquals(++counterCount, counter.count()); - - taskType.setTimeoutPolicy(TimeoutPolicy.ALERT_ONLY); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setReasonForIncompletion(null); - deciderService.checkTaskTimeout(taskType, task); - - // Nothing will happen - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals(++counterCount, counter.count()); - - boolean exception = false; - taskType.setTimeoutPolicy(TimeoutPolicy.TIME_OUT_WF); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setReasonForIncompletion(null); - - try { - deciderService.checkTaskTimeout(taskType, task); - } catch (TerminateWorkflowException tw) { - exception = true; - } - assertTrue(exception); - assertEquals(TaskModel.Status.TIMED_OUT, task.getStatus()); - assertNotNull(task.getReasonForIncompletion()); - assertEquals(++counterCount, counter.count()); - - taskType.setTimeoutPolicy(TimeoutPolicy.TIME_OUT_WF); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setReasonForIncompletion(null); - deciderService.checkTaskTimeout(null, task); // this will 
be a no-op - - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals(counterCount, counter.count()); - } - - @Test - public void testCheckTaskPollTimeout() { - Counter counter = - registry.counter("task_timeout", "class", "WorkflowMonitor", "taskType", "test"); - long counterCount = counter.count(); - - TaskDef taskType = new TaskDef(); - taskType.setName("test"); - taskType.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskType.setPollTimeoutSeconds(1); - - TaskModel task = new TaskModel(); - task.setTaskType(taskType.getName()); - task.setScheduledTime(System.currentTimeMillis() - 2_000); - task.setStatus(TaskModel.Status.SCHEDULED); - deciderService.checkTaskPollTimeout(taskType, task); - - assertEquals(++counterCount, counter.count()); - assertEquals(TaskModel.Status.TIMED_OUT, task.getStatus()); - assertNotNull(task.getReasonForIncompletion()); - - task.setScheduledTime(System.currentTimeMillis()); - task.setReasonForIncompletion(null); - task.setStatus(TaskModel.Status.SCHEDULED); - deciderService.checkTaskPollTimeout(taskType, task); - - assertEquals(counterCount, counter.count()); - assertEquals(TaskModel.Status.SCHEDULED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - } - - @SuppressWarnings("unchecked") - @Test - public void testConcurrentTaskInputCalc() throws InterruptedException { - TaskDef def = new TaskDef(); - - Map inputMap = new HashMap<>(); - inputMap.put("path", "${workflow.input.inputLocation}"); - inputMap.put("type", "${workflow.input.sourceType}"); - inputMap.put("channelMapping", "${workflow.input.channelMapping}"); - - List> input = new LinkedList<>(); - input.add(inputMap); - - Map body = new HashMap<>(); - body.put("input", input); - - def.getInputTemplate().putAll(body); - - ExecutorService executorService = Executors.newFixedThreadPool(10); - final int[] result = new int[10]; - CountDownLatch latch = new CountDownLatch(10); - - for (int i = 0; i < 10; 
i++) { - final int x = i; - executorService.submit( - () -> { - try { - Map workflowInput = new HashMap<>(); - workflowInput.put("outputLocation", "baggins://outputlocation/" + x); - workflowInput.put("inputLocation", "baggins://inputlocation/" + x); - workflowInput.put("sourceType", "MuxedSource"); - workflowInput.put("channelMapping", x); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testConcurrentTaskInputCalc"); - workflowDef.setVersion(1); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setInput(workflowInput); - - Map taskInput = - parametersUtils.getTaskInputV2( - new HashMap<>(), workflow, null, def); - - Object reqInputObj = taskInput.get("input"); - assertNotNull(reqInputObj); - assertTrue(reqInputObj instanceof List); - List> reqInput = - (List>) reqInputObj; - - Object cmObj = reqInput.get(0).get("channelMapping"); - assertNotNull(cmObj); - if (!(cmObj instanceof Number)) { - result[x] = -1; - } else { - Number channelMapping = (Number) cmObj; - result[x] = channelMapping.intValue(); - } - - latch.countDown(); - } catch (Exception e) { - e.printStackTrace(); - } - }); - } - latch.await(1, TimeUnit.MINUTES); - if (latch.getCount() > 0) { - fail( - "Executions did not complete in a minute. 
Something wrong with the build server?"); - } - executorService.shutdownNow(); - for (int i = 0; i < result.length; i++) { - assertEquals(i, result[i]); - } - } - - @SuppressWarnings("unchecked") - @Test - public void testTaskRetry() { - WorkflowModel workflow = createDefaultWorkflow(); - - workflow.getWorkflowDefinition().setSchemaVersion(2); - - Map inputParams = new HashMap<>(); - inputParams.put("workflowInputParam", "${workflow.input.requestId}"); - inputParams.put("taskOutputParam", "${task2.output.location}"); - inputParams.put("constParam", "Some String value"); - inputParams.put("nullValue", null); - inputParams.put("task2Status", "${task2.status}"); - inputParams.put("null", null); - inputParams.put("task_id", "${CPEWF_TASK_ID}"); - - Map env = new HashMap<>(); - env.put("env_task_id", "${CPEWF_TASK_ID}"); - inputParams.put("env", env); - - Map taskInput = - parametersUtils.getTaskInput(inputParams, workflow, null, "t1"); - TaskModel task = new TaskModel(); - task.getInputData().putAll(taskInput); - task.setStatus(TaskModel.Status.FAILED); - task.setTaskId("t1"); - - TaskDef taskDef = new TaskDef(); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.getInputParameters().put("task_id", "${CPEWF_TASK_ID}"); - workflowTask.getInputParameters().put("env", env); - - Optional task2 = deciderService.retry(taskDef, workflowTask, task, workflow); - assertEquals("t1", task.getInputData().get("task_id")); - assertEquals( - "t1", ((Map) task.getInputData().get("env")).get("env_task_id")); - - assertNotSame(task.getTaskId(), task2.get().getTaskId()); - assertEquals(task2.get().getTaskId(), task2.get().getInputData().get("task_id")); - assertEquals( - task2.get().getTaskId(), - ((Map) task2.get().getInputData().get("env")).get("env_task_id")); - - TaskModel task3 = new TaskModel(); - task3.getInputData().putAll(taskInput); - task3.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR); - task3.setTaskId("t1"); - 
when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - exception.expect(TerminateWorkflowException.class); - deciderService.retry(taskDef, workflowTask, task3, workflow); - } - - @SuppressWarnings("unchecked") - @Test - public void testWorkflowTaskRetry() { - WorkflowModel workflow = createDefaultWorkflow(); - - workflow.getWorkflowDefinition().setSchemaVersion(2); - - Map inputParams = new HashMap<>(); - inputParams.put("workflowInputParam", "${workflow.input.requestId}"); - inputParams.put("taskOutputParam", "${task2.output.location}"); - inputParams.put("constParam", "Some String value"); - inputParams.put("nullValue", null); - inputParams.put("task2Status", "${task2.status}"); - inputParams.put("null", null); - inputParams.put("task_id", "${CPEWF_TASK_ID}"); - - Map env = new HashMap<>(); - env.put("env_task_id", "${CPEWF_TASK_ID}"); - inputParams.put("env", env); - - Map taskInput = - parametersUtils.getTaskInput(inputParams, workflow, null, "t1"); - - // Create a first failed task - TaskModel task = new TaskModel(); - task.getInputData().putAll(taskInput); - task.setStatus(TaskModel.Status.FAILED); - task.setTaskId("t1"); - - TaskDef taskDef = new TaskDef(); - assertEquals(3, taskDef.getRetryCount()); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.getInputParameters().put("task_id", "${CPEWF_TASK_ID}"); - workflowTask.getInputParameters().put("env", env); - workflowTask.setRetryCount(1); - - // Retry the failed task and assert that a new one has been created - Optional task2 = deciderService.retry(taskDef, workflowTask, task, workflow); - assertEquals("t1", task.getInputData().get("task_id")); - assertEquals( - "t1", ((Map) task.getInputData().get("env")).get("env_task_id")); - - assertNotSame(task.getTaskId(), task2.get().getTaskId()); - assertEquals(task2.get().getTaskId(), task2.get().getInputData().get("task_id")); - assertEquals( - task2.get().getTaskId(), - ((Map) 
task2.get().getInputData().get("env")).get("env_task_id")); - - // Set the retried task to FAILED, retry it again and assert that the workflow failed - task2.get().setStatus(TaskModel.Status.FAILED); - exception.expect(TerminateWorkflowException.class); - final Optional task3 = - deciderService.retry(taskDef, workflowTask, task2.get(), workflow); - - assertFalse(task3.isPresent()); - assertEquals(WorkflowModel.Status.FAILED, workflow.getStatus()); - } - - @Test - public void testLinearBackoff() { - WorkflowModel workflow = createDefaultWorkflow(); - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.FAILED); - task.setTaskId("t1"); - - TaskDef taskDef = new TaskDef(); - taskDef.setRetryDelaySeconds(60); - taskDef.setRetryLogic(TaskDef.RetryLogic.LINEAR_BACKOFF); - taskDef.setBackoffScaleFactor(2); - WorkflowTask workflowTask = new WorkflowTask(); - - Optional task2 = deciderService.retry(taskDef, workflowTask, task, workflow); - assertEquals(120, task2.get().getCallbackAfterSeconds()); // 60*2*1 - - Optional task3 = - deciderService.retry(taskDef, workflowTask, task2.get(), workflow); - assertEquals(240, task3.get().getCallbackAfterSeconds()); // 60*2*2 - - Optional task4 = - deciderService.retry(taskDef, workflowTask, task3.get(), workflow); - // // 60*2*3 - assertEquals(360, task4.get().getCallbackAfterSeconds()); // 60*2*3 - - taskDef.setRetryCount(Integer.MAX_VALUE); - task4.get().setRetryCount(Integer.MAX_VALUE - 100); - Optional task5 = - deciderService.retry(taskDef, workflowTask, task4.get(), workflow); - assertEquals(Integer.MAX_VALUE, task5.get().getCallbackAfterSeconds()); - } - - @Test - public void testExponentialBackoff() { - WorkflowModel workflow = createDefaultWorkflow(); - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.FAILED); - task.setTaskId("t1"); - - TaskDef taskDef = new TaskDef(); - taskDef.setRetryDelaySeconds(60); - taskDef.setRetryLogic(TaskDef.RetryLogic.EXPONENTIAL_BACKOFF); - WorkflowTask 
workflowTask = new WorkflowTask(); - - Optional task2 = deciderService.retry(taskDef, workflowTask, task, workflow); - assertEquals(60, task2.get().getCallbackAfterSeconds()); - - Optional task3 = - deciderService.retry(taskDef, workflowTask, task2.get(), workflow); - assertEquals(120, task3.get().getCallbackAfterSeconds()); - - Optional task4 = - deciderService.retry(taskDef, workflowTask, task3.get(), workflow); - assertEquals(240, task4.get().getCallbackAfterSeconds()); - - taskDef.setRetryCount(Integer.MAX_VALUE); - task4.get().setRetryCount(Integer.MAX_VALUE - 100); - Optional task5 = - deciderService.retry(taskDef, workflowTask, task4.get(), workflow); - assertEquals(Integer.MAX_VALUE, task5.get().getCallbackAfterSeconds()); - } - - @Test - public void testFork() throws IOException { - InputStream stream = TestDeciderService.class.getResourceAsStream("/test.json"); - WorkflowModel workflow = objectMapper.readValue(stream, WorkflowModel.class); - - DeciderOutcome outcome = deciderService.decide(workflow); - assertFalse(outcome.isComplete); - assertEquals(5, outcome.tasksToBeScheduled.size()); - assertEquals(1, outcome.tasksToBeUpdated.size()); - } - - @Test - public void testDecideSuccessfulWorkflow() { - WorkflowDef workflowDef = createLinearWorkflow(); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - TaskModel task1 = new TaskModel(); - task1.setTaskType("junit_task_l1"); - task1.setReferenceTaskName("s1"); - task1.setSeq(1); - task1.setRetried(false); - task1.setExecuted(false); - task1.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().add(task1); - - DeciderOutcome deciderOutcome = deciderService.decide(workflow); - assertNotNull(deciderOutcome); - - assertFalse(workflow.getTaskByRefName("s1").isRetried()); - assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); - assertEquals("s1", 
deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); - assertEquals(1, deciderOutcome.tasksToBeScheduled.size()); - assertEquals("s2", deciderOutcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertFalse(deciderOutcome.isComplete); - - TaskModel task2 = new TaskModel(); - task2.setTaskType("junit_task_l2"); - task2.setReferenceTaskName("s2"); - task2.setSeq(2); - task2.setRetried(false); - task2.setExecuted(false); - task2.setStatus(TaskModel.Status.COMPLETED); - workflow.getTasks().add(task2); - - deciderOutcome = deciderService.decide(workflow); - assertNotNull(deciderOutcome); - assertTrue(workflow.getTaskByRefName("s2").isExecuted()); - assertFalse(workflow.getTaskByRefName("s2").isRetried()); - assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); - assertEquals("s2", deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); - assertEquals(0, deciderOutcome.tasksToBeScheduled.size()); - assertTrue(deciderOutcome.isComplete); - } - - @Test - public void testDecideWithLoopTask() { - WorkflowDef workflowDef = createLinearWorkflow(); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - TaskModel task1 = new TaskModel(); - task1.setTaskType("junit_task_l1"); - task1.setReferenceTaskName("s1"); - task1.setSeq(1); - task1.setIteration(1); - task1.setRetried(false); - task1.setExecuted(false); - task1.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().add(task1); - - DeciderOutcome deciderOutcome = deciderService.decide(workflow); - assertNotNull(deciderOutcome); - - assertFalse(workflow.getTaskByRefName("s1").isRetried()); - assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); - assertEquals("s1", deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); - assertEquals(1, deciderOutcome.tasksToBeScheduled.size()); - assertEquals("s2__1", deciderOutcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - 
assertFalse(deciderOutcome.isComplete); - } - - @Test - public void testDecideFailedTask() { - WorkflowDef workflowDef = createLinearWorkflow(); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - TaskModel task = new TaskModel(); - task.setTaskType("junit_task_l1"); - task.setReferenceTaskName("s1"); - task.setSeq(1); - task.setRetried(false); - task.setExecuted(false); - task.setStatus(TaskModel.Status.FAILED); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("s1"); - workflowTask.setName("junit_task_l1"); - workflowTask.setTaskDefinition(new TaskDef("junit_task_l1")); - task.setWorkflowTask(workflowTask); - - workflow.getTasks().add(task); - - DeciderOutcome deciderOutcome = deciderService.decide(workflow); - assertNotNull(deciderOutcome); - assertFalse(workflow.getTaskByRefName("s1").isExecuted()); - assertTrue(workflow.getTaskByRefName("s1").isRetried()); - assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); - assertEquals("s1", deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); - assertEquals(1, deciderOutcome.tasksToBeScheduled.size()); - assertEquals("s1", deciderOutcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertFalse(deciderOutcome.isComplete); - } - - @Test - public void testGetTasksToBeScheduled() { - WorkflowDef workflowDef = createLinearWorkflow(); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("s1"); - workflowTask1.setTaskReferenceName("s1"); - workflowTask1.setType(SIMPLE.name()); - workflowTask1.setTaskDefinition(new TaskDef("s1")); - - List tasksToBeScheduled = - deciderService.getTasksToBeScheduled(workflow, workflowTask1, 0, null); - assertNotNull(tasksToBeScheduled); - assertEquals(1, 
tasksToBeScheduled.size()); - assertEquals("s1", tasksToBeScheduled.get(0).getReferenceTaskName()); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("s2"); - workflowTask2.setTaskReferenceName("s2"); - workflowTask2.setType(SIMPLE.name()); - workflowTask2.setTaskDefinition(new TaskDef("s2")); - tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask2, 0, null); - assertNotNull(tasksToBeScheduled); - assertEquals(1, tasksToBeScheduled.size()); - assertEquals("s2", tasksToBeScheduled.get(0).getReferenceTaskName()); - } - - @Test - public void testIsResponseTimedOut() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test_rt"); - taskDef.setResponseTimeoutSeconds(10); - - TaskModel task = new TaskModel(); - task.setTaskDefName("test_rt"); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setTaskId("aa"); - task.setTaskType(TaskType.TASK_TYPE_SIMPLE); - task.setUpdateTime(System.currentTimeMillis() - TimeUnit.SECONDS.toMillis(11)); - - assertTrue(deciderService.isResponseTimedOut(taskDef, task)); - - // verify that sub workflow tasks are not response timed out - task.setTaskType(TaskType.TASK_TYPE_SUB_WORKFLOW); - assertFalse(deciderService.isResponseTimedOut(taskDef, task)); - - task.setTaskType("asyncCompleteSystemTask"); - assertFalse(deciderService.isResponseTimedOut(taskDef, task)); - } - - @Test - public void testFilterNextLoopOverTasks() { - - WorkflowModel workflow = new WorkflowModel(); - - TaskModel task1 = new TaskModel(); - task1.setReferenceTaskName("task1"); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setTaskId("task1"); - task1.setIteration(1); - - TaskModel task2 = new TaskModel(); - task2.setReferenceTaskName("task2"); - task2.setStatus(TaskModel.Status.SCHEDULED); - task2.setTaskId("task2"); - - TaskModel task3 = new TaskModel(); - task3.setReferenceTaskName("task3__1"); - task3.setStatus(TaskModel.Status.IN_PROGRESS); - task3.setTaskId("task3__1"); - - TaskModel task4 = new 
TaskModel(); - task4.setReferenceTaskName("task4"); - task4.setStatus(TaskModel.Status.SCHEDULED); - task4.setTaskId("task4"); - - TaskModel task5 = new TaskModel(); - task5.setReferenceTaskName("task5"); - task5.setStatus(TaskModel.Status.COMPLETED); - task5.setTaskId("task5"); - - workflow.getTasks().addAll(Arrays.asList(task1, task2, task3, task4, task5)); - List tasks = - deciderService.filterNextLoopOverTasks( - Arrays.asList(task2, task3, task4), task1, workflow); - assertEquals(2, tasks.size()); - tasks.forEach( - task -> { - assertTrue( - task.getReferenceTaskName() - .endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(1))); - assertEquals(1, task.getIteration()); - }); - } - - @Test - public void testUpdateWorkflowOutput() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(new WorkflowDef()); - deciderService.updateWorkflowOutput(workflow, null); - assertNotNull(workflow.getOutput()); - assertTrue(workflow.getOutput().isEmpty()); - TaskModel task = new TaskModel(); - Map taskOutput = new HashMap<>(); - taskOutput.put("taskKey", "taskValue"); - task.setOutputData(taskOutput); - workflow.getTasks().add(task); - WorkflowDef workflowDef = new WorkflowDef(); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(workflowDef)); - deciderService.updateWorkflowOutput(workflow, null); - assertNotNull(workflow.getOutput()); - assertEquals("taskValue", workflow.getOutput().get("taskKey")); - } - - // when workflow definition has outputParameters defined - @SuppressWarnings({"unchecked", "rawtypes"}) - @Test - public void testUpdateWorkflowOutput_WhenDefinitionHasOutputParameters() { - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setOutputParameters( - new HashMap() { - { - put("workflowKey", "workflowValue"); - } - }); - workflow.setWorkflowDefinition(workflowDef); - TaskModel task = new TaskModel(); - task.setReferenceTaskName("test_task"); - 
task.setOutputData( - new HashMap() { - { - put("taskKey", "taskValue"); - } - }); - workflow.getTasks().add(task); - deciderService.updateWorkflowOutput(workflow, null); - assertNotNull(workflow.getOutput()); - assertEquals("workflowValue", workflow.getOutput().get("workflowKey")); - } - - @Test - public void testUpdateWorkflowOutput_WhenWorkflowHasTerminateTask() { - WorkflowModel workflow = new WorkflowModel(); - TaskModel task = new TaskModel(); - task.setTaskType(TASK_TYPE_TERMINATE); - task.setStatus(TaskModel.Status.COMPLETED); - task.setOutputData( - new HashMap() { - { - put("taskKey", "taskValue"); - } - }); - workflow.getTasks().add(task); - deciderService.updateWorkflowOutput(workflow, null); - assertNotNull(workflow.getOutput()); - assertEquals("taskValue", workflow.getOutput().get("taskKey")); - verify(externalPayloadStorageUtils, never()).downloadPayload(anyString()); - - // when terminate task has output in external payload storage - String externalOutputPayloadStoragePath = "/task/output/terminate.json"; - workflow.getTasks().get(0).setOutputData(null); - workflow.getTasks() - .get(0) - .setExternalOutputPayloadStoragePath(externalOutputPayloadStoragePath); - when(externalPayloadStorageUtils.downloadPayload(externalOutputPayloadStoragePath)) - .thenReturn( - new HashMap() { - { - put("taskKey", "taskValue"); - } - }); - deciderService.updateWorkflowOutput(workflow, null); - assertNotNull(workflow.getOutput()); - assertEquals("taskValue", workflow.getOutput().get("taskKey")); - verify(externalPayloadStorageUtils, times(1)).downloadPayload(anyString()); - } - - @Test - public void testCheckWorkflowTimeout() { - Counter counter = - registry.counter( - "workflow_failure", - "class", - "WorkflowMonitor", - "workflowName", - "test", - "status", - "TIMED_OUT", - "ownerApp", - "junit"); - long counterCount = counter.count(); - assertEquals(0, counter.count()); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - WorkflowModel 
workflow = new WorkflowModel(); - workflow.setOwnerApp("junit"); - workflow.setCreateTime(System.currentTimeMillis() - 10_000); - workflow.setWorkflowId("workflow_id"); - - // no-op - workflow.setWorkflowDefinition(null); - deciderService.checkWorkflowTimeout(workflow); - - // no-op - workflow.setWorkflowDefinition(workflowDef); - deciderService.checkWorkflowTimeout(workflow); - - // alert - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.ALERT_ONLY); - workflowDef.setTimeoutSeconds(2); - workflow.setWorkflowDefinition(workflowDef); - deciderService.checkWorkflowTimeout(workflow); - assertEquals(++counterCount, counter.count()); - - // time out - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflow.setWorkflowDefinition(workflowDef); - try { - deciderService.checkWorkflowTimeout(workflow); - } catch (TerminateWorkflowException twe) { - assertTrue(twe.getMessage().contains("Workflow timed out")); - } - - // for a retried workflow - workflow.setLastRetriedTime(System.currentTimeMillis() - 5_000); - try { - deciderService.checkWorkflowTimeout(workflow); - } catch (TerminateWorkflowException twe) { - assertTrue(twe.getMessage().contains("Workflow timed out")); - } - } - - @Test - public void testCheckForWorkflowCompletion() { - WorkflowDef conditionalWorkflowDef = createConditionalWF(); - WorkflowTask terminateWT = new WorkflowTask(); - terminateWT.setType(TaskType.TERMINATE.name()); - terminateWT.setTaskReferenceName("terminate"); - terminateWT.setName("terminate"); - terminateWT.getInputParameters().put("terminationStatus", "COMPLETED"); - conditionalWorkflowDef.getTasks().add(terminateWT); - - // when workflow has no tasks - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(conditionalWorkflowDef); - - // then workflow completion check returns false - assertFalse(deciderService.checkForWorkflowCompletion(workflow)); - - // when only part of the tasks are completed - TaskModel decTask = new TaskModel(); - 
decTask.setTaskType(DECISION.name()); - decTask.setReferenceTaskName("conditional2"); - decTask.setStatus(TaskModel.Status.COMPLETED); - - TaskModel task1 = new TaskModel(); - decTask.setTaskType(SIMPLE.name()); - task1.setReferenceTaskName("t1"); - task1.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().addAll(Arrays.asList(decTask, task1)); - - // then workflow completion check returns false - assertFalse(deciderService.checkForWorkflowCompletion(workflow)); - - // when the terminate task is COMPLETED - TaskModel task2 = new TaskModel(); - decTask.setTaskType(SIMPLE.name()); - task2.setReferenceTaskName("t2"); - task2.setStatus(TaskModel.Status.SCHEDULED); - - TaskModel terminateTask = new TaskModel(); - decTask.setTaskType(TaskType.TERMINATE.name()); - terminateTask.setReferenceTaskName("terminate"); - terminateTask.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().addAll(Arrays.asList(task2, terminateTask)); - - // then the workflow completion check returns true - assertTrue(deciderService.checkForWorkflowCompletion(workflow)); - } - - private WorkflowDef createConditionalWF() { - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - Map inputParams1 = new HashMap<>(); - inputParams1.put("p1", "workflow.input.param1"); - inputParams1.put("p2", "workflow.input.param2"); - workflowTask1.setInputParameters(inputParams1); - workflowTask1.setTaskReferenceName("t1"); - workflowTask1.setTaskDefinition(new TaskDef("junit_task_1")); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - Map inputParams2 = new HashMap<>(); - inputParams2.put("tp1", "workflow.input.param1"); - workflowTask2.setInputParameters(inputParams2); - workflowTask2.setTaskReferenceName("t2"); - workflowTask2.setTaskDefinition(new TaskDef("junit_task_2")); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - Map inputParams3 = new HashMap<>(); - 
inputParams2.put("tp3", "workflow.input.param2"); - workflowTask3.setInputParameters(inputParams3); - workflowTask3.setTaskReferenceName("t3"); - workflowTask3.setTaskDefinition(new TaskDef("junit_task_3")); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("Conditional Workflow"); - workflowDef.setDescription("Conditional Workflow"); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask decisionTask2 = new WorkflowTask(); - decisionTask2.setType(DECISION.name()); - decisionTask2.setCaseValueParam("case"); - decisionTask2.setName("conditional2"); - decisionTask2.setTaskReferenceName("conditional2"); - Map> dc = new HashMap<>(); - dc.put("one", Arrays.asList(workflowTask1, workflowTask3)); - dc.put("two", Collections.singletonList(workflowTask2)); - decisionTask2.setDecisionCases(dc); - decisionTask2.getInputParameters().put("case", "workflow.input.param2"); - - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(DECISION.name()); - decisionTask.setCaseValueParam("case"); - decisionTask.setName("conditional"); - decisionTask.setTaskReferenceName("conditional"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("nested", Collections.singletonList(decisionTask2)); - decisionCases.put("three", Collections.singletonList(workflowTask3)); - decisionTask.setDecisionCases(decisionCases); - decisionTask.getInputParameters().put("case", "workflow.input.param1"); - decisionTask.getDefaultCase().add(workflowTask2); - workflowDef.getTasks().add(decisionTask); - - WorkflowTask notifyTask = new WorkflowTask(); - notifyTask.setName("junit_task_4"); - notifyTask.setTaskReferenceName("junit_task_4"); - notifyTask.setTaskDefinition(new TaskDef("junit_task_4")); - - WorkflowTask finalDecisionTask = new WorkflowTask(); - finalDecisionTask.setName("finalcondition"); - finalDecisionTask.setTaskReferenceName("tf"); - finalDecisionTask.setType(DECISION.name()); - 
finalDecisionTask.setCaseValueParam("finalCase"); - Map fi = new HashMap<>(); - fi.put("finalCase", "workflow.input.finalCase"); - finalDecisionTask.setInputParameters(fi); - finalDecisionTask.getDecisionCases().put("notify", Collections.singletonList(notifyTask)); - - workflowDef.getTasks().add(finalDecisionTask); - return workflowDef; - } - - private WorkflowDef createLinearWorkflow() { - - Map inputParams = new HashMap<>(); - inputParams.put("p1", "workflow.input.param1"); - inputParams.put("p2", "workflow.input.param2"); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_l1"); - workflowTask1.setInputParameters(inputParams); - workflowTask1.setTaskReferenceName("s1"); - workflowTask1.setTaskDefinition(new TaskDef("junit_task_l1")); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_l2"); - workflowTask2.setInputParameters(inputParams); - workflowTask2.setTaskReferenceName("s2"); - workflowTask2.setTaskDefinition(new TaskDef("junit_task_l2")); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - workflowDef.setName("Linear Workflow"); - workflowDef.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2)); - - return workflowDef; - } - - private WorkflowModel createDefaultWorkflow() { - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("TestDeciderService"); - workflowDef.setVersion(1); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - workflow.getInput().put("requestId", "request id 001"); - workflow.getInput().put("hasAwards", true); - workflow.getInput().put("channelMapping", 5); - Map name = new HashMap<>(); - name.put("name", "The Who"); - name.put("year", 1970); - Map name2 = new HashMap<>(); - name2.put("name", "The Doors"); - name2.put("year", 1975); - - List names = new LinkedList<>(); - names.add(name); 
- names.add(name2); - - workflow.getOutput().put("name", name); - workflow.getOutput().put("names", names); - workflow.getOutput().put("awards", 200); - - TaskModel task = new TaskModel(); - task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.setStatus(TaskModel.Status.COMPLETED); - - TaskModel task2 = new TaskModel(); - task2.setReferenceTaskName("task3"); - task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc"); - task2.setStatus(TaskModel.Status.SCHEDULED); - - workflow.getTasks().add(task); - workflow.getTasks().add(task2); - - return workflow; - } - - private WorkflowDef createNestedWorkflow() { - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("Nested Workflow"); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - Map inputParams = new HashMap<>(); - inputParams.put("p1", "workflow.input.param1"); - inputParams.put("p2", "workflow.input.param2"); - - List tasks = new ArrayList<>(10); - - for (int i = 0; i < 10; i++) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("junit_task_" + i); - workflowTask.setInputParameters(inputParams); - workflowTask.setTaskReferenceName("t" + i); - workflowTask.setTaskDefinition(new TaskDef("junit_task_" + i)); - tasks.add(workflowTask); - } - - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(DECISION.name()); - decisionTask.setName("Decision"); - decisionTask.setTaskReferenceName("d1"); - decisionTask.setDefaultCase(Collections.singletonList(tasks.get(8))); - decisionTask.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("a", Arrays.asList(tasks.get(6), tasks.get(9))); - decisionCases.put("b", Collections.singletonList(tasks.get(7))); - decisionTask.setDecisionCases(decisionCases); - - WorkflowDef subWorkflowDef = createLinearWorkflow(); - WorkflowTask subWorkflow = 
new WorkflowTask(); - subWorkflow.setType(SUB_WORKFLOW.name()); - subWorkflow.setName("sw1"); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(subWorkflowDef.getName()); - subWorkflow.setSubWorkflowParam(subWorkflowParams); - subWorkflow.setTaskReferenceName("sw1"); - - WorkflowTask forkTask2 = new WorkflowTask(); - forkTask2.setType(FORK_JOIN.name()); - forkTask2.setName("second fork"); - forkTask2.setTaskReferenceName("fork2"); - forkTask2.getForkTasks().add(Arrays.asList(tasks.get(2), tasks.get(4))); - forkTask2.getForkTasks().add(Arrays.asList(tasks.get(3), decisionTask)); - - WorkflowTask joinTask2 = new WorkflowTask(); - joinTask2.setName("join2"); - joinTask2.setType(JOIN.name()); - joinTask2.setTaskReferenceName("join2"); - joinTask2.setJoinOn(Arrays.asList("t4", "d1")); - - WorkflowTask forkTask1 = new WorkflowTask(); - forkTask1.setType(FORK_JOIN.name()); - forkTask1.setName("fork1"); - forkTask1.setTaskReferenceName("fork1"); - forkTask1.getForkTasks().add(Collections.singletonList(tasks.get(1))); - forkTask1.getForkTasks().add(Arrays.asList(forkTask2, joinTask2)); - forkTask1.getForkTasks().add(Collections.singletonList(subWorkflow)); - - WorkflowTask joinTask1 = new WorkflowTask(); - joinTask1.setName("join1"); - joinTask1.setType(JOIN.name()); - joinTask1.setTaskReferenceName("join1"); - joinTask1.setJoinOn(Arrays.asList("t1", "fork2")); - - workflowDef.getTasks().add(forkTask1); - workflowDef.getTasks().add(joinTask1); - workflowDef.getTasks().add(tasks.get(5)); - - return workflowDef; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java deleted file mode 100644 index 2556e55d0..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class TestWorkflowDef { - - @Test - public void testContainsType() { - WorkflowDef def = new WorkflowDef(); - def.setName("test_workflow"); - def.setVersion(1); - def.setSchemaVersion(2); - def.getTasks().add(createWorkflowTask("simple_task_1")); - def.getTasks().add(createWorkflowTask("simple_task_2")); - - WorkflowTask task3 = createWorkflowTask("decision_task_1"); - def.getTasks().add(task3); - task3.setType(TaskType.DECISION.name()); - task3.getDecisionCases() - .put( - "Case1", - Arrays.asList( - createWorkflowTask("case_1_task_1"), - createWorkflowTask("case_1_task_2"))); - task3.getDecisionCases() - .put( - "Case2", - Arrays.asList( - createWorkflowTask("case_2_task_1"), - createWorkflowTask("case_2_task_2"))); - task3.getDecisionCases() - .put( - "Case3", - Collections.singletonList( - deciderTask( - "decision_task_2", - toMap("Case31", "case31_task_1", "case_31_task_2"), - Collections.singletonList("case3_def_task")))); - 
def.getTasks().add(createWorkflowTask("simple_task_3")); - - assertTrue(def.containsType(TaskType.SIMPLE.name())); - assertTrue(def.containsType(TaskType.DECISION.name())); - assertFalse(def.containsType(TaskType.DO_WHILE.name())); - } - - @Test - public void testGetNextTask_Decision() { - WorkflowDef def = new WorkflowDef(); - def.setName("test_workflow"); - def.setVersion(1); - def.setSchemaVersion(2); - def.getTasks().add(createWorkflowTask("simple_task_1")); - def.getTasks().add(createWorkflowTask("simple_task_2")); - - WorkflowTask task3 = createWorkflowTask("decision_task_1"); - def.getTasks().add(task3); - task3.setType(TaskType.DECISION.name()); - task3.getDecisionCases() - .put( - "Case1", - Arrays.asList( - createWorkflowTask("case_1_task_1"), - createWorkflowTask("case_1_task_2"))); - task3.getDecisionCases() - .put( - "Case2", - Arrays.asList( - createWorkflowTask("case_2_task_1"), - createWorkflowTask("case_2_task_2"))); - task3.getDecisionCases() - .put( - "Case3", - Collections.singletonList( - deciderTask( - "decision_task_2", - toMap("Case31", "case31_task_1", "case_31_task_2"), - Collections.singletonList("case3_def_task")))); - def.getTasks().add(createWorkflowTask("simple_task_3")); - - // Assertions - WorkflowTask next = def.getNextTask("simple_task_1"); - assertNotNull(next); - assertEquals("simple_task_2", next.getTaskReferenceName()); - - next = def.getNextTask("simple_task_2"); - assertNotNull(next); - assertEquals(task3.getTaskReferenceName(), next.getTaskReferenceName()); - - next = def.getNextTask("decision_task_1"); - assertNotNull(next); - assertEquals("simple_task_3", next.getTaskReferenceName()); - - next = def.getNextTask("case_1_task_1"); - assertNotNull(next); - assertEquals("case_1_task_2", next.getTaskReferenceName()); - - next = def.getNextTask("case_1_task_2"); - assertNotNull(next); - assertEquals("simple_task_3", next.getTaskReferenceName()); - - next = def.getNextTask("case3_def_task"); - assertNotNull(next); - 
assertEquals("simple_task_3", next.getTaskReferenceName()); - - next = def.getNextTask("case31_task_1"); - assertNotNull(next); - assertEquals("case_31_task_2", next.getTaskReferenceName()); - } - - @Test - public void testGetNextTask_Conditional() { - String COND_TASK_WF = "COND_TASK_WF"; - List workflowTasks = new ArrayList<>(10); - for (int i = 0; i < 10; i++) { - workflowTasks.add(createWorkflowTask("junit_task_" + i)); - } - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(COND_TASK_WF); - workflowDef.setDescription(COND_TASK_WF); - - WorkflowTask subCaseTask = new WorkflowTask(); - subCaseTask.setType(TaskType.DECISION.name()); - subCaseTask.setCaseValueParam("case2"); - subCaseTask.setName("case2"); - subCaseTask.setTaskReferenceName("case2"); - Map> dcx = new HashMap<>(); - dcx.put("sc1", workflowTasks.subList(4, 5)); - dcx.put("sc2", workflowTasks.subList(5, 7)); - subCaseTask.setDecisionCases(dcx); - - WorkflowTask caseTask = new WorkflowTask(); - caseTask.setType(TaskType.DECISION.name()); - caseTask.setCaseValueParam("case"); - caseTask.setName("case"); - caseTask.setTaskReferenceName("case"); - Map> dc = new HashMap<>(); - dc.put("c1", Arrays.asList(workflowTasks.get(0), subCaseTask, workflowTasks.get(1))); - dc.put("c2", Collections.singletonList(workflowTasks.get(3))); - caseTask.setDecisionCases(dc); - - workflowDef.getTasks().add(caseTask); - workflowDef.getTasks().addAll(workflowTasks.subList(8, 9)); - - WorkflowTask nextTask = workflowDef.getNextTask("case"); - assertEquals("junit_task_8", nextTask.getTaskReferenceName()); - - nextTask = workflowDef.getNextTask("junit_task_8"); - assertNull(nextTask); - - nextTask = workflowDef.getNextTask("junit_task_0"); - assertNotNull(nextTask); - assertEquals("case2", nextTask.getTaskReferenceName()); - - nextTask = workflowDef.getNextTask("case2"); - assertNotNull(nextTask); - assertEquals("junit_task_1", nextTask.getTaskReferenceName()); - } - - private WorkflowTask 
createWorkflowTask(String name) { - WorkflowTask task = new WorkflowTask(); - task.setName(name); - task.setTaskReferenceName(name); - return task; - } - - private WorkflowTask deciderTask( - String name, Map> decisions, List defaultTasks) { - WorkflowTask task = createWorkflowTask(name); - task.setType(TaskType.DECISION.name()); - decisions.forEach( - (key, value) -> { - List tasks = new LinkedList<>(); - value.forEach(taskName -> tasks.add(createWorkflowTask(taskName))); - task.getDecisionCases().put(key, tasks); - }); - List tasks = new LinkedList<>(); - defaultTasks.forEach(defaultTask -> tasks.add(createWorkflowTask(defaultTask))); - task.setDefaultCase(tasks); - return task; - } - - private Map> toMap(String key, String... values) { - Map> map = new HashMap<>(); - List vals = Arrays.asList(values); - map.put(key, vals); - return map; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java deleted file mode 100644 index 88d67fb58..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java +++ /dev/null @@ -1,2487 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import java.time.Duration; -import java.util.*; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.stubbing.Answer; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import 
com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.execution.mapper.*; -import com.netflix.conductor.core.execution.tasks.*; -import com.netflix.conductor.core.listener.WorkflowStatusListener; -import com.netflix.conductor.core.metadata.MetadataMapperService; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.service.ExecutionLockService; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.*; -import static com.netflix.conductor.core.exception.ApplicationException.Code.CONFLICT; - -import static java.util.Comparator.comparingInt; -import static java.util.stream.Collectors.groupingBy; -import static java.util.stream.Collectors.maxBy; -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@ContextConfiguration( - classes = { - TestObjectMapperConfiguration.class, - TestWorkflowExecutor.TestConfiguration.class - }) -@RunWith(SpringRunner.class) -public class TestWorkflowExecutor { - - private WorkflowExecutor workflowExecutor; - private ExecutionDAOFacade executionDAOFacade; - private MetadataDAO metadataDAO; - private QueueDAO queueDAO; - private WorkflowStatusListener workflowStatusListener; - private ExecutionLockService executionLockService; - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - - @Configuration - @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans. 
- public static class TestConfiguration { - - @Bean(TASK_TYPE_SUB_WORKFLOW) - public SubWorkflow subWorkflow(ObjectMapper objectMapper) { - return new SubWorkflow(objectMapper); - } - - @Bean(TASK_TYPE_LAMBDA) - public Lambda lambda() { - return new Lambda(); - } - - @Bean(TASK_TYPE_WAIT) - public Wait waitBean() { - return new Wait(); - } - - @Bean("HTTP") - public WorkflowSystemTask http() { - return new WorkflowSystemTaskStub("HTTP") { - @Override - public boolean isAsync() { - return true; - } - }; - } - - @Bean("HTTP2") - public WorkflowSystemTask http2() { - return new WorkflowSystemTaskStub("HTTP2"); - } - - @Bean(TASK_TYPE_JSON_JQ_TRANSFORM) - public WorkflowSystemTask jsonBean() { - return new WorkflowSystemTaskStub("JSON_JQ_TRANSFORM") { - @Override - public boolean isAsync() { - return false; - } - - @Override - public void start( - WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - task.setStatus(TaskModel.Status.COMPLETED); - } - }; - } - - @Bean - public SystemTaskRegistry systemTaskRegistry(Set tasks) { - return new SystemTaskRegistry(tasks); - } - } - - @Autowired private ObjectMapper objectMapper; - - @Autowired private SystemTaskRegistry systemTaskRegistry; - - @Autowired private DefaultListableBeanFactory beanFactory; - - @Autowired private Map evaluators; - - @Before - public void init() { - executionDAOFacade = mock(ExecutionDAOFacade.class); - metadataDAO = mock(MetadataDAO.class); - queueDAO = mock(QueueDAO.class); - workflowStatusListener = mock(WorkflowStatusListener.class); - externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class); - executionLockService = mock(ExecutionLockService.class); - ParametersUtils parametersUtils = new ParametersUtils(objectMapper); - IDGenerator idGenerator = new IDGenerator(); - Map taskMappers = new HashMap<>(); - taskMappers.put(DECISION, new DecisionTaskMapper()); - taskMappers.put(SWITCH, new SwitchTaskMapper(evaluators)); - taskMappers.put(DYNAMIC, new 
DynamicTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(FORK_JOIN, new ForkJoinTaskMapper()); - taskMappers.put(JOIN, new JoinTaskMapper()); - taskMappers.put( - FORK_JOIN_DYNAMIC, - new ForkJoinDynamicTaskMapper( - idGenerator, parametersUtils, objectMapper, metadataDAO)); - taskMappers.put(USER_DEFINED, new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(SIMPLE, new SimpleTaskMapper(parametersUtils)); - taskMappers.put(SUB_WORKFLOW, new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(EVENT, new EventTaskMapper(parametersUtils)); - taskMappers.put(WAIT, new WaitTaskMapper(parametersUtils)); - taskMappers.put(HTTP, new HTTPTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(LAMBDA, new LambdaTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put(INLINE, new InlineTaskMapper(parametersUtils, metadataDAO)); - - DeciderService deciderService = - new DeciderService( - idGenerator, - parametersUtils, - metadataDAO, - externalPayloadStorageUtils, - systemTaskRegistry, - taskMappers, - Duration.ofMinutes(60)); - MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO); - - ConductorProperties properties = mock(ConductorProperties.class); - when(properties.getActiveWorkerLastPollTimeout()).thenReturn(Duration.ofSeconds(100)); - when(properties.getTaskExecutionPostponeDuration()).thenReturn(Duration.ofSeconds(60)); - when(properties.getWorkflowOffsetTimeout()).thenReturn(Duration.ofSeconds(30)); - - workflowExecutor = - new WorkflowExecutor( - deciderService, - metadataDAO, - queueDAO, - metadataMapperService, - workflowStatusListener, - executionDAOFacade, - properties, - executionLockService, - systemTaskRegistry, - parametersUtils, - idGenerator); - } - - @Test - public void testScheduleTask() { - IDGenerator idGenerator = new IDGenerator(); - WorkflowSystemTaskStub httpTask = beanFactory.getBean("HTTP", WorkflowSystemTaskStub.class); - WorkflowSystemTaskStub http2Task 
= - beanFactory.getBean("HTTP2", WorkflowSystemTaskStub.class); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("1"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("1"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - List tasks = new LinkedList<>(); - - WorkflowTask taskToSchedule = new WorkflowTask(); - taskToSchedule.setWorkflowTaskType(TaskType.USER_DEFINED); - taskToSchedule.setType("HTTP"); - - WorkflowTask taskToSchedule2 = new WorkflowTask(); - taskToSchedule2.setWorkflowTaskType(TaskType.USER_DEFINED); - taskToSchedule2.setType("HTTP2"); - - WorkflowTask wait = new WorkflowTask(); - wait.setWorkflowTaskType(TaskType.WAIT); - wait.setType("WAIT"); - wait.setTaskReferenceName("wait"); - - TaskModel task1 = new TaskModel(); - task1.setTaskType(taskToSchedule.getType()); - task1.setTaskDefName(taskToSchedule.getName()); - task1.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); - task1.setWorkflowInstanceId(workflow.getWorkflowId()); - task1.setCorrelationId(workflow.getCorrelationId()); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setInputData(new HashMap<>()); - task1.setStatus(TaskModel.Status.SCHEDULED); - task1.setRetryCount(0); - task1.setCallbackAfterSeconds(taskToSchedule.getStartDelay()); - task1.setWorkflowTask(taskToSchedule); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TASK_TYPE_WAIT); - task2.setTaskDefName(taskToSchedule.getName()); - task2.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); - task2.setWorkflowInstanceId(workflow.getWorkflowId()); - task2.setCorrelationId(workflow.getCorrelationId()); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setInputData(new HashMap<>()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.IN_PROGRESS); - task2.setWorkflowTask(taskToSchedule); - - TaskModel task3 = new TaskModel(); - 
task3.setTaskType(taskToSchedule2.getType()); - task3.setTaskDefName(taskToSchedule.getName()); - task3.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); - task3.setWorkflowInstanceId(workflow.getWorkflowId()); - task3.setCorrelationId(workflow.getCorrelationId()); - task3.setScheduledTime(System.currentTimeMillis()); - task3.setTaskId(idGenerator.generate()); - task3.setInputData(new HashMap<>()); - task3.setStatus(TaskModel.Status.SCHEDULED); - task3.setRetryCount(0); - task3.setCallbackAfterSeconds(taskToSchedule.getStartDelay()); - task3.setWorkflowTask(taskToSchedule); - - tasks.add(task1); - tasks.add(task2); - tasks.add(task3); - - when(executionDAOFacade.createTasks(tasks)).thenReturn(tasks); - AtomicInteger startedTaskCount = new AtomicInteger(0); - doAnswer( - invocation -> { - startedTaskCount.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTask(any()); - - AtomicInteger queuedTaskCount = new AtomicInteger(0); - final Answer answer = - invocation -> { - String queueName = invocation.getArgument(0, String.class); - queuedTaskCount.incrementAndGet(); - return null; - }; - doAnswer(answer).when(queueDAO).push(any(), any(), anyLong()); - doAnswer(answer).when(queueDAO).push(any(), any(), anyInt(), anyLong()); - - boolean stateChanged = workflowExecutor.scheduleTask(workflow, tasks); - assertEquals(2, startedTaskCount.get()); - assertEquals(1, queuedTaskCount.get()); - assertTrue(stateChanged); - assertFalse(httpTask.isStarted()); - assertTrue(http2Task.isStarted()); - } - - @Test(expected = TerminateWorkflowException.class) - public void testScheduleTaskFailure() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("wid_01"); - - List tasks = new LinkedList<>(); - - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.TASK_TYPE_SIMPLE); - task1.setTaskDefName("task_1"); - task1.setReferenceTaskName("task_1"); - task1.setWorkflowInstanceId(workflow.getWorkflowId()); - 
task1.setTaskId("tid_01"); - task1.setStatus(TaskModel.Status.SCHEDULED); - task1.setRetryCount(0); - - tasks.add(task1); - - when(executionDAOFacade.createTasks(tasks)).thenThrow(new RuntimeException()); - workflowExecutor.scheduleTask(workflow, tasks); - } - - /** Simulate Queue push failures and assert that scheduleTask doesn't throw an exception. */ - @Test - public void testQueueFailuresDuringScheduleTask() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("wid_01"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("wid"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - List tasks = new LinkedList<>(); - - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.TASK_TYPE_SIMPLE); - task1.setTaskDefName("task_1"); - task1.setReferenceTaskName("task_1"); - task1.setWorkflowInstanceId(workflow.getWorkflowId()); - task1.setTaskId("tid_01"); - task1.setStatus(TaskModel.Status.SCHEDULED); - task1.setRetryCount(0); - - tasks.add(task1); - - when(executionDAOFacade.createTasks(tasks)).thenReturn(tasks); - doThrow(new RuntimeException()) - .when(queueDAO) - .push(anyString(), anyString(), anyInt(), anyLong()); - assertFalse(workflowExecutor.scheduleTask(workflow, tasks)); - } - - @Test - @SuppressWarnings("unchecked") - public void testCompleteWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setWorkflowId("1"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOwnerApp("junit_test"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - workflow.setOutput(Collections.EMPTY_MAP); - - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - 
.when(executionDAOFacade) - .updateWorkflow(any()); - - AtomicInteger updateTasksCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateTasksCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTasks(any()); - - AtomicInteger removeQueueEntryCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - removeQueueEntryCalledCounter.incrementAndGet(); - return null; - }) - .when(queueDAO) - .remove(anyString(), anyString()); - - workflowExecutor.completeWorkflow(workflow); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus()); - assertEquals(1, updateWorkflowCalledCounter.get()); - assertEquals(0, updateTasksCalledCounter.get()); - assertEquals(0, removeQueueEntryCalledCounter.get()); - verify(workflowStatusListener, times(1)) - .onWorkflowCompletedIfEnabled(any(WorkflowModel.class)); - verify(workflowStatusListener, times(0)) - .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class)); - - def.setWorkflowStatusListenerEnabled(true); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflowExecutor.completeWorkflow(workflow); - verify(workflowStatusListener, times(2)) - .onWorkflowCompletedIfEnabled(any(WorkflowModel.class)); - verify(workflowStatusListener, times(0)) - .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testTerminateWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setWorkflowId("1"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOwnerApp("junit_test"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - workflow.setOutput(Collections.EMPTY_MAP); - - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - 
updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateWorkflow(any()); - - AtomicInteger updateTasksCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateTasksCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTasks(any()); - - AtomicInteger removeQueueEntryCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - removeQueueEntryCalledCounter.incrementAndGet(); - return null; - }) - .when(queueDAO) - .remove(anyString(), anyString()); - - workflowExecutor.terminateWorkflow("workflowId", "reason"); - assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus()); - assertEquals(1, updateWorkflowCalledCounter.get()); - assertEquals(1, removeQueueEntryCalledCounter.get()); - - verify(workflowStatusListener, times(1)) - .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class)); - verify(workflowStatusListener, times(1)) - .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class)); - - def.setWorkflowStatusListenerEnabled(true); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflowExecutor.completeWorkflow(workflow); - verify(workflowStatusListener, times(1)) - .onWorkflowCompletedIfEnabled(any(WorkflowModel.class)); - verify(workflowStatusListener, times(1)) - .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class)); - } - - @Test - public void testUploadOutputFailuresDuringTerminateWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - def.setWorkflowStatusListenerEnabled(true); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setWorkflowId("1"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOwnerApp("junit_test"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - workflow.setOutput(Collections.EMPTY_MAP); - - List tasks = new LinkedList<>(); - - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - 
task.setTaskId(UUID.randomUUID().toString()); - task.setReferenceTaskName("t1"); - task.setWorkflowInstanceId(workflow.getWorkflowId()); - task.setTaskDefName("task1"); - task.setStatus(TaskModel.Status.IN_PROGRESS); - - tasks.add(task); - workflow.setTasks(tasks); - - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateWorkflow(any()); - - doThrow(new RuntimeException("any exception")) - .when(externalPayloadStorageUtils) - .verifyAndUpload(workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT); - - workflowExecutor.terminateWorkflow(workflow.getWorkflowId(), "reason"); - assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus()); - assertEquals(1, updateWorkflowCalledCounter.get()); - verify(workflowStatusListener, times(1)) - .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class)); - } - - @Test - @SuppressWarnings("unchecked") - public void testQueueExceptionsIgnoredDuringTerminateWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - def.setWorkflowStatusListenerEnabled(true); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setWorkflowId("1"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOwnerApp("junit_test"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - workflow.setOutput(Collections.EMPTY_MAP); - - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateWorkflow(any()); - - AtomicInteger updateTasksCalledCounter = new AtomicInteger(0); - doAnswer( - 
invocation -> { - updateTasksCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTasks(any()); - - doThrow(new RuntimeException()).when(queueDAO).remove(anyString(), anyString()); - - workflowExecutor.terminateWorkflow("workflowId", "reason"); - assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus()); - assertEquals(1, updateWorkflowCalledCounter.get()); - verify(workflowStatusListener, times(1)) - .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class)); - } - - @Test - public void testRestartWorkflow() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("test_task"); - workflowTask.setTaskReferenceName("task_ref"); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testDef"); - workflowDef.setVersion(1); - workflowDef.setRestartable(true); - workflowDef.getTasks().add(workflowTask); - - TaskModel task_1 = new TaskModel(); - task_1.setTaskId(UUID.randomUUID().toString()); - task_1.setSeq(1); - task_1.setStatus(TaskModel.Status.FAILED); - task_1.setTaskDefName(workflowTask.getName()); - task_1.setReferenceTaskName(workflowTask.getTaskReferenceName()); - - TaskModel task_2 = new TaskModel(); - task_2.setTaskId(UUID.randomUUID().toString()); - task_2.setSeq(2); - task_2.setStatus(TaskModel.Status.FAILED); - task_2.setTaskDefName(workflowTask.getName()); - task_2.setReferenceTaskName(workflowTask.getTaskReferenceName()); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setWorkflowId("test-workflow-id"); - workflow.getTasks().addAll(Arrays.asList(task_1, task_2)); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setEndTime(500); - workflow.setLastRetriedTime(100); - - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - doNothing().when(executionDAOFacade).removeTask(any()); - when(metadataDAO.getWorkflowDef(workflow.getWorkflowName(), workflow.getWorkflowVersion())) 
- .thenReturn(Optional.of(workflowDef)); - when(metadataDAO.getTaskDef(workflowTask.getName())).thenReturn(new TaskDef()); - when(executionDAOFacade.updateWorkflow(any())).thenReturn(""); - - workflowExecutor.restart(workflow.getWorkflowId(), false); - assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertEquals(0, workflow.getEndTime()); - assertEquals(0, workflow.getLastRetriedTime()); - verify(metadataDAO, never()).getLatestWorkflowDef(any()); - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(WorkflowModel.class); - verify(executionDAOFacade, times(1)).createWorkflow(argumentCaptor.capture()); - assertEquals( - workflow.getWorkflowId(), argumentCaptor.getAllValues().get(0).getWorkflowId()); - assertEquals( - workflow.getWorkflowDefinition(), - argumentCaptor.getAllValues().get(0).getWorkflowDefinition()); - - // add a new version of the workflow definition and restart with latest - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.setEndTime(500); - workflow.setLastRetriedTime(100); - workflowDef = new WorkflowDef(); - workflowDef.setName("testDef"); - workflowDef.setVersion(2); - workflowDef.setRestartable(true); - workflowDef.getTasks().addAll(Collections.singletonList(workflowTask)); - - when(metadataDAO.getLatestWorkflowDef(workflow.getWorkflowName())) - .thenReturn(Optional.of(workflowDef)); - workflowExecutor.restart(workflow.getWorkflowId(), true); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertEquals(0, workflow.getEndTime()); - assertEquals(0, workflow.getLastRetriedTime()); - verify(metadataDAO, times(1)).getLatestWorkflowDef(anyString()); - - argumentCaptor = ArgumentCaptor.forClass(WorkflowModel.class); - verify(executionDAOFacade, times(2)).createWorkflow(argumentCaptor.capture()); - assertEquals( - workflow.getWorkflowId(), 
argumentCaptor.getAllValues().get(1).getWorkflowId()); - assertEquals(workflowDef, argumentCaptor.getAllValues().get(1).getWorkflowDefinition()); - } - - @Test(expected = ApplicationException.class) - public void testRetryNonTerminalWorkflow() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryNonTerminalWorkflow"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - } - - @Test(expected = ApplicationException.class) - public void testRetryWorkflowNoTasks() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("ApplicationException"); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setTasks(Collections.emptyList()); - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - } - - @Test(expected = ApplicationException.class) - public void testRetryWorkflowNoFailedTasks() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - // add 2 failed task in 2 forks and 1 cancelled in the 3rd fork - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(1); - task_1_1.setRetryCount(0); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setTaskDefName("task1"); - 
task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_1_2 = new TaskModel(); - task_1_2.setTaskId(UUID.randomUUID().toString()); - task_1_2.setSeq(2); - task_1_2.setRetryCount(1); - task_1_2.setTaskType(TaskType.SIMPLE.toString()); - task_1_2.setStatus(TaskModel.Status.COMPLETED); - task_1_2.setTaskDefName("task1"); - task_1_2.setReferenceTaskName("task1_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - } - - @Test - public void testRetryWorkflow() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateWorkflow(any()); - - AtomicInteger updateTasksCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateTasksCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTasks(any()); - - AtomicInteger updateTaskCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateTaskCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTask(any()); - - // add 2 failed task in 2 forks and 1 cancelled in 
the 3rd fork - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(20); - task_1_1.setRetryCount(1); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.CANCELED); - task_1_1.setRetried(true); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_1_2 = new TaskModel(); - task_1_2.setTaskId(UUID.randomUUID().toString()); - task_1_2.setSeq(21); - task_1_2.setRetryCount(1); - task_1_2.setTaskType(TaskType.SIMPLE.toString()); - task_1_2.setStatus(TaskModel.Status.FAILED); - task_1_2.setTaskDefName("task1"); - task_1_2.setWorkflowTask(new WorkflowTask()); - task_1_2.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(22); - task_2_1.setRetryCount(1); - task_2_1.setStatus(TaskModel.Status.FAILED); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - TaskModel task_3_1 = new TaskModel(); - task_3_1.setTaskId(UUID.randomUUID().toString()); - task_3_1.setSeq(23); - task_3_1.setRetryCount(1); - task_3_1.setStatus(TaskModel.Status.CANCELED); - task_3_1.setTaskType(TaskType.SIMPLE.toString()); - task_3_1.setTaskDefName("task3"); - task_3_1.setWorkflowTask(new WorkflowTask()); - task_3_1.setReferenceTaskName("task3_ref1"); - - TaskModel task_4_1 = new TaskModel(); - task_4_1.setTaskId(UUID.randomUUID().toString()); - task_4_1.setSeq(122); - task_4_1.setRetryCount(1); - task_4_1.setStatus(TaskModel.Status.FAILED); - task_4_1.setTaskType(TaskType.SIMPLE.toString()); - task_4_1.setTaskDefName("task1"); - task_4_1.setWorkflowTask(new WorkflowTask()); - task_4_1.setReferenceTaskName("task4_refABC"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2, 
task_2_1, task_3_1, task_4_1)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - // then: - assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertEquals(1, updateWorkflowCalledCounter.get()); - assertEquals(1, updateTasksCalledCounter.get()); - assertEquals(0, updateTaskCalledCounter.get()); - } - - @Test - public void testRetryWorkflowReturnsNoDuplicates() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(10); - task_1_1.setRetryCount(0); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_1_2 = new TaskModel(); - task_1_2.setTaskId(UUID.randomUUID().toString()); - task_1_2.setSeq(11); - task_1_2.setRetryCount(1); - task_1_2.setTaskType(TaskType.SIMPLE.toString()); - task_1_2.setStatus(TaskModel.Status.COMPLETED); - task_1_2.setTaskDefName("task1"); - task_1_2.setWorkflowTask(new WorkflowTask()); - task_1_2.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new 
TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(21); - task_2_1.setRetryCount(0); - task_2_1.setStatus(TaskModel.Status.CANCELED); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - TaskModel task_3_1 = new TaskModel(); - task_3_1.setTaskId(UUID.randomUUID().toString()); - task_3_1.setSeq(31); - task_3_1.setRetryCount(1); - task_3_1.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR); - task_3_1.setTaskType(TaskType.SIMPLE.toString()); - task_3_1.setTaskDefName("task1"); - task_3_1.setWorkflowTask(new WorkflowTask()); - task_3_1.setReferenceTaskName("task3_ref1"); - - TaskModel task_4_1 = new TaskModel(); - task_4_1.setTaskId(UUID.randomUUID().toString()); - task_4_1.setSeq(41); - task_4_1.setRetryCount(0); - task_4_1.setStatus(TaskModel.Status.TIMED_OUT); - task_4_1.setTaskType(TaskType.SIMPLE.toString()); - task_4_1.setTaskDefName("task1"); - task_4_1.setWorkflowTask(new WorkflowTask()); - task_4_1.setReferenceTaskName("task4_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2, task_2_1, task_3_1, task_4_1)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - assertEquals(8, workflow.getTasks().size()); - } - - @Test - public void testRetryWorkflowMultipleRetries() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - 
workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(10); - task_1_1.setRetryCount(0); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(20); - task_2_1.setRetryCount(0); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setStatus(TaskModel.Status.CANCELED); - task_2_1.setTaskDefName("task1"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - assertEquals(4, workflow.getTasks().size()); - - // Reset Last Workflow Task to FAILED. - TaskModel lastTask = - workflow.getTasks().stream() - .filter(t -> t.getReferenceTaskName().equals("task1_ref1")) - .collect( - groupingBy( - TaskModel::getReferenceTaskName, - maxBy(comparingInt(TaskModel::getSeq)))) - .values() - .stream() - .map(Optional::get) - .collect(Collectors.toList()) - .get(0); - lastTask.setStatus(TaskModel.Status.FAILED); - workflow.setStatus(WorkflowModel.Status.FAILED); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - assertEquals(5, workflow.getTasks().size()); - - // Reset Last Workflow Task to FAILED. - // Reset Last Workflow Task to FAILED. 
- TaskModel lastTask2 = - workflow.getTasks().stream() - .filter(t -> t.getReferenceTaskName().equals("task1_ref1")) - .collect( - groupingBy( - TaskModel::getReferenceTaskName, - maxBy(comparingInt(TaskModel::getSeq)))) - .values() - .stream() - .map(Optional::get) - .collect(Collectors.toList()) - .get(0); - lastTask2.setStatus(TaskModel.Status.FAILED); - workflow.setStatus(WorkflowModel.Status.FAILED); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - assertEquals(6, workflow.getTasks().size()); - } - - @Test - public void testRetryWorkflowWithJoinTask() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - TaskModel forkTask = new TaskModel(); - forkTask.setTaskType(TaskType.FORK_JOIN.toString()); - forkTask.setTaskId(UUID.randomUUID().toString()); - forkTask.setSeq(1); - forkTask.setRetryCount(1); - forkTask.setStatus(TaskModel.Status.COMPLETED); - forkTask.setReferenceTaskName("task_fork"); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(20); - task_1_1.setRetryCount(1); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(22); - task_2_1.setRetryCount(1); - task_2_1.setStatus(TaskModel.Status.CANCELED); - 
task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - TaskModel joinTask = new TaskModel(); - joinTask.setTaskType(TaskType.JOIN.toString()); - joinTask.setTaskId(UUID.randomUUID().toString()); - joinTask.setSeq(25); - joinTask.setRetryCount(1); - joinTask.setStatus(TaskModel.Status.CANCELED); - joinTask.setReferenceTaskName("task_join"); - joinTask.getInputData() - .put( - "joinOn", - Arrays.asList( - task_1_1.getReferenceTaskName(), task_2_1.getReferenceTaskName())); - - workflow.getTasks().addAll(Arrays.asList(forkTask, task_1_1, task_2_1, joinTask)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - assertEquals(6, workflow.getTasks().size()); - assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - } - - @Test - public void testRetryFromLastFailedSubWorkflowTaskThenStartWithLastFailedTask() { - IDGenerator idGenerator = new IDGenerator(); - // given - String id = idGenerator.generate(); - String workflowInstanceId = idGenerator.generate(); - TaskModel task = new TaskModel(); - task.setTaskType(TaskType.SIMPLE.name()); - task.setTaskDefName("task"); - task.setReferenceTaskName("task_ref"); - task.setWorkflowInstanceId(workflowInstanceId); - task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - task.setStatus(TaskModel.Status.COMPLETED); - task.setRetryCount(0); - task.setWorkflowTask(new WorkflowTask()); - task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(id); - task.setSeq(1); - - TaskModel task1 = new TaskModel(); - 
task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(workflowInstanceId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.FAILED); - task1.setRetryCount(0); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - task1.setSubWorkflowId(id); - task1.setSeq(2); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setWorkflowId(id); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("subworkflow"); - workflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(workflowDef); - subWorkflow.setStatus(WorkflowModel.Status.FAILED); - subWorkflow.getTasks().addAll(Arrays.asList(task, task1)); - subWorkflow.setParentWorkflowId("testRunWorkflowId"); - - TaskModel task2 = new TaskModel(); - task2.setWorkflowInstanceId(subWorkflow.getWorkflowId()); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.FAILED); - task2.setRetryCount(0); - task2.setOutputData(new HashMap<>()); - task2.setSubWorkflowId(id); - task2.setTaskType(TaskType.SUB_WORKFLOW.name()); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setTasks(Collections.singletonList(task2)); - workflowDef = new WorkflowDef(); - workflowDef.setName("first_workflow"); - workflow.setWorkflowDefinition(workflowDef); - - // when - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(workflowDef)); - when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) 
- .thenReturn(task1); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - workflowExecutor.retry(workflow.getWorkflowId(), true); - - // then - assertEquals(task.getStatus(), TaskModel.Status.COMPLETED); - assertEquals(task1.getStatus(), TaskModel.Status.IN_PROGRESS); - assertEquals(workflow.getPreviousStatus(), WorkflowModel.Status.FAILED); - assertEquals(workflow.getStatus(), WorkflowModel.Status.RUNNING); - assertEquals(subWorkflow.getPreviousStatus(), WorkflowModel.Status.FAILED); - assertEquals(subWorkflow.getStatus(), WorkflowModel.Status.RUNNING); - } - - @Test - public void testRetryTimedOutWorkflowWithoutFailedTasks() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.TIMED_OUT); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(20); - task_1_1.setRetryCount(1); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.COMPLETED); - task_1_1.setRetried(true); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(22); - task_2_1.setRetryCount(1); - task_2_1.setStatus(TaskModel.Status.COMPLETED); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - 
task_2_1.setReferenceTaskName("task2_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1)); - - AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateWorkflowCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateWorkflow(any()); - - AtomicInteger updateTasksCalledCounter = new AtomicInteger(0); - doAnswer( - invocation -> { - updateTasksCalledCounter.incrementAndGet(); - return null; - }) - .when(executionDAOFacade) - .updateTasks(any()); - // end of setup - - // when - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - - workflowExecutor.retry(workflow.getWorkflowId(), false); - - // then - assertEquals(WorkflowModel.Status.TIMED_OUT, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertTrue(workflow.getLastRetriedTime() > 0); - assertEquals(1, updateWorkflowCalledCounter.get()); - assertEquals(1, updateTasksCalledCounter.get()); - } - - @Test(expected = ApplicationException.class) - public void testRerunNonTerminalWorkflow() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryNonTerminalWorkflow"); - workflow.setStatus(WorkflowModel.Status.RUNNING); - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId()); - workflowExecutor.rerun(rerunWorkflowRequest); - } - - @Test - public void testRerunWorkflow() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRerunWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRerunWorkflowId"); - workflowDef.setVersion(1); - 
workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setReasonForIncompletion("task1 failed"); - workflow.setFailedReferenceTaskNames( - new HashSet<>() { - { - add("task1_ref1"); - } - }); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(20); - task_1_1.setRetryCount(1); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setRetried(true); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(22); - task_2_1.setRetryCount(1); - task_2_1.setStatus(TaskModel.Status.CANCELED); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - 
assertNull(workflow.getReasonForIncompletion()); - assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames()); - } - - @Test - public void testRerunSubWorkflow() { - IDGenerator idGenerator = new IDGenerator(); - // setup - String parentWorkflowId = idGenerator.generate(); - String subWorkflowId = idGenerator.generate(); - - // sub workflow setup - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(subWorkflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.SIMPLE.name()); - task2.setTaskDefName("task2"); - task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(subWorkflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.COMPLETED); - task2.setWorkflowTask(new WorkflowTask()); - task2.setOutputData(new HashMap<>()); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setParentWorkflowId(parentWorkflowId); - subWorkflow.setWorkflowId(subWorkflowId); - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName("subworkflow"); - subworkflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(subworkflowDef); - subWorkflow.setOwnerApp("junit_testRerunWorkflowId"); - subWorkflow.setStatus(WorkflowModel.Status.COMPLETED); - subWorkflow.getTasks().addAll(Arrays.asList(task1, task2)); - - // parent workflow setup - TaskModel task = new TaskModel(); - task.setWorkflowInstanceId(parentWorkflowId); - task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - task.setStatus(TaskModel.Status.COMPLETED); - 
task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(subWorkflowId); - task.setTaskType(TaskType.SUB_WORKFLOW.name()); - task.setWorkflowTask(new WorkflowTask()); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(parentWorkflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("parentworkflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.getTasks().addAll(Arrays.asList(task)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) - .thenReturn(task); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // then: - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, subWorkflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - } - - @Test - public void testRerunWorkflowWithTaskId() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRerunWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - 
workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setReasonForIncompletion("task1 failed"); - workflow.setFailedReferenceTaskNames( - new HashSet<>() { - { - add("task1_ref1"); - } - }); - - TaskModel task_1_1 = new TaskModel(); - task_1_1.setTaskId(UUID.randomUUID().toString()); - task_1_1.setSeq(20); - task_1_1.setRetryCount(1); - task_1_1.setTaskType(TaskType.SIMPLE.toString()); - task_1_1.setStatus(TaskModel.Status.FAILED); - task_1_1.setRetried(true); - task_1_1.setTaskDefName("task1"); - task_1_1.setWorkflowTask(new WorkflowTask()); - task_1_1.setReferenceTaskName("task1_ref1"); - - TaskModel task_2_1 = new TaskModel(); - task_2_1.setTaskId(UUID.randomUUID().toString()); - task_2_1.setSeq(22); - task_2_1.setRetryCount(1); - task_2_1.setStatus(TaskModel.Status.CANCELED); - task_2_1.setTaskType(TaskType.SIMPLE.toString()); - task_2_1.setTaskDefName("task2"); - task_2_1.setWorkflowTask(new WorkflowTask()); - task_2_1.setReferenceTaskName("task2_ref1"); - - workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - when(metadataDAO.getWorkflowDef(anyString(), anyInt())) - .thenReturn(Optional.of(new WorkflowDef())); - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId()); - rerunWorkflowRequest.setReRunFromTaskId(task_1_1.getTaskId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // when: - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertNull(workflow.getReasonForIncompletion()); - assertEquals(new 
HashSet<>(), workflow.getFailedReferenceTaskNames()); - } - - @Test - public void testRerunWorkflowWithSyncSystemTaskId() { - IDGenerator idGenerator = new IDGenerator(); - // setup - String workflowId = idGenerator.generate(); - - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(workflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.JSON_JQ_TRANSFORM.name()); - task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(workflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId("system-task-id"); - task2.setStatus(TaskModel.Status.FAILED); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("workflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setReasonForIncompletion("task2 failed"); - workflow.setFailedReferenceTaskNames( - new HashSet<>() { - { - add("task2_ref"); - } - }); - workflow.getTasks().addAll(Arrays.asList(task1, task2)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId()); - rerunWorkflowRequest.setReRunFromTaskId(task2.getTaskId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // then: - assertEquals(TaskModel.Status.COMPLETED, task2.getStatus()); - 
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertNull(workflow.getReasonForIncompletion()); - assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames()); - } - - @Test - public void testRerunSubWorkflowWithTaskId() { - IDGenerator idGenerator = new IDGenerator(); - - // setup - String parentWorkflowId = idGenerator.generate(); - String subWorkflowId = idGenerator.generate(); - - // sub workflow setup - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(subWorkflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.SIMPLE.name()); - task2.setTaskDefName("task2"); - task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(subWorkflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.COMPLETED); - task2.setWorkflowTask(new WorkflowTask()); - task2.setOutputData(new HashMap<>()); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setParentWorkflowId(parentWorkflowId); - subWorkflow.setWorkflowId(subWorkflowId); - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName("subworkflow"); - subworkflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(subworkflowDef); - subWorkflow.setOwnerApp("junit_testRerunWorkflowId"); - subWorkflow.setStatus(WorkflowModel.Status.COMPLETED); - subWorkflow.getTasks().addAll(Arrays.asList(task1, task2)); - - // parent workflow setup - TaskModel task = new TaskModel(); - task.setWorkflowInstanceId(parentWorkflowId); - 
task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - task.setStatus(TaskModel.Status.COMPLETED); - task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(subWorkflowId); - task.setTaskType(TaskType.SUB_WORKFLOW.name()); - task.setWorkflowTask(new WorkflowTask()); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(parentWorkflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("parentworkflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.getTasks().addAll(Arrays.asList(task)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) - .thenReturn(task); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId()); - rerunWorkflowRequest.setReRunFromTaskId(task2.getTaskId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // then: - assertEquals(TaskModel.Status.SCHEDULED, task2.getStatus()); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, subWorkflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - } - - @Test - public void testGetActiveDomain() throws Exception { - String taskType = "test-task"; - String[] domains = 
new String[] {"domain1", "domain2"}; - - PollData pollData1 = - new PollData( - "queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 1000); - when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0])) - .thenReturn(pollData1); - String activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertEquals(domains[0], activeDomain); - Thread.sleep(2000L); - - PollData pollData2 = - new PollData( - "queue2", domains[1], "worker2", System.currentTimeMillis() - 99 * 1000); - when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1])) - .thenReturn(pollData2); - activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertEquals(domains[1], activeDomain); - - Thread.sleep(2000L); - activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertEquals(domains[1], activeDomain); - - domains = new String[] {""}; - when(executionDAOFacade.getTaskPollDataByDomain(any(), any())).thenReturn(new PollData()); - activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertNotNull(activeDomain); - assertEquals("", activeDomain); - - domains = new String[] {}; - activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertNull(activeDomain); - - activeDomain = workflowExecutor.getActiveDomain(taskType, null); - assertNull(activeDomain); - - domains = new String[] {"test-domain"}; - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), anyString())).thenReturn(null); - activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertNotNull(activeDomain); - assertEquals("test-domain", activeDomain); - } - - @Test - public void testInactiveDomains() { - String taskType = "test-task"; - String[] domains = new String[] {"domain1", "domain2"}; - - PollData pollData1 = - new PollData( - "queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 10000); - when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0])) - .thenReturn(pollData1); - 
when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1])).thenReturn(null); - String activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertEquals("domain2", activeDomain); - } - - @Test - public void testDefaultDomain() { - String taskType = "test-task"; - String[] domains = new String[] {"domain1", "domain2", "NO_DOMAIN"}; - - PollData pollData1 = - new PollData( - "queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 10000); - when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0])) - .thenReturn(pollData1); - when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1])).thenReturn(null); - String activeDomain = workflowExecutor.getActiveDomain(taskType, domains); - assertNull(activeDomain); - } - - @Test - public void testTaskToDomain() { - WorkflowModel workflow = generateSampleWorkflow(); - List tasks = generateSampleTasks(3); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "mydomain"); - workflow.setTaskToDomain(taskToDomain); - - PollData pollData1 = - new PollData( - "queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100); - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), anyString())) - .thenReturn(pollData1); - workflowExecutor.setTaskDomains(tasks, workflow); - - assertNotNull(tasks); - tasks.forEach(task -> assertEquals("mydomain", task.getDomain())); - } - - @Test - public void testTaskToDomainsPerTask() { - WorkflowModel workflow = generateSampleWorkflow(); - List tasks = generateSampleTasks(2); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "mydomain, NO_DOMAIN"); - workflow.setTaskToDomain(taskToDomain); - - PollData pollData1 = - new PollData( - "queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100); - when(executionDAOFacade.getTaskPollDataByDomain(eq("task1"), anyString())) - .thenReturn(pollData1); - when(executionDAOFacade.getTaskPollDataByDomain(eq("task2"), anyString())).thenReturn(null); - 
workflowExecutor.setTaskDomains(tasks, workflow); - - assertEquals("mydomain", tasks.get(0).getDomain()); - assertNull(tasks.get(1).getDomain()); - } - - @Test - public void testTaskToDomainOverrides() { - WorkflowModel workflow = generateSampleWorkflow(); - List tasks = generateSampleTasks(4); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "mydomain"); - taskToDomain.put("task2", "someInactiveDomain, NO_DOMAIN"); - taskToDomain.put("task3", "someActiveDomain, NO_DOMAIN"); - taskToDomain.put("task4", "someInactiveDomain, someInactiveDomain2"); - workflow.setTaskToDomain(taskToDomain); - - PollData pollData1 = - new PollData( - "queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100); - PollData pollData2 = - new PollData( - "queue2", - "someActiveDomain", - "worker2", - System.currentTimeMillis() - 99 * 100); - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("mydomain"))) - .thenReturn(pollData1); - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someInactiveDomain"))) - .thenReturn(null); - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someActiveDomain"))) - .thenReturn(pollData2); - when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someInactiveDomain"))) - .thenReturn(null); - workflowExecutor.setTaskDomains(tasks, workflow); - - assertEquals("mydomain", tasks.get(0).getDomain()); - assertNull(tasks.get(1).getDomain()); - assertEquals("someActiveDomain", tasks.get(2).getDomain()); - assertEquals("someInactiveDomain2", tasks.get(3).getDomain()); - } - - @Test - public void testDedupAndAddTasks() { - WorkflowModel workflow = new WorkflowModel(); - - TaskModel task1 = new TaskModel(); - task1.setReferenceTaskName("task1"); - task1.setRetryCount(1); - - TaskModel task2 = new TaskModel(); - task2.setReferenceTaskName("task2"); - task2.setRetryCount(2); - - List tasks = new ArrayList<>(Arrays.asList(task1, task2)); - - List taskList = 
workflowExecutor.dedupAndAddTasks(workflow, tasks); - assertEquals(2, taskList.size()); - assertEquals(tasks, taskList); - assertEquals(workflow.getTasks(), taskList); - - // Adding the same tasks again - taskList = workflowExecutor.dedupAndAddTasks(workflow, tasks); - assertEquals(0, taskList.size()); - assertEquals(workflow.getTasks(), tasks); - - // Adding 2 new tasks - TaskModel newTask = new TaskModel(); - newTask.setReferenceTaskName("newTask"); - newTask.setRetryCount(0); - - taskList = workflowExecutor.dedupAndAddTasks(workflow, Collections.singletonList(newTask)); - assertEquals(1, taskList.size()); - assertEquals(newTask, taskList.get(0)); - assertEquals(3, workflow.getTasks().size()); - } - - @Test(expected = ApplicationException.class) - public void testTerminateCompletedWorkflow() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testTerminateTerminalWorkflow"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - workflowExecutor.terminateWorkflow( - workflow.getWorkflowId(), "test terminating terminal workflow"); - } - - @Test - public void testResetCallbacksForWorkflowTasks() { - String workflowId = "test-workflow-id"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - workflow.setStatus(WorkflowModel.Status.RUNNING); - - TaskModel completedTask = new TaskModel(); - completedTask.setTaskType(TaskType.SIMPLE.name()); - completedTask.setReferenceTaskName("completedTask"); - completedTask.setWorkflowInstanceId(workflowId); - completedTask.setScheduledTime(System.currentTimeMillis()); - completedTask.setCallbackAfterSeconds(300); - completedTask.setTaskId("simple-task-id"); - completedTask.setStatus(TaskModel.Status.COMPLETED); - - TaskModel systemTask = new TaskModel(); - systemTask.setTaskType(TaskType.WAIT.name()); - systemTask.setReferenceTaskName("waitTask"); - 
systemTask.setWorkflowInstanceId(workflowId); - systemTask.setScheduledTime(System.currentTimeMillis()); - systemTask.setTaskId("system-task-id"); - systemTask.setStatus(TaskModel.Status.SCHEDULED); - - TaskModel simpleTask = new TaskModel(); - simpleTask.setTaskType(TaskType.SIMPLE.name()); - simpleTask.setReferenceTaskName("simpleTask"); - simpleTask.setWorkflowInstanceId(workflowId); - simpleTask.setScheduledTime(System.currentTimeMillis()); - simpleTask.setCallbackAfterSeconds(300); - simpleTask.setTaskId("simple-task-id"); - simpleTask.setStatus(TaskModel.Status.SCHEDULED); - - TaskModel noCallbackTask = new TaskModel(); - noCallbackTask.setTaskType(TaskType.SIMPLE.name()); - noCallbackTask.setReferenceTaskName("noCallbackTask"); - noCallbackTask.setWorkflowInstanceId(workflowId); - noCallbackTask.setScheduledTime(System.currentTimeMillis()); - noCallbackTask.setCallbackAfterSeconds(0); - noCallbackTask.setTaskId("no-callback-task-id"); - noCallbackTask.setStatus(TaskModel.Status.SCHEDULED); - - workflow.getTasks() - .addAll(Arrays.asList(completedTask, systemTask, simpleTask, noCallbackTask)); - when(executionDAOFacade.getWorkflowModel(workflowId, true)).thenReturn(workflow); - - workflowExecutor.resetCallbacksForWorkflow(workflowId); - verify(queueDAO, times(1)).resetOffsetTime(anyString(), anyString()); - } - - @Test - public void testUpdateParentWorkflowTask() { - SubWorkflow subWf = new SubWorkflow(objectMapper); - String parentWorkflowTaskId = "parent_workflow_task_id"; - String workflowId = "workflow_id"; - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setWorkflowId(workflowId); - subWorkflow.setParentWorkflowTaskId(parentWorkflowTaskId); - subWorkflow.setStatus(WorkflowModel.Status.COMPLETED); - - TaskModel subWorkflowTask = new TaskModel(); - subWorkflowTask.setSubWorkflowId(workflowId); - subWorkflowTask.setStatus(TaskModel.Status.IN_PROGRESS); - subWorkflowTask.setExternalOutputPayloadStoragePath(null); - - 
when(executionDAOFacade.getTaskModel(parentWorkflowTaskId)).thenReturn(subWorkflowTask); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(subWorkflow); - - workflowExecutor.updateParentWorkflowTask(subWorkflow); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskModel.class); - verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture()); - assertEquals(TaskModel.Status.COMPLETED, argumentCaptor.getAllValues().get(0).getStatus()); - assertEquals(workflowId, argumentCaptor.getAllValues().get(0).getSubWorkflowId()); - } - - @Test - public void testStartWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - - Map workflowInput = new HashMap<>(); - String externalInputPayloadStoragePath = null; - String correlationId = null; - Integer priority = null; - String parentWorkflowId = null; - String parentWorkflowTaskId = null; - String event = null; - Map taskToDomain = null; - - when(executionLockService.acquireLock(anyString())).thenReturn(true); - when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow); - - workflowExecutor.startWorkflow( - def, - workflowInput, - externalInputPayloadStoragePath, - correlationId, - priority, - parentWorkflowId, - parentWorkflowTaskId, - event, - taskToDomain); - - verify(executionDAOFacade, times(1)).createWorkflow(any(WorkflowModel.class)); - verify(executionLockService, times(2)).acquireLock(anyString()); - verify(executionDAOFacade, times(1)).getWorkflowModel(anyString(), anyBoolean()); - } - - @Test - public void testScheduleNextIteration() { - WorkflowModel workflow = generateSampleWorkflow(); - workflow.setTaskToDomain( - new HashMap<>() { - { - put("TEST", "domain1"); - } - }); - TaskModel loopTask = mock(TaskModel.class); - WorkflowTask loopWfTask = mock(WorkflowTask.class); - when(loopTask.getWorkflowTask()).thenReturn(loopWfTask); - List 
loopOver = - new ArrayList<>() { - { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_SIMPLE); - workflowTask.setName("TEST"); - workflowTask.setTaskDefinition(new TaskDef()); - add(workflowTask); - } - }; - when(loopWfTask.getLoopOver()).thenReturn(loopOver); - - workflowExecutor.scheduleNextIteration(loopTask, workflow); - verify(executionDAOFacade).getTaskPollDataByDomain("TEST", "domain1"); - } - - @Test - public void testCancelNonTerminalTasks() { - WorkflowDef def = new WorkflowDef(); - def.setWorkflowStatusListenerEnabled(true); - - WorkflowModel workflow = generateSampleWorkflow(); - workflow.setWorkflowDefinition(def); - - TaskModel subWorkflowTask = new TaskModel(); - subWorkflowTask.setTaskId(UUID.randomUUID().toString()); - subWorkflowTask.setTaskType(TaskType.SUB_WORKFLOW.name()); - subWorkflowTask.setStatus(TaskModel.Status.IN_PROGRESS); - - TaskModel lambdaTask = new TaskModel(); - lambdaTask.setTaskId(UUID.randomUUID().toString()); - lambdaTask.setTaskType(TaskType.LAMBDA.name()); - lambdaTask.setStatus(TaskModel.Status.SCHEDULED); - - TaskModel simpleTask = new TaskModel(); - simpleTask.setTaskId(UUID.randomUUID().toString()); - simpleTask.setTaskType(TaskType.SIMPLE.name()); - simpleTask.setStatus(TaskModel.Status.COMPLETED); - - workflow.getTasks().addAll(Arrays.asList(subWorkflowTask, lambdaTask, simpleTask)); - - List erroredTasks = workflowExecutor.cancelNonTerminalTasks(workflow); - assertTrue(erroredTasks.isEmpty()); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskModel.class); - verify(executionDAOFacade, times(2)).updateTask(argumentCaptor.capture()); - assertEquals(2, argumentCaptor.getAllValues().size()); - assertEquals( - TaskType.SUB_WORKFLOW.name(), argumentCaptor.getAllValues().get(0).getTaskType()); - assertEquals(TaskModel.Status.CANCELED, argumentCaptor.getAllValues().get(0).getStatus()); - assertEquals(TaskType.LAMBDA.name(), 
argumentCaptor.getAllValues().get(1).getTaskType()); - assertEquals(TaskModel.Status.CANCELED, argumentCaptor.getAllValues().get(1).getStatus()); - verify(workflowStatusListener, times(1)) - .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class)); - } - - @Test - public void testPauseWorkflow() { - when(executionLockService.acquireLock(anyString(), anyLong())).thenReturn(true); - doNothing().when(executionLockService).releaseLock(anyString()); - - String workflowId = "testPauseWorkflowId"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - - // if workflow is in terminal state - workflow.setStatus(WorkflowModel.Status.COMPLETED); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow); - try { - workflowExecutor.pauseWorkflow(workflowId); - fail("Expected " + ApplicationException.class); - } catch (ApplicationException e) { - assertEquals(e.getCode(), CONFLICT); - verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class)); - verify(queueDAO, never()).remove(anyString(), anyString()); - } - - // if workflow is already PAUSED - workflow.setStatus(WorkflowModel.Status.PAUSED); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow); - workflowExecutor.pauseWorkflow(workflowId); - assertEquals(WorkflowModel.Status.PAUSED, workflow.getStatus()); - verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class)); - verify(queueDAO, never()).remove(anyString(), anyString()); - - // if workflow is RUNNING - workflow.setStatus(WorkflowModel.Status.RUNNING); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow); - workflowExecutor.pauseWorkflow(workflowId); - assertEquals(WorkflowModel.Status.PAUSED, workflow.getStatus()); - verify(executionDAOFacade, times(1)).updateWorkflow(any(WorkflowModel.class)); - verify(queueDAO, times(1)).remove(anyString(), anyString()); - } - - @Test - public void testResumeWorkflow() { - String 
workflowId = "testResumeWorkflowId"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - - // if workflow is not in PAUSED state - workflow.setStatus(WorkflowModel.Status.COMPLETED); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow); - try { - workflowExecutor.resumeWorkflow(workflowId); - } catch (Exception e) { - assertTrue(e instanceof IllegalStateException); - verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class)); - verify(queueDAO, never()).push(anyString(), anyString(), anyInt(), anyLong()); - } - - // if workflow is in PAUSED state - workflow.setStatus(WorkflowModel.Status.PAUSED); - when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow); - workflowExecutor.resumeWorkflow(workflowId); - assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus()); - assertTrue(workflow.getLastRetriedTime() > 0); - verify(executionDAOFacade, times(1)).updateWorkflow(any(WorkflowModel.class)); - verify(queueDAO, times(1)).push(anyString(), anyString(), anyInt(), anyLong()); - } - - @Test - @SuppressWarnings("unchecked") - public void testTerminateWorkflowWithFailureWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("workflow"); - workflowDef.setFailureWorkflow("failure_workflow"); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("1"); - workflow.setCorrelationId("testid"); - workflow.setWorkflowDefinition(new WorkflowDef()); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setOwnerApp("junit_test"); - workflow.setEndTime(100L); - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setWorkflowDefinition(workflowDef); - - TaskModel successTask = new TaskModel(); - successTask.setTaskId("taskid1"); - successTask.setReferenceTaskName("success"); - successTask.setStatus(TaskModel.Status.COMPLETED); - - TaskModel failedTask = new TaskModel(); - failedTask.setTaskId("taskid2"); - 
failedTask.setReferenceTaskName("failed"); - failedTask.setStatus(TaskModel.Status.FAILED); - workflow.getTasks().addAll(Arrays.asList(successTask, failedTask)); - - WorkflowDef failureWorkflowDef = new WorkflowDef(); - failureWorkflowDef.setName("failure_workflow"); - when(metadataDAO.getLatestWorkflowDef(failureWorkflowDef.getName())) - .thenReturn(Optional.of(failureWorkflowDef)); - - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionLockService.acquireLock(anyString())).thenReturn(true); - - workflowExecutor.decide(workflow.getWorkflowId()); - - assertEquals(WorkflowModel.Status.FAILED, workflow.getStatus()); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(WorkflowModel.class); - verify(executionDAOFacade, times(1)).createWorkflow(argumentCaptor.capture()); - assertEquals( - workflow.getCorrelationId(), - argumentCaptor.getAllValues().get(0).getCorrelationId()); - assertEquals( - workflow.getWorkflowId(), - argumentCaptor.getAllValues().get(0).getInput().get("workflowId")); - assertEquals( - failedTask.getTaskId(), - argumentCaptor.getAllValues().get(0).getInput().get("failureTaskId")); - } - - @Test - public void testRerunOptionalSubWorkflow() { - IDGenerator idGenerator = new IDGenerator(); - // setup - String parentWorkflowId = idGenerator.generate(); - String subWorkflowId = idGenerator.generate(); - - // sub workflow setup - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(subWorkflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.SIMPLE.name()); - task2.setTaskDefName("task2"); - 
task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(subWorkflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.FAILED); - task2.setWorkflowTask(new WorkflowTask()); - task2.setOutputData(new HashMap<>()); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setParentWorkflowId(parentWorkflowId); - subWorkflow.setWorkflowId(subWorkflowId); - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName("subworkflow"); - subworkflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(subworkflowDef); - subWorkflow.setOwnerApp("junit_testRerunWorkflowId"); - subWorkflow.setStatus(WorkflowModel.Status.FAILED); - subWorkflow.getTasks().addAll(Arrays.asList(task1, task2)); - - // parent workflow setup - TaskModel task = new TaskModel(); - task.setWorkflowInstanceId(parentWorkflowId); - task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS); - task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(subWorkflowId); - task.setTaskType(TaskType.SUB_WORKFLOW.name()); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setOptional(true); - task.setWorkflowTask(workflowTask); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(parentWorkflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("parentworkflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.getTasks().addAll(Arrays.asList(task)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - 
when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) - .thenReturn(task); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId()); - workflowExecutor.rerun(rerunWorkflowRequest); - - // then: parent workflow remains the same - assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus()); - assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus()); - } - - @Test - public void testRestartOptionalSubWorkflow() { - IDGenerator idGenerator = new IDGenerator(); - // setup - String parentWorkflowId = idGenerator.generate(); - String subWorkflowId = idGenerator.generate(); - - // sub workflow setup - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(subWorkflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.SIMPLE.name()); - task2.setTaskDefName("task2"); - task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(subWorkflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.FAILED); - task2.setWorkflowTask(new WorkflowTask()); - task2.setOutputData(new HashMap<>()); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setParentWorkflowId(parentWorkflowId); - 
subWorkflow.setWorkflowId(subWorkflowId); - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName("subworkflow"); - subworkflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(subworkflowDef); - subWorkflow.setOwnerApp("junit_testRerunWorkflowId"); - subWorkflow.setStatus(WorkflowModel.Status.FAILED); - subWorkflow.getTasks().addAll(Arrays.asList(task1, task2)); - - // parent workflow setup - TaskModel task = new TaskModel(); - task.setWorkflowInstanceId(parentWorkflowId); - task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS); - task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(subWorkflowId); - task.setTaskType(TaskType.SUB_WORKFLOW.name()); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setOptional(true); - task.setWorkflowTask(workflowTask); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(parentWorkflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("parentworkflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.getTasks().addAll(Arrays.asList(task)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) - .thenReturn(task); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - workflowExecutor.restart(subWorkflowId, false); - - // then: parent workflow remains the same - assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, 
subWorkflow.getStatus()); - assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus()); - } - - @Test - public void testRetryOptionalSubWorkflow() { - IDGenerator idGenerator = new IDGenerator(); - // setup - String parentWorkflowId = idGenerator.generate(); - String subWorkflowId = idGenerator.generate(); - - // sub workflow setup - TaskModel task1 = new TaskModel(); - task1.setTaskType(TaskType.SIMPLE.name()); - task1.setTaskDefName("task1"); - task1.setReferenceTaskName("task1_ref"); - task1.setWorkflowInstanceId(subWorkflowId); - task1.setScheduledTime(System.currentTimeMillis()); - task1.setTaskId(idGenerator.generate()); - task1.setStatus(TaskModel.Status.COMPLETED); - task1.setWorkflowTask(new WorkflowTask()); - task1.setOutputData(new HashMap<>()); - - TaskModel task2 = new TaskModel(); - task2.setTaskType(TaskType.SIMPLE.name()); - task2.setTaskDefName("task2"); - task2.setReferenceTaskName("task2_ref"); - task2.setWorkflowInstanceId(subWorkflowId); - task2.setScheduledTime(System.currentTimeMillis()); - task2.setTaskId(idGenerator.generate()); - task2.setStatus(TaskModel.Status.FAILED); - task2.setWorkflowTask(new WorkflowTask()); - task2.setOutputData(new HashMap<>()); - - WorkflowModel subWorkflow = new WorkflowModel(); - subWorkflow.setParentWorkflowId(parentWorkflowId); - subWorkflow.setWorkflowId(subWorkflowId); - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName("subworkflow"); - subworkflowDef.setVersion(1); - subWorkflow.setWorkflowDefinition(subworkflowDef); - subWorkflow.setOwnerApp("junit_testRerunWorkflowId"); - subWorkflow.setStatus(WorkflowModel.Status.FAILED); - subWorkflow.getTasks().addAll(Arrays.asList(task1, task2)); - - // parent workflow setup - TaskModel task = new TaskModel(); - task.setWorkflowInstanceId(parentWorkflowId); - task.setScheduledTime(System.currentTimeMillis()); - task.setTaskId(idGenerator.generate()); - 
task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS); - task.setOutputData(new HashMap<>()); - task.setSubWorkflowId(subWorkflowId); - task.setTaskType(TaskType.SUB_WORKFLOW.name()); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setOptional(true); - task.setWorkflowTask(workflowTask); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(parentWorkflowId); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("parentworkflow"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRerunWorkflowId"); - workflow.setStatus(WorkflowModel.Status.COMPLETED); - workflow.getTasks().addAll(Arrays.asList(task)); - // end of setup - - // when: - when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true)) - .thenReturn(workflow); - when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true)) - .thenReturn(subWorkflow); - when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId())) - .thenReturn(task); - when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false)) - .thenReturn(workflow); - - workflowExecutor.retry(subWorkflowId, true); - - // then: parent workflow remains the same - assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus()); - assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus()); - assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus()); - assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus()); - } - - @Test - public void testUpdateTaskWithCallbackAfterSeconds() { - String workflowId = "test-workflow-id"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setWorkflowDefinition(new WorkflowDef()); - - TaskModel simpleTask = new TaskModel(); - simpleTask.setTaskType(TaskType.SIMPLE.name()); - 
simpleTask.setReferenceTaskName("simpleTask"); - simpleTask.setWorkflowInstanceId(workflowId); - simpleTask.setScheduledTime(System.currentTimeMillis()); - simpleTask.setCallbackAfterSeconds(0); - simpleTask.setTaskId("simple-task-id"); - simpleTask.setStatus(TaskModel.Status.IN_PROGRESS); - - workflow.getTasks().addAll(Arrays.asList(simpleTask)); - when(executionDAOFacade.getWorkflowModel(workflowId, true)).thenReturn(workflow); - when(executionDAOFacade.getTaskModel(simpleTask.getTaskId())).thenReturn(simpleTask); - - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(simpleTask.getTaskId()); - taskResult.setWorkerId("test-worker-id"); - taskResult.log("not ready yet"); - taskResult.setCallbackAfterSeconds(300); - taskResult.setStatus(TaskResult.Status.IN_PROGRESS); - - workflowExecutor.updateTask(taskResult); - verify(queueDAO, times(1)).postpone(anyString(), anyString(), any(), any()); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskModel.class); - verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture()); - assertEquals(TaskModel.Status.SCHEDULED, argumentCaptor.getAllValues().get(0).getStatus()); - assertEquals( - taskResult.getCallbackAfterSeconds(), - argumentCaptor.getAllValues().get(0).getCallbackAfterSeconds()); - assertEquals( - taskResult.getWorkflowInstanceId(), - argumentCaptor.getAllValues().get(0).getWorkerId()); - } - - @Test - public void testUpdateTaskWithOutCallbackAfterSeconds() { - String workflowId = "test-workflow-id"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - workflow.setStatus(WorkflowModel.Status.RUNNING); - workflow.setWorkflowDefinition(new WorkflowDef()); - - TaskModel simpleTask = new TaskModel(); - simpleTask.setTaskType(TaskType.SIMPLE.name()); - simpleTask.setReferenceTaskName("simpleTask"); - simpleTask.setWorkflowInstanceId(workflowId); - 
simpleTask.setScheduledTime(System.currentTimeMillis()); - simpleTask.setCallbackAfterSeconds(0); - simpleTask.setTaskId("simple-task-id"); - simpleTask.setStatus(TaskModel.Status.IN_PROGRESS); - - workflow.getTasks().addAll(Arrays.asList(simpleTask)); - when(executionDAOFacade.getWorkflowModel(workflowId, true)).thenReturn(workflow); - when(executionDAOFacade.getTaskModel(simpleTask.getTaskId())).thenReturn(simpleTask); - - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(simpleTask.getTaskId()); - taskResult.setWorkerId("test-worker-id"); - taskResult.log("not ready yet"); - taskResult.setStatus(TaskResult.Status.IN_PROGRESS); - - workflowExecutor.updateTask(taskResult); - verify(queueDAO, times(1)).postpone(anyString(), anyString(), any(), any()); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(TaskModel.class); - verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture()); - assertEquals(TaskModel.Status.SCHEDULED, argumentCaptor.getAllValues().get(0).getStatus()); - assertEquals(0, argumentCaptor.getAllValues().get(0).getCallbackAfterSeconds()); - assertEquals( - taskResult.getWorkflowInstanceId(), - argumentCaptor.getAllValues().get(0).getWorkerId()); - } - - private WorkflowModel generateSampleWorkflow() { - // setup - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("testRetryWorkflowId"); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testRetryWorkflowId"); - workflowDef.setVersion(1); - workflow.setWorkflowDefinition(workflowDef); - workflow.setOwnerApp("junit_testRetryWorkflowId"); - workflow.setCreateTime(10L); - workflow.setEndTime(100L); - //noinspection unchecked - workflow.setOutput(Collections.EMPTY_MAP); - workflow.setStatus(WorkflowModel.Status.FAILED); - - return workflow; - } - - private List generateSampleTasks(int count) { - if (count == 0) { - return null; - } - List tasks = new ArrayList<>(); - for (int i = 0; 
i < count; i++) { - TaskModel task = new TaskModel(); - task.setTaskId(UUID.randomUUID().toString()); - task.setSeq(i); - task.setRetryCount(1); - task.setTaskType("task" + (i + 1)); - task.setStatus(TaskModel.Status.COMPLETED); - task.setTaskDefName("taskX"); - task.setReferenceTaskName("task_ref" + (i + 1)); - tasks.add(task); - } - - return tasks; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/WorkflowSystemTaskStub.java b/core/src/test/java/com/netflix/conductor/core/execution/WorkflowSystemTaskStub.java deleted file mode 100644 index 3175a248e..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/WorkflowSystemTaskStub.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution; - -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -public class WorkflowSystemTaskStub extends WorkflowSystemTask { - - private boolean started = false; - - public WorkflowSystemTaskStub(String taskType) { - super(taskType); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - started = true; - task.setStatus(TaskModel.Status.COMPLETED); - super.start(workflow, task, executor); - } - - public boolean isStarted() { - return started; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java deleted file mode 100644 index cdacc2a25..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class DecisionTaskMapperTest { - - private IDGenerator idGenerator; - private ParametersUtils parametersUtils; - private DeciderService deciderService; - // 
Subject - private DecisionTaskMapper decisionTaskMapper; - - @Autowired private ObjectMapper objectMapper; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - Map ip1; - WorkflowTask task1; - WorkflowTask task2; - WorkflowTask task3; - - @Before - public void setUp() { - parametersUtils = new ParametersUtils(objectMapper); - idGenerator = new IDGenerator(); - - ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - ip1.put("case", "${workflow.input.case}"); - - task1 = new WorkflowTask(); - task1.setName("Test1"); - task1.setInputParameters(ip1); - task1.setTaskReferenceName("t1"); - - task2 = new WorkflowTask(); - task2.setName("Test2"); - task2.setInputParameters(ip1); - task2.setTaskReferenceName("t2"); - - task3 = new WorkflowTask(); - task3.setName("Test3"); - task3.setInputParameters(ip1); - task3.setTaskReferenceName("t3"); - deciderService = mock(DeciderService.class); - decisionTaskMapper = new DecisionTaskMapper(); - } - - @Test - public void getMappedTasks() { - - // Given - // Task Definition - TaskDef taskDef = new TaskDef(); - Map inputMap = new HashMap<>(); - inputMap.put("Id", "${workflow.input.Id}"); - List> taskDefinitionInput = new LinkedList<>(); - taskDefinitionInput.add(inputMap); - - // Decision task instance - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setName("Decision"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setDefaultCase(Collections.singletonList(task1)); - decisionTask.setCaseValueParam("case"); - decisionTask.getInputParameters().put("Id", "${workflow.input.Id}"); - decisionTask.setCaseExpression( - "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; "); - Map> decisionCases = new HashMap<>(); - decisionCases.put("even", Collections.singletonList(task2)); - decisionCases.put("odd", 
Collections.singletonList(task3)); - decisionTask.setDecisionCases(decisionCases); - // Workflow instance - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(workflowDef); - Map workflowInput = new HashMap<>(); - workflowInput.put("Id", "22"); - workflowModel.setInput(workflowInput); - - Map body = new HashMap<>(); - body.put("input", taskDefinitionInput); - taskDef.getInputTemplate().putAll(body); - - Map input = - parametersUtils.getTaskInput( - decisionTask.getInputParameters(), workflowModel, null, null); - - TaskModel theTask = new TaskModel(); - theTask.setReferenceTaskName("Foo"); - theTask.setTaskId(idGenerator.generate()); - - when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null)) - .thenReturn(Collections.singletonList(theTask)); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(decisionTask) - .withTaskInput(input) - .withRetryCount(0) - .withTaskId(idGenerator.generate()) - .withDeciderService(deciderService) - .build(); - - // When - List mappedTasks = decisionTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(2, mappedTasks.size()); - assertEquals("decisionTask", mappedTasks.get(0).getReferenceTaskName()); - assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName()); - } - - @Test - public void getEvaluatedCaseValue() { - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setName("Decision"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setInputParameters(ip1); - decisionTask.setDefaultCase(Collections.singletonList(task1)); - decisionTask.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("0", Collections.singletonList(task2)); - decisionCases.put("1", Collections.singletonList(task3)); 
- decisionTask.setDecisionCases(decisionCases); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(new WorkflowDef()); - Map workflowInput = new HashMap<>(); - workflowInput.put("param1", "test1"); - workflowInput.put("param2", "test2"); - workflowInput.put("case", "0"); - workflowModel.setInput(workflowInput); - - Map input = - parametersUtils.getTaskInput( - decisionTask.getInputParameters(), workflowModel, null, null); - - assertEquals("0", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, input)); - } - - @Test - public void getEvaluatedCaseValueUsingExpression() { - // Given - // Task Definition - TaskDef taskDef = new TaskDef(); - Map inputMap = new HashMap<>(); - inputMap.put("Id", "${workflow.input.Id}"); - List> taskDefinitionInput = new LinkedList<>(); - taskDefinitionInput.add(inputMap); - - // Decision task instance - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setName("Decision"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setDefaultCase(Collections.singletonList(task1)); - decisionTask.setCaseValueParam("case"); - decisionTask.getInputParameters().put("Id", "${workflow.input.Id}"); - decisionTask.setCaseExpression( - "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; "); - Map> decisionCases = new HashMap<>(); - decisionCases.put("even", Collections.singletonList(task2)); - decisionCases.put("odd", Collections.singletonList(task3)); - decisionTask.setDecisionCases(decisionCases); - - // Workflow instance - WorkflowDef def = new WorkflowDef(); - def.setSchemaVersion(2); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(def); - Map workflowInput = new HashMap<>(); - workflowInput.put("Id", "22"); - workflowModel.setInput(workflowInput); - - Map body = new HashMap<>(); - body.put("input", taskDefinitionInput); - 
taskDef.getInputTemplate().putAll(body); - - Map evaluatorInput = - parametersUtils.getTaskInput( - decisionTask.getInputParameters(), workflowModel, taskDef, null); - - assertEquals( - "even", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput)); - } - - @Test - public void getEvaluatedCaseValueException() { - // Given - // Task Definition - TaskDef taskDef = new TaskDef(); - Map inputMap = new HashMap<>(); - inputMap.put("Id", "${workflow.input.Id}"); - List> taskDefinitionInput = new LinkedList<>(); - taskDefinitionInput.add(inputMap); - - // Decision task instance - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setName("Decision"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setDefaultCase(Collections.singletonList(task1)); - decisionTask.setCaseValueParam("case"); - decisionTask.getInputParameters().put("Id", "${workflow.input.Id}"); - decisionTask.setCaseExpression( - "if ($Id == null) 'bad input'; else if ( ($Id != null && $Id % 2 == 0)) 'even'; else 'odd'; "); - Map> decisionCases = new HashMap<>(); - decisionCases.put("even", Collections.singletonList(task2)); - decisionCases.put("odd", Collections.singletonList(task3)); - decisionTask.setDecisionCases(decisionCases); - - // Workflow instance - WorkflowDef def = new WorkflowDef(); - def.setSchemaVersion(2); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(def); - Map workflowInput = new HashMap<>(); - workflowInput.put(".Id", "22"); - workflowModel.setInput(workflowInput); - - Map body = new HashMap<>(); - body.put("input", taskDefinitionInput); - taskDef.getInputTemplate().putAll(body); - - Map evaluatorInput = - parametersUtils.getTaskInput( - decisionTask.getInputParameters(), workflowModel, taskDef, null); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - "Error while evaluating script: " + 
decisionTask.getCaseExpression()); - - decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java deleted file mode 100644 index 3c4d4edef..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class DoWhileTaskMapperTest { - - private TaskModel task1; - private DeciderService deciderService; - private WorkflowModel workflow; - private WorkflowTask workflowTask1; - private TaskMapperContext taskMapperContext; - private MetadataDAO metadataDAO; - - @Before - public void setup() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.DO_WHILE.name()); - workflowTask.setTaskReferenceName("Test"); - task1 = new TaskModel(); - task1.setReferenceTaskName("task1"); - TaskModel task2 = new TaskModel(); - task2.setReferenceTaskName("task2"); - workflowTask1 = new WorkflowTask(); - workflowTask1.setTaskReferenceName("task1"); - WorkflowTask 
workflowTask2 = new WorkflowTask(); - workflowTask2.setTaskReferenceName("task2"); - task1.setWorkflowTask(workflowTask1); - task2.setWorkflowTask(workflowTask2); - workflowTask.setLoopOver(Arrays.asList(task1.getWorkflowTask(), task2.getWorkflowTask())); - workflowTask.setLoopCondition( - "if ($.second_task + $.first_task > 10) { false; } else { true; }"); - - String taskId = new IDGenerator().generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - deciderService = Mockito.mock(DeciderService.class); - metadataDAO = Mockito.mock(MetadataDAO.class); - - taskMapperContext = - TaskMapperContext.newBuilder() - .withDeciderService(deciderService) - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - } - - @Test - public void getMappedTasks() { - - Mockito.doReturn(Collections.singletonList(task1)) - .when(deciderService) - .getTasksToBeScheduled(workflow, workflowTask1, 0); - - List mappedTasks = - new DoWhileTaskMapper(metadataDAO).getMappedTasks(taskMapperContext); - - assertNotNull(mappedTasks); - assertEquals(mappedTasks.size(), 1); - assertEquals(TASK_TYPE_DO_WHILE, mappedTasks.get(0).getTaskType()); - } - - @Test - public void shouldNotScheduleCompletedTask() { - - task1.setStatus(TaskModel.Status.COMPLETED); - - List mappedTasks = - new DoWhileTaskMapper(metadataDAO).getMappedTasks(taskMapperContext); - - assertNotNull(mappedTasks); - assertEquals(mappedTasks.size(), 1); - } - - @Test - public void testAppendIteration() { - assertEquals("task__1", TaskUtils.appendIteration("task", 1)); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java deleted file mode 100644 index 8066db338..000000000 --- 
a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class DynamicTaskMapperTest { - - @Rule public ExpectedException expectedException = ExpectedException.none(); - private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - private DynamicTaskMapper dynamicTaskMapper; - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - - dynamicTaskMapper = new DynamicTaskMapper(parametersUtils, metadataDAO); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("DynoTask"); - 
workflowTask.setDynamicTaskNameParam("dynamicTaskName"); - TaskDef taskDef = new TaskDef(); - taskDef.setName("DynoTask"); - workflowTask.setTaskDefinition(taskDef); - - Map taskInput = new HashMap<>(); - taskInput.put("dynamicTaskName", "DynoTask"); - - when(parametersUtils.getTaskInput( - anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString())) - .thenReturn(taskInput); - - String taskId = new IDGenerator().generate(); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(workflowTask.getTaskDefinition()) - .withWorkflowTask(workflowTask) - .withTaskInput(taskInput) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - when(metadataDAO.getTaskDef("DynoTask")).thenReturn(new TaskDef()); - - List mappedTasks = dynamicTaskMapper.getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - - TaskModel dynamicTask = mappedTasks.get(0); - assertEquals(taskId, dynamicTask.getTaskId()); - } - - @Test - public void getDynamicTaskName() { - Map taskInput = new HashMap<>(); - taskInput.put("dynamicTaskName", "DynoTask"); - - String dynamicTaskName = dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName"); - - assertEquals("DynoTask", dynamicTaskName); - } - - @Test - public void getDynamicTaskNameNotAvailable() { - Map taskInput = new HashMap<>(); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - String.format( - "Cannot map a dynamic task based on the parameter and input. 
" - + "Parameter= %s, input= %s", - "dynamicTaskName", taskInput)); - - dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName"); - } - - @Test - public void getDynamicTaskDefinition() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("Foo"); - TaskDef taskDef = new TaskDef(); - taskDef.setName("Foo"); - workflowTask.setTaskDefinition(taskDef); - - when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef()); - - // when - TaskDef dynamicTaskDefinition = dynamicTaskMapper.getDynamicTaskDefinition(workflowTask); - - assertEquals(dynamicTaskDefinition, taskDef); - } - - @Test - public void getDynamicTaskDefinitionNull() { - - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("Foo"); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - String.format( - "Invalid task specified. Cannot find task by name %s in the task definitions", - workflowTask.getName())); - - dynamicTaskMapper.getDynamicTaskDefinition(workflowTask); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java deleted file mode 100644 index 9e5daa1a6..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.when; - -public class EventTaskMapperTest { - - @Test - public void getMappedTasks() { - ParametersUtils parametersUtils = Mockito.mock(ParametersUtils.class); - EventTaskMapper eventTaskMapper = new EventTaskMapper(parametersUtils); - - WorkflowTask taskToBeScheduled = new WorkflowTask(); - taskToBeScheduled.setSink("SQSSINK"); - String taskId = new IDGenerator().generate(); - - Map eventTaskInput = new HashMap<>(); - eventTaskInput.put("sink", "SQSSINK"); - - when(parametersUtils.getTaskInput( - anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString())) - .thenReturn(eventTaskInput); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - 
.withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(taskToBeScheduled) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = eventTaskMapper.getMappedTasks(taskMapperContext); - assertEquals(1, mappedTasks.size()); - - TaskModel eventTask = mappedTasks.get(0); - assertEquals(taskId, eventTask.getTaskId()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java deleted file mode 100644 index 7c7f74796..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java +++ /dev/null @@ -1,500 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.*; - -import org.apache.commons.lang3.tuple.Pair; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.when; - -@SuppressWarnings("unchecked") -public class ForkJoinDynamicTaskMapperTest { - - private 
IDGenerator idGenerator; - private ParametersUtils parametersUtils; - private ObjectMapper objectMapper; - private DeciderService deciderService; - private ForkJoinDynamicTaskMapper forkJoinDynamicTaskMapper; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - MetadataDAO metadataDAO = Mockito.mock(MetadataDAO.class); - idGenerator = new IDGenerator(); - parametersUtils = Mockito.mock(ParametersUtils.class); - objectMapper = Mockito.mock(ObjectMapper.class); - deciderService = Mockito.mock(DeciderService.class); - - forkJoinDynamicTaskMapper = - new ForkJoinDynamicTaskMapper( - idGenerator, parametersUtils, objectMapper, metadataDAO); - } - - @Test - public void getMappedTasksException() { - - WorkflowDef def = new WorkflowDef(); - def.setName("DYNAMIC_FORK_JOIN_WF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(def); - - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamictask_join"); - - def.getTasks().add(dynamicForkJoinToSchedule); - - Map input1 = new HashMap<>(); - input1.put("k1", "v1"); - WorkflowTask wt2 = new WorkflowTask(); - wt2.setName("junit_task_2"); - wt2.setTaskReferenceName("xdt1"); - - Map 
input2 = new HashMap<>(); - input2.put("k2", "v2"); - - WorkflowTask wt3 = new WorkflowTask(); - wt3.setName("junit_task_3"); - wt3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", input1); - dynamicTasksInput.put("xdt2", input2); - dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3)); - dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput); - - // when - when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(dynamicTasksInput); - - when(objectMapper.convertValue(any(), any(TypeReference.class))) - .thenReturn(Arrays.asList(wt2, wt3)); - - TaskModel simpleTask1 = new TaskModel(); - simpleTask1.setReferenceTaskName("xdt1"); - - TaskModel simpleTask2 = new TaskModel(); - simpleTask2.setReferenceTaskName("xdt2"); - - when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0)) - .thenReturn(Collections.singletonList(simpleTask1)); - when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0)) - .thenReturn(Collections.singletonList(simpleTask2)); - - String taskId = idGenerator.generate(); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(dynamicForkJoinToSchedule) - .withRetryCount(0) - .withTaskId(taskId) - .withDeciderService(deciderService) - .build(); - - // then - expectedException.expect(TerminateWorkflowException.class); - forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext); - } - - @Test - public void getMappedTasks() { - - WorkflowDef def = new WorkflowDef(); - def.setName("DYNAMIC_FORK_JOIN_WF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(def); - - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - 
dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamictask_join"); - - def.getTasks().add(dynamicForkJoinToSchedule); - def.getTasks().add(join); - - Map input1 = new HashMap<>(); - input1.put("k1", "v1"); - WorkflowTask wt2 = new WorkflowTask(); - wt2.setName("junit_task_2"); - wt2.setTaskReferenceName("xdt1"); - - Map input2 = new HashMap<>(); - input2.put("k2", "v2"); - - WorkflowTask wt3 = new WorkflowTask(); - wt3.setName("junit_task_3"); - wt3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", input1); - dynamicTasksInput.put("xdt2", input2); - dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3)); - dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput); - - // when - when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(dynamicTasksInput); - when(objectMapper.convertValue(any(), any(TypeReference.class))) - .thenReturn(Arrays.asList(wt2, wt3)); - - TaskModel simpleTask1 = new TaskModel(); - simpleTask1.setReferenceTaskName("xdt1"); - - TaskModel simpleTask2 = new TaskModel(); - simpleTask2.setReferenceTaskName("xdt2"); - - when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0)) - .thenReturn(Collections.singletonList(simpleTask1)); - when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0)) - .thenReturn(Collections.singletonList(simpleTask2)); - - String 
taskId = idGenerator.generate(); - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(dynamicForkJoinToSchedule) - .withRetryCount(0) - .withTaskId(taskId) - .withDeciderService(deciderService) - .build(); - - // then - List mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext); - - assertEquals(4, mappedTasks.size()); - - assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType()); - assertEquals(TASK_TYPE_JOIN, mappedTasks.get(3).getTaskType()); - List joinTaskNames = (List) mappedTasks.get(3).getInputData().get("joinOn"); - assertEquals("xdt1, xdt2", String.join(", ", joinTaskNames)); - } - - @Test - public void getDynamicForkJoinTasksAndInput() { - // Given - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList(); - - Map input = new HashMap<>(); - input.put("k1", "v1"); - dtasks.add("junit_task_2", null, "xdt1", input); - - HashMap input2 = new HashMap<>(); - input2.put("k2", "v2"); - dtasks.add("junit_task_3", null, "xdt2", input2); - - Map dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("dynamicTasks", dtasks); - - // when - when(parametersUtils.getTaskInput( - anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString())) - .thenReturn(dynamicTasksInput); - - when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(dtasks); - - Pair, Map>> dynamicForkJoinTasksAndInput = - forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput( - 
dynamicForkJoinToSchedule, new WorkflowModel()); - // then - assertNotNull(dynamicForkJoinTasksAndInput.getLeft()); - assertEquals(2, dynamicForkJoinTasksAndInput.getLeft().size()); - assertEquals(2, dynamicForkJoinTasksAndInput.getRight().size()); - } - - @Test - public void getDynamicForkJoinTasksAndInputException() { - // Given - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList(); - - Map input = new HashMap<>(); - input.put("k1", "v1"); - dtasks.add("junit_task_2", null, "xdt1", input); - - HashMap input2 = new HashMap<>(); - input2.put("k2", "v2"); - dtasks.add("junit_task_3", null, "xdt2", input2); - - Map dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("dynamicTasks", dtasks); - - // when - when(parametersUtils.getTaskInput( - anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString())) - .thenReturn(dynamicTasksInput); - - when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(null); - - // then - expectedException.expect(TerminateWorkflowException.class); - - forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput( - dynamicForkJoinToSchedule, new WorkflowModel()); - } - - @Test - public void getDynamicForkTasksAndInput() { - // Given - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); - 
dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - Map input1 = new HashMap<>(); - input1.put("k1", "v1"); - WorkflowTask wt2 = new WorkflowTask(); - wt2.setName("junit_task_2"); - wt2.setTaskReferenceName("xdt1"); - - Map input2 = new HashMap<>(); - input2.put("k2", "v2"); - - WorkflowTask wt3 = new WorkflowTask(); - wt3.setName("junit_task_3"); - wt3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", input1); - dynamicTasksInput.put("xdt2", input2); - dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3)); - dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput); - - // when - when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(dynamicTasksInput); - - when(objectMapper.convertValue(any(), any(TypeReference.class))) - .thenReturn(Arrays.asList(wt2, wt3)); - - Pair, Map>> dynamicTasks = - forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput( - dynamicForkJoinToSchedule, new WorkflowModel(), "dynamicTasks"); - - // then - assertNotNull(dynamicTasks.getLeft()); - } - - @Test - public void getDynamicForkTasksAndInputException() { - - // Given - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", 
"dt1.output.dynamicTasksInput"); - - Map input1 = new HashMap<>(); - input1.put("k1", "v1"); - WorkflowTask wt2 = new WorkflowTask(); - wt2.setName("junit_task_2"); - wt2.setTaskReferenceName("xdt1"); - - Map input2 = new HashMap<>(); - input2.put("k2", "v2"); - - WorkflowTask wt3 = new WorkflowTask(); - wt3.setName("junit_task_3"); - wt3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", input1); - dynamicTasksInput.put("xdt2", input2); - dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3)); - dynamicTasksInput.put("dynamicTasksInput", null); - - when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(dynamicTasksInput); - - when(objectMapper.convertValue(any(), any(TypeReference.class))) - .thenReturn(Arrays.asList(wt2, wt3)); - // then - expectedException.expect(TerminateWorkflowException.class); - // when - forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput( - dynamicForkJoinToSchedule, new WorkflowModel(), "dynamicTasks"); - } - - @Test - public void testDynamicTaskDuplicateTaskRefName() { - WorkflowDef def = new WorkflowDef(); - def.setName("DYNAMIC_FORK_JOIN_WF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(def); - - WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); - dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); - dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasks", "dt1.output.dynamicTasks"); - dynamicForkJoinToSchedule - .getInputParameters() - .put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); 
- - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamictask_join"); - - def.getTasks().add(dynamicForkJoinToSchedule); - def.getTasks().add(join); - - Map input1 = new HashMap<>(); - input1.put("k1", "v1"); - WorkflowTask wt2 = new WorkflowTask(); - wt2.setName("junit_task_2"); - wt2.setTaskReferenceName("xdt1"); - - Map input2 = new HashMap<>(); - input2.put("k2", "v2"); - - WorkflowTask wt3 = new WorkflowTask(); - wt3.setName("junit_task_3"); - wt3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", input1); - dynamicTasksInput.put("xdt2", input2); - dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3)); - dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput); - - // dynamic - when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(dynamicTasksInput); - when(objectMapper.convertValue(any(), any(TypeReference.class))) - .thenReturn(Arrays.asList(wt2, wt3)); - - TaskModel simpleTask1 = new TaskModel(); - simpleTask1.setReferenceTaskName("xdt1"); - - // Empty list, this is a bad state, workflow should terminate - when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0)) - .thenReturn(new ArrayList<>()); - - String taskId = idGenerator.generate(); - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(dynamicForkJoinToSchedule) - .withRetryCount(0) - .withTaskId(taskId) - .withDeciderService(deciderService) - .build(); - - expectedException.expect(TerminateWorkflowException.class); - forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java deleted file mode 100644 index e42270fd2..000000000 --- 
a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK; - -import static org.junit.Assert.assertEquals; - -public class ForkJoinTaskMapperTest { - - private DeciderService deciderService; - private ForkJoinTaskMapper forkJoinTaskMapper; - private IDGenerator idGenerator; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - deciderService = Mockito.mock(DeciderService.class); - forkJoinTaskMapper = new ForkJoinTaskMapper(); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - - WorkflowDef def = new WorkflowDef(); - def.setName("FORK_JOIN_WF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask forkTask = new WorkflowTask(); - 
forkTask.setType(TaskType.FORK_JOIN.name()); - forkTask.setTaskReferenceName("forktask"); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - wft3.setInputParameters(ip1); - wft3.setTaskReferenceName("t3"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - WorkflowTask wft4 = new WorkflowTask(); - wft4.setName("junit_task_4"); - wft4.setInputParameters(ip2); - wft4.setTaskReferenceName("t4"); - - forkTask.getForkTasks().add(Arrays.asList(wft1, wft3)); - forkTask.getForkTasks().add(Collections.singletonList(wft2)); - - def.getTasks().add(forkTask); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("forktask_join"); - join.setJoinOn(Arrays.asList("t3", "t2")); - - def.getTasks().add(join); - def.getTasks().add(wft4); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - - TaskModel task1 = new TaskModel(); - task1.setReferenceTaskName(wft1.getTaskReferenceName()); - - TaskModel task3 = new TaskModel(); - task3.setReferenceTaskName(wft3.getTaskReferenceName()); - - Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1, 0)) - .thenReturn(Collections.singletonList(task1)); - Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2, 0)) - .thenReturn(Collections.singletonList(task3)); - - String taskId = idGenerator.generate(); - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withWorkflowTask(forkTask) - .withRetryCount(0) - .withTaskId(taskId) - 
.withDeciderService(deciderService) - .build(); - - List mappedTasks = forkJoinTaskMapper.getMappedTasks(taskMapperContext); - - assertEquals(3, mappedTasks.size()); - assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasksException() { - - WorkflowDef def = new WorkflowDef(); - def.setName("FORK_JOIN_WF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask forkTask = new WorkflowTask(); - forkTask.setType(TaskType.FORK_JOIN.name()); - forkTask.setTaskReferenceName("forktask"); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - wft3.setInputParameters(ip1); - wft3.setTaskReferenceName("t3"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - WorkflowTask wft4 = new WorkflowTask(); - wft4.setName("junit_task_4"); - wft4.setInputParameters(ip2); - wft4.setTaskReferenceName("t4"); - - forkTask.getForkTasks().add(Arrays.asList(wft1, wft3)); - forkTask.getForkTasks().add(Collections.singletonList(wft2)); - - def.getTasks().add(forkTask); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("forktask_join"); - join.setJoinOn(Arrays.asList("t3", "t2")); - - def.getTasks().add(wft4); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - - TaskModel task1 = new TaskModel(); - task1.setReferenceTaskName(wft1.getTaskReferenceName()); - - TaskModel task3 = new TaskModel(); - 
task3.setReferenceTaskName(wft3.getTaskReferenceName()); - - Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1, 0)) - .thenReturn(Collections.singletonList(task1)); - Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2, 0)) - .thenReturn(Collections.singletonList(task3)); - - String taskId = idGenerator.generate(); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withWorkflowTask(forkTask) - .withRetryCount(0) - .withTaskId(taskId) - .withDeciderService(deciderService) - .build(); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - "Fork task definition is not followed by a join task. Check the blueprint"); - forkJoinTaskMapper.getMappedTasks(taskMapperContext); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java deleted file mode 100644 index da548e531..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -public class HTTPTaskMapperTest { - - private HTTPTaskMapper httpTaskMapper; - private IDGenerator idGenerator; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - ParametersUtils parametersUtils = mock(ParametersUtils.class); - MetadataDAO metadataDAO = mock(MetadataDAO.class); - httpTaskMapper = new HTTPTaskMapper(parametersUtils, metadataDAO); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("http_task"); - workflowTask.setType(TaskType.HTTP.name()); - workflowTask.setTaskDefinition(new TaskDef("http_task")); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowModel workflow = new 
WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // when - List mappedTasks = httpTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasks_WithoutTaskDef() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("http_task"); - workflowTask.setType(TaskType.HTTP.name()); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(null) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // when - List mappedTasks = httpTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java deleted file mode 100644 index 2c3be681c..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -public class HumanTaskMapperTest { - - @Test - public void getMappedTasks() { - - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("human_task"); - workflowTask.setType(TaskType.HUMAN.name()); - String taskId = new IDGenerator().generate(); - - ParametersUtils parametersUtils = mock(ParametersUtils.class); - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - HumanTaskMapper humanTaskMapper = new HumanTaskMapper(parametersUtils); - // When - List mappedTasks = 
humanTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TASK_TYPE_HUMAN, mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java deleted file mode 100644 index 0c6c11872..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; - -public class InlineTaskMapperTest { - - private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("inline_task"); - workflowTask.setType(TaskType.INLINE.name()); - workflowTask.setTaskDefinition(new TaskDef("inline_task")); - workflowTask.setEvaluatorType(JavascriptEvaluator.NAME); - workflowTask.setExpression( - "function scriptFun() {if ($.input.a==1){return {testValue: true}} else{return " - + "{testValue: false} }}; scriptFun();"); - - String taskId = new IDGenerator().generate(); - - WorkflowDef workflowDef = new WorkflowDef(); 
- WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new InlineTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasks_WithoutTaskDef() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.INLINE.name()); - workflowTask.setEvaluatorType(JavascriptEvaluator.NAME); - workflowTask.setExpression( - "function scriptFun() {if ($.input.a==1){return {testValue: true}} else{return " - + "{testValue: false} }}; scriptFun();"); - - String taskId = new IDGenerator().generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(null) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new InlineTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java deleted file mode 100644 index 4e3be843b..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java +++ /dev/null @@ 
-1,62 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class JoinTaskMapperTest { - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.JOIN.name()); - workflowTask.setJoinOn(Arrays.asList("task1", "task2")); - - String taskId = new IDGenerator().generate(); - - WorkflowDef wd = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(wd); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = new JoinTaskMapper().getMappedTasks(taskMapperContext); - - assertNotNull(mappedTasks); - assertEquals(TASK_TYPE_JOIN, mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java 
b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java deleted file mode 100644 index ed4d187b1..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; - -public class JsonJQTransformTaskMapperTest { - - private IDGenerator idGenerator; - private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("json_jq_transform_task"); - workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name()); - workflowTask.setTaskDefinition(new TaskDef("json_jq_transform_task")); - - Map taskInput = new HashMap<>(); - taskInput.put("in1", new String[] {"a", "b"}); - taskInput.put("in2", new String[] {"c", "d"}); - taskInput.put("queryExpression", "{ out: (.in1 + .in2) }"); - 
workflowTask.setInputParameters(taskInput); - - String taskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(taskInput) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new JsonJQTransformTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasks_WithoutTaskDef() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("json_jq_transform_task"); - workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name()); - - Map taskInput = new HashMap<>(); - taskInput.put("in1", new String[] {"a", "b"}); - taskInput.put("in2", new String[] {"c", "d"}); - taskInput.put("queryExpression", "{ out: (.in1 + .in2) }"); - workflowTask.setInputParameters(taskInput); - - String taskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(null) - .withWorkflowTask(workflowTask) - .withTaskInput(taskInput) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new JsonJQTransformTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType()); - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java deleted file mode 100644 index bd7dd268e..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -public class KafkaPublishTaskMapperTest { - - private IDGenerator idGenerator; - private KafkaPublishTaskMapper kafkaTaskMapper; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - ParametersUtils parametersUtils = mock(ParametersUtils.class); - MetadataDAO metadataDAO = mock(MetadataDAO.class); - kafkaTaskMapper = new KafkaPublishTaskMapper(parametersUtils, metadataDAO); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("kafka_task"); - workflowTask.setType(TaskType.KAFKA_PUBLISH.name()); - workflowTask.setTaskDefinition(new TaskDef("kafka_task")); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - 
WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // when - List mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasks_WithoutTaskDef() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("kafka_task"); - workflowTask.setType(TaskType.KAFKA_PUBLISH.name()); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskDef taskdefinition = new TaskDef(); - String testExecutionNameSpace = "testExecutionNameSpace"; - taskdefinition.setExecutionNameSpace(testExecutionNameSpace); - String testIsolationGroupId = "testIsolationGroupId"; - taskdefinition.setIsolationGroupId(testIsolationGroupId); - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(taskdefinition) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // when - List mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType()); - assertEquals(testExecutionNameSpace, mappedTasks.get(0).getExecutionNameSpace()); - 
assertEquals(testIsolationGroupId, mappedTasks.get(0).getIsolationGroupId()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java deleted file mode 100644 index edeef004e..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; - -public class LambdaTaskMapperTest { - - private IDGenerator idGenerator; - private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("lambda_task"); - workflowTask.setType(TaskType.LAMBDA.name()); - workflowTask.setTaskDefinition(new TaskDef("lambda_task")); - workflowTask.setScriptExpression( - "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }"); - - String taskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - 
TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new LambdaTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasks_WithoutTaskDef() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.LAMBDA.name()); - workflowTask.setScriptExpression( - "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }"); - - String taskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(null) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new LambdaTaskMapper(parametersUtils, metadataDAO) - .getMappedTasks(taskMapperContext); - - assertEquals(1, mappedTasks.size()); - assertNotNull(mappedTasks); - assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java deleted file mode 100644 index 8d35c00ee..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.junit.Assert; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -public class SetVariableTaskMapperTest { - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_SET_VARIABLE); - - String taskId = new IDGenerator().generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = new SetVariableTaskMapper().getMappedTasks(taskMapperContext); - - Assert.assertNotNull(mappedTasks); - Assert.assertEquals(1, mappedTasks.size()); - Assert.assertEquals(TaskType.TASK_TYPE_SET_VARIABLE, mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java deleted file mode 100644 index 
e3cc14d3c..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; - -public class SimpleTaskMapperTest { - - private SimpleTaskMapper simpleTaskMapper; - - private IDGenerator idGenerator = new IDGenerator(); - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - ParametersUtils parametersUtils = mock(ParametersUtils.class); - simpleTaskMapper = new SimpleTaskMapper(parametersUtils); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("simple_task"); - workflowTask.setTaskDefinition(new TaskDef("simple_task")); - - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - 
TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - List mappedTasks = simpleTaskMapper.getMappedTasks(taskMapperContext); - assertNotNull(mappedTasks); - assertEquals(1, mappedTasks.size()); - } - - @Test - public void getMappedTasksException() { - - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("simple_task"); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // then - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - String.format( - "Invalid task. Task %s does not have a definition", - workflowTask.getName())); - - // when - simpleTaskMapper.getMappedTasks(taskMapperContext); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java deleted file mode 100644 index 9681bc2ea..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SubWorkflowTaskMapperTest { - - private SubWorkflowTaskMapper subWorkflowTaskMapper; - private ParametersUtils parametersUtils; - private DeciderService deciderService; - private IDGenerator idGenerator; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - MetadataDAO metadataDAO = mock(MetadataDAO.class); - 
subWorkflowTaskMapper = new SubWorkflowTaskMapper(parametersUtils, metadataDAO); - deciderService = mock(DeciderService.class); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - // Given - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(workflowDef); - WorkflowTask workflowTask = new WorkflowTask(); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("Foo"); - subWorkflowParams.setVersion(2); - workflowTask.setSubWorkflowParam(subWorkflowParams); - workflowTask.setStartDelay(30); - Map taskInput = new HashMap<>(); - Map taskToDomain = - new HashMap<>() { - { - put("*", "unittest"); - } - }; - - Map subWorkflowParamMap = new HashMap<>(); - subWorkflowParamMap.put("name", "FooWorkFlow"); - subWorkflowParamMap.put("version", 2); - subWorkflowParamMap.put("taskToDomain", taskToDomain); - when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(subWorkflowParamMap); - - // When - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(workflowTask) - .withTaskInput(taskInput) - .withRetryCount(0) - .withTaskId(idGenerator.generate()) - .withDeciderService(deciderService) - .build(); - - List mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertFalse(mappedTasks.isEmpty()); - assertEquals(1, mappedTasks.size()); - - TaskModel subWorkFlowTask = mappedTasks.get(0); - assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus()); - assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType()); - assertEquals(30, subWorkFlowTask.getCallbackAfterSeconds()); - assertEquals(taskToDomain, subWorkFlowTask.getInputData().get("subWorkflowTaskToDomain")); - } - - @Test - public void testTaskToDomain() { - // Given - WorkflowDef workflowDef = new 
WorkflowDef(); - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(workflowDef); - WorkflowTask workflowTask = new WorkflowTask(); - Map taskToDomain = - new HashMap<>() { - { - put("*", "unittest"); - } - }; - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("Foo"); - subWorkflowParams.setVersion(2); - subWorkflowParams.setTaskToDomain(taskToDomain); - workflowTask.setSubWorkflowParam(subWorkflowParams); - Map taskInput = new HashMap<>(); - - Map subWorkflowParamMap = new HashMap<>(); - subWorkflowParamMap.put("name", "FooWorkFlow"); - subWorkflowParamMap.put("version", 2); - - when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any())) - .thenReturn(subWorkflowParamMap); - - // When - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(workflowTask) - .withTaskInput(taskInput) - .withRetryCount(0) - .withTaskId(new IDGenerator().generate()) - .withDeciderService(deciderService) - .build(); - - List mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertFalse(mappedTasks.isEmpty()); - assertEquals(1, mappedTasks.size()); - - TaskModel subWorkFlowTask = mappedTasks.get(0); - assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus()); - assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType()); - } - - @Test - public void getSubWorkflowParams() { - WorkflowTask workflowTask = new WorkflowTask(); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("Foo"); - subWorkflowParams.setVersion(2); - workflowTask.setSubWorkflowParam(subWorkflowParams); - - assertEquals(subWorkflowParams, subWorkflowTaskMapper.getSubWorkflowParams(workflowTask)); - } - - @Test - public void getExceptionWhenNoSubWorkflowParamsPassed() { - WorkflowTask workflowTask = new WorkflowTask(); - 
workflowTask.setName("FooWorkFLow"); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - String.format( - "Task %s is defined as sub-workflow and is missing subWorkflowParams. " - + "Please check the workflow definition", - workflowTask.getName())); - - subWorkflowTaskMapper.getSubWorkflowParams(workflowTask); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java deleted file mode 100644 index 0b487eda4..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator; -import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - 
-@ContextConfiguration( - classes = { - TestObjectMapperConfiguration.class, - SwitchTaskMapperTest.TestConfiguration.class - }) -@RunWith(SpringRunner.class) -public class SwitchTaskMapperTest { - - private IDGenerator idGenerator; - private ParametersUtils parametersUtils; - private DeciderService deciderService; - // Subject - private SwitchTaskMapper switchTaskMapper; - - @Configuration - @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans. - public static class TestConfiguration {} - - @Autowired private ObjectMapper objectMapper; - - @Autowired private Map evaluators; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - Map ip1; - WorkflowTask task1; - WorkflowTask task2; - WorkflowTask task3; - - @Before - public void setUp() { - parametersUtils = new ParametersUtils(objectMapper); - idGenerator = new IDGenerator(); - - ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - ip1.put("case", "${workflow.input.case}"); - - task1 = new WorkflowTask(); - task1.setName("Test1"); - task1.setInputParameters(ip1); - task1.setTaskReferenceName("t1"); - - task2 = new WorkflowTask(); - task2.setName("Test2"); - task2.setInputParameters(ip1); - task2.setTaskReferenceName("t2"); - - task3 = new WorkflowTask(); - task3.setName("Test3"); - task3.setInputParameters(ip1); - task3.setTaskReferenceName("t3"); - deciderService = mock(DeciderService.class); - switchTaskMapper = new SwitchTaskMapper(evaluators); - } - - @Test - public void getMappedTasks() { - - // Given - // Task Definition - TaskDef taskDef = new TaskDef(); - Map inputMap = new HashMap<>(); - inputMap.put("Id", "${workflow.input.Id}"); - List> taskDefinitionInput = new LinkedList<>(); - taskDefinitionInput.add(inputMap); - - // Switch task instance - WorkflowTask switchTask = new WorkflowTask(); - switchTask.setType(TaskType.SWITCH.name()); - switchTask.setName("Switch"); - 
switchTask.setTaskReferenceName("switchTask"); - switchTask.setDefaultCase(Collections.singletonList(task1)); - switchTask.getInputParameters().put("Id", "${workflow.input.Id}"); - switchTask.setEvaluatorType(JavascriptEvaluator.NAME); - switchTask.setExpression( - "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; "); - Map> decisionCases = new HashMap<>(); - decisionCases.put("even", Collections.singletonList(task2)); - decisionCases.put("odd", Collections.singletonList(task3)); - switchTask.setDecisionCases(decisionCases); - // Workflow instance - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(workflowDef); - Map workflowInput = new HashMap<>(); - workflowInput.put("Id", "22"); - workflowModel.setInput(workflowInput); - - Map body = new HashMap<>(); - body.put("input", taskDefinitionInput); - taskDef.getInputTemplate().putAll(body); - - Map input = - parametersUtils.getTaskInput( - switchTask.getInputParameters(), workflowModel, null, null); - - TaskModel theTask = new TaskModel(); - theTask.setReferenceTaskName("Foo"); - theTask.setTaskId(idGenerator.generate()); - - when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null)) - .thenReturn(Collections.singletonList(theTask)); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(switchTask) - .withTaskInput(input) - .withRetryCount(0) - .withTaskId(idGenerator.generate()) - .withDeciderService(deciderService) - .build(); - - // When - List mappedTasks = switchTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(2, mappedTasks.size()); - assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName()); - assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName()); - } - - @Test - public void 
getMappedTasksWithValueParamEvaluator() { - - // Given - // Task Definition - TaskDef taskDef = new TaskDef(); - Map inputMap = new HashMap<>(); - inputMap.put("Id", "${workflow.input.Id}"); - List> taskDefinitionInput = new LinkedList<>(); - taskDefinitionInput.add(inputMap); - - // Switch task instance - WorkflowTask switchTask = new WorkflowTask(); - switchTask.setType(TaskType.SWITCH.name()); - switchTask.setName("Switch"); - switchTask.setTaskReferenceName("switchTask"); - switchTask.setDefaultCase(Collections.singletonList(task1)); - switchTask.getInputParameters().put("Id", "${workflow.input.Id}"); - switchTask.setEvaluatorType(ValueParamEvaluator.NAME); - switchTask.setExpression("Id"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("even", Collections.singletonList(task2)); - decisionCases.put("odd", Collections.singletonList(task3)); - switchTask.setDecisionCases(decisionCases); - // Workflow instance - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setSchemaVersion(2); - - WorkflowModel workflowModel = new WorkflowModel(); - workflowModel.setWorkflowDefinition(workflowDef); - Map workflowInput = new HashMap<>(); - workflowInput.put("Id", "even"); - workflowModel.setInput(workflowInput); - - Map body = new HashMap<>(); - body.put("input", taskDefinitionInput); - taskDef.getInputTemplate().putAll(body); - - Map input = - parametersUtils.getTaskInput( - switchTask.getInputParameters(), workflowModel, null, null); - - TaskModel theTask = new TaskModel(); - theTask.setReferenceTaskName("Foo"); - theTask.setTaskId(idGenerator.generate()); - - when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null)) - .thenReturn(Collections.singletonList(theTask)); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflowModel) - .withWorkflowTask(switchTask) - .withTaskInput(input) - .withRetryCount(0) - .withTaskId(idGenerator.generate()) - .withDeciderService(deciderService) - .build(); 
- - // When - List mappedTasks = switchTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(2, mappedTasks.size()); - assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName()); - assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java deleted file mode 100644 index 47e02a12e..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.List; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.mockito.Mockito.mock; - -public class TerminateTaskMapperTest { - private ParametersUtils parametersUtils; - - @Before - public void setUp() { - parametersUtils = mock(ParametersUtils.class); - } - - @Test - public void getMappedTasks() { - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - - String taskId = new IDGenerator().generate(); - - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - List mappedTasks = - new TerminateTaskMapper(parametersUtils).getMappedTasks(taskMapperContext); - - Assert.assertNotNull(mappedTasks); - Assert.assertEquals(1, mappedTasks.size()); - Assert.assertEquals(TaskType.TASK_TYPE_TERMINATE, 
mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java deleted file mode 100644 index afdafa28d..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -public class UserDefinedTaskMapperTest { - - private IDGenerator idGenerator; - - private UserDefinedTaskMapper userDefinedTaskMapper; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setUp() { - ParametersUtils parametersUtils = mock(ParametersUtils.class); - MetadataDAO metadataDAO = mock(MetadataDAO.class); - userDefinedTaskMapper = new UserDefinedTaskMapper(parametersUtils, metadataDAO); - idGenerator = new IDGenerator(); - } - - @Test - public void getMappedTasks() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("user_task"); - workflowTask.setType(TaskType.USER_DEFINED.name()); - workflowTask.setTaskDefinition(new TaskDef("user_task")); - String taskId 
= idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // when - List mappedTasks = userDefinedTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TaskType.USER_DEFINED.name(), mappedTasks.get(0).getTaskType()); - } - - @Test - public void getMappedTasksException() { - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("user_task"); - workflowTask.setType(TaskType.USER_DEFINED.name()); - String taskId = idGenerator.generate(); - String retriedTaskId = idGenerator.generate(); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withRetryTaskId(retriedTaskId) - .withTaskId(taskId) - .build(); - - // then - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage( - String.format( - "Invalid task specified. 
Cannot find task by name %s in the task definitions", - workflowTask.getName())); - // when - userDefinedTaskMapper.getMappedTasks(taskMapperContext); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java deleted file mode 100644 index 3b9071cef..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.mapper; - -import java.util.HashMap; -import java.util.List; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -public class WaitTaskMapperTest { - - @Test - public void getMappedTasks() { - - // Given - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("Wait_task"); - workflowTask.setType(TaskType.WAIT.name()); - String taskId = new IDGenerator().generate(); - - ParametersUtils parametersUtils = mock(ParametersUtils.class); - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef workflowDef = new WorkflowDef(); - workflow.setWorkflowDefinition(workflowDef); - - TaskMapperContext taskMapperContext = - TaskMapperContext.newBuilder() - .withWorkflowModel(workflow) - .withTaskDefinition(new TaskDef()) - .withWorkflowTask(workflowTask) - .withTaskInput(new HashMap<>()) - .withRetryCount(0) - .withTaskId(taskId) - .build(); - - WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils); - // When - List mappedTasks = 
waitTaskMapper.getMappedTasks(taskMapperContext); - - // Then - assertEquals(1, mappedTasks.size()); - assertEquals(TASK_TYPE_WAIT, mappedTasks.get(0).getTaskType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/EventQueueResolutionTest.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/EventQueueResolutionTest.java deleted file mode 100644 index 8d1ed59af..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/EventQueueResolutionTest.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.events.EventQueueProvider; -import com.netflix.conductor.core.events.EventQueues; -import com.netflix.conductor.core.events.MockQueueProvider; -import com.netflix.conductor.core.events.queue.ObservableQueue; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * Tests the {@link Event#getQueue(WorkflowModel, TaskModel)} method with a real {@link - * ParametersUtils} object. 
- */ -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class EventQueueResolutionTest { - - private WorkflowDef testWorkflowDefinition; - private EventQueues eventQueues; - private ParametersUtils parametersUtils; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setup() { - Map providers = new HashMap<>(); - providers.put("sqs", new MockQueueProvider("sqs")); - providers.put("conductor", new MockQueueProvider("conductor")); - - parametersUtils = new ParametersUtils(objectMapper); - eventQueues = new EventQueues(providers, parametersUtils); - - testWorkflowDefinition = new WorkflowDef(); - testWorkflowDefinition.setName("testWorkflow"); - testWorkflowDefinition.setVersion(2); - } - - @Test - public void testSinkParam() { - String sink = "sqs:queue_name"; - - WorkflowDef def = new WorkflowDef(); - def.setName("wf0"); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - - TaskModel task1 = new TaskModel(); - task1.setReferenceTaskName("t1"); - task1.getOutputData().put("q", "t1_queue"); - workflow.getTasks().add(task1); - - TaskModel task2 = new TaskModel(); - task2.setReferenceTaskName("t2"); - task2.getOutputData().put("q", "task2_queue"); - workflow.getTasks().add(task2); - - TaskModel task = new TaskModel(); - task.setReferenceTaskName("event"); - task.getInputData().put("sink", sink); - task.setTaskType(TaskType.EVENT.name()); - workflow.getTasks().add(task); - - Event event = new Event(eventQueues, parametersUtils, objectMapper); - ObservableQueue queue = event.getQueue(workflow, task); - assertNotNull(task.getReasonForIncompletion(), queue); - assertEquals("queue_name", queue.getName()); - assertEquals("sqs", queue.getType()); - - sink = "sqs:${t1.output.q}"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("t1_queue", queue.getName()); - assertEquals("sqs", 
queue.getType()); - - sink = "sqs:${t2.output.q}"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("task2_queue", queue.getName()); - assertEquals("sqs", queue.getType()); - - sink = "conductor"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals( - workflow.getWorkflowName() + ":" + task.getReferenceTaskName(), queue.getName()); - assertEquals("conductor", queue.getType()); - - sink = "sqs:static_value"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("static_value", queue.getName()); - assertEquals("sqs", queue.getType()); - assertEquals(sink, task.getOutputData().get("event_produced")); - } - - @Test - public void testDynamicSinks() { - Event event = new Event(eventQueues, parametersUtils, objectMapper); - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(testWorkflowDefinition); - - TaskModel task = new TaskModel(); - task.setReferenceTaskName("task0"); - task.setTaskId("task_id_0"); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.getInputData().put("sink", "conductor:some_arbitary_queue"); - - ObservableQueue queue = event.getQueue(workflow, task); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertNotNull(queue); - assertEquals("testWorkflow:some_arbitary_queue", queue.getName()); - assertEquals("testWorkflow:some_arbitary_queue", queue.getURI()); - assertEquals("conductor", queue.getType()); - assertEquals( - "conductor:testWorkflow:some_arbitary_queue", - task.getOutputData().get("event_produced")); - - task.getInputData().put("sink", "conductor"); - queue = event.getQueue(workflow, task); - assertEquals( - "not in progress: " + task.getReasonForIncompletion(), - TaskModel.Status.IN_PROGRESS, - task.getStatus()); - assertNotNull(queue); - assertEquals("testWorkflow:task0", 
queue.getName()); - - task.getInputData().put("sink", "sqs:my_sqs_queue_name"); - queue = event.getQueue(workflow, task); - assertEquals( - "not in progress: " + task.getReasonForIncompletion(), - TaskModel.Status.IN_PROGRESS, - task.getStatus()); - assertNotNull(queue); - assertEquals("my_sqs_queue_name", queue.getName()); - assertEquals("sqs", queue.getType()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java deleted file mode 100644 index c66af8fe1..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.evaluators.Evaluator; -import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator; -import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.mock; - -public class InlineTest { - - private final WorkflowModel workflow = new WorkflowModel(); - private final WorkflowExecutor executor = mock(WorkflowExecutor.class); - - @Test - public void testInlineTaskValidationFailures() { - Inline inline = new Inline(getStringEvaluatorMap()); - - Map inputObj = new HashMap<>(); - inputObj.put("value", 1); - inputObj.put("expression", ""); - inputObj.put("evaluatorType", "value-param"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(inputObj); - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertEquals( - "Empty 'expression' in Inline task's input parameters. 
A non-empty String value must be provided.", - task.getReasonForIncompletion()); - - inputObj = new HashMap<>(); - inputObj.put("value", 1); - inputObj.put("expression", "value"); - inputObj.put("evaluatorType", ""); - - task = new TaskModel(); - task.getInputData().putAll(inputObj); - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertEquals( - "Empty 'evaluatorType' in Inline task's input parameters. A non-empty String value must be provided.", - task.getReasonForIncompletion()); - } - - @Test - public void testInlineValueParamExpression() { - Inline inline = new Inline(getStringEvaluatorMap()); - - Map inputObj = new HashMap<>(); - inputObj.put("value", 101); - inputObj.put("expression", "value"); - inputObj.put("evaluatorType", "value-param"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(inputObj); - - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals(101, task.getOutputData().get("result")); - - inputObj = new HashMap<>(); - inputObj.put("value", "StringValue"); - inputObj.put("expression", "value"); - inputObj.put("evaluatorType", "value-param"); - - task = new TaskModel(); - task.getInputData().putAll(inputObj); - - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals("StringValue", task.getOutputData().get("result")); - } - - @SuppressWarnings("unchecked") - @Test - public void testInlineJavascriptExpression() { - Inline inline = new Inline(getStringEvaluatorMap()); - - Map inputObj = new HashMap<>(); - inputObj.put("value", 101); - inputObj.put( - "expression", - "function e() { if ($.value == 101){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();"); - inputObj.put("evaluatorType", "javascript"); - - TaskModel task = new 
TaskModel(); - task.getInputData().putAll(inputObj); - - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals( - true, ((Map) task.getOutputData().get("result")).get("evalResult")); - - inputObj = new HashMap<>(); - inputObj.put("value", "StringValue"); - inputObj.put( - "expression", - "function e() { if ($.value == 'StringValue'){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();"); - inputObj.put("evaluatorType", "javascript"); - - task = new TaskModel(); - task.getInputData().putAll(inputObj); - - inline.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - assertEquals( - true, ((Map) task.getOutputData().get("result")).get("evalResult")); - } - - private Map getStringEvaluatorMap() { - Map evaluators = new HashMap<>(); - evaluators.put(ValueParamEvaluator.NAME, new ValueParamEvaluator()); - evaluators.put(JavascriptEvaluator.NAME, new JavascriptEvaluator()); - return evaluators; - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java deleted file mode 100644 index b6a857fbe..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; - -/** - * @author x-ultra - */ -public class TestLambda { - - private final WorkflowModel workflow = new WorkflowModel(); - private final WorkflowExecutor executor = mock(WorkflowExecutor.class); - - @SuppressWarnings({"rawtypes", "unchecked"}) - @Test - public void start() { - Lambda lambda = new Lambda(); - - Map inputObj = new HashMap(); - inputObj.put("a", 1); - - // test for scriptExpression == null - TaskModel task = new TaskModel(); - task.getInputData().put("input", inputObj); - lambda.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - - // test for normal - task = new TaskModel(); - task.getInputData().put("input", inputObj); - task.getInputData().put("scriptExpression", "if ($.input.a==1){return 1}else{return 0 } "); - lambda.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(task.getOutputData().toString(), "{result=1}"); - - // test for scriptExpression ScriptException - task = new TaskModel(); - task.getInputData().put("input", inputObj); - task.getInputData().put("scriptExpression", "if ($.a.size==1){return 1}else{return 0 } "); - lambda.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSubWorkflow.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSubWorkflow.java deleted file mode 100644 index f2dad62f9..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSubWorkflow.java +++ /dev/null @@ -1,492 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class TestSubWorkflow { - - private WorkflowExecutor workflowExecutor; - private SubWorkflow subWorkflow; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setup() { - workflowExecutor = mock(WorkflowExecutor.class); - subWorkflow = new SubWorkflow(objectMapper); - } - - @Test - public void 
testStartSubWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 3); - task.setInputData(inputData); - - String workflowId = "workflow_1"; - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId(workflowId); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(3), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - any())) - .thenReturn(workflowId); - - when(workflowExecutor.getWorkflow(anyString(), eq(false))).thenReturn(workflow); - - workflow.setStatus(WorkflowModel.Status.RUNNING); - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - - workflow.setStatus(WorkflowModel.Status.TERMINATED); - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - assertEquals(TaskModel.Status.CANCELED, task.getStatus()); - - workflow.setStatus(WorkflowModel.Status.COMPLETED); - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - } - - @Test - public void testStartSubWorkflowQueueFailure() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - task.setStatus(TaskModel.Status.SCHEDULED); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 3); - 
task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(3), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - any())) - .thenThrow( - new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, "QueueDAO failure")); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertNull("subWorkflowId should be null", task.getSubWorkflowId()); - assertEquals(TaskModel.Status.SCHEDULED, task.getStatus()); - assertTrue("Output data should be empty", task.getOutputData().isEmpty()); - } - - @Test - public void testStartSubWorkflowStartError() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - task.setStatus(TaskModel.Status.SCHEDULED); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 3); - task.setInputData(inputData); - - String failureReason = "non transient failure"; - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(3), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - any())) - .thenThrow( - new ApplicationException( - ApplicationException.Code.INTERNAL_ERROR, failureReason)); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertNull("subWorkflowId should be null", task.getSubWorkflowId()); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertEquals(failureReason, task.getReasonForIncompletion()); - assertTrue("Output data should be empty", task.getOutputData().isEmpty()); - } - - @Test - public void testStartSubWorkflowWithEmptyWorkflowInput() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - 
task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 3); - - Map workflowInput = new HashMap<>(); - inputData.put("workflowInput", workflowInput); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(3), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - any())) - .thenReturn("workflow_1"); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - } - - @Test - public void testStartSubWorkflowWithWorkflowInput() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 3); - - Map workflowInput = new HashMap<>(); - workflowInput.put("test", "value"); - inputData.put("workflowInput", workflowInput); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(3), - eq(workflowInput), - eq(null), - any(), - any(), - any(), - eq(null), - any())) - .thenReturn("workflow_1"); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - } - - @Test - public void testStartSubWorkflowTaskToDomain() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - Map taskToDomain = - new HashMap<>() { - { - put("*", "unittest"); - } - }; - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - 
inputData.put("subWorkflowTaskToDomain", taskToDomain); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(2), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - eq(taskToDomain))) - .thenReturn("workflow_1"); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - } - - @Test - public void testExecuteSubWorkflowWithoutId() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(2), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - eq(null))) - .thenReturn("workflow_1"); - - assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - } - - @Test - public void testExecuteWorkflowStatus() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - WorkflowModel subWorkflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - Map taskToDomain = - new HashMap<>() { - { - put("*", "unittest"); - } - }; - - TaskModel task = new TaskModel(); - Map outputData = new HashMap<>(); - task.setOutputData(outputData); - task.setSubWorkflowId("sub-workflow-id"); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - inputData.put("subWorkflowTaskToDomain", taskToDomain); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(2), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - 
eq(taskToDomain))) - .thenReturn("workflow_1"); - when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(false))) - .thenReturn(subWorkflowInstance); - - subWorkflowInstance.setStatus(WorkflowModel.Status.RUNNING); - assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertNull(task.getStatus()); - assertNull(task.getReasonForIncompletion()); - - subWorkflowInstance.setStatus(WorkflowModel.Status.PAUSED); - assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertNull(task.getStatus()); - assertNull(task.getReasonForIncompletion()); - - subWorkflowInstance.setStatus(WorkflowModel.Status.COMPLETED); - assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull(task.getReasonForIncompletion()); - - subWorkflowInstance.setStatus(WorkflowModel.Status.FAILED); - subWorkflowInstance.setReasonForIncompletion("unit1"); - assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertTrue(task.getReasonForIncompletion().contains("unit1")); - - subWorkflowInstance.setStatus(WorkflowModel.Status.TIMED_OUT); - subWorkflowInstance.setReasonForIncompletion("unit2"); - assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertEquals(TaskModel.Status.TIMED_OUT, task.getStatus()); - assertTrue(task.getReasonForIncompletion().contains("unit2")); - - subWorkflowInstance.setStatus(WorkflowModel.Status.TERMINATED); - subWorkflowInstance.setReasonForIncompletion("unit3"); - assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor)); - assertEquals(TaskModel.Status.CANCELED, task.getStatus()); - assertTrue(task.getReasonForIncompletion().contains("unit3")); - } - - @Test - public void testCancelWithWorkflowId() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - 
WorkflowModel subWorkflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - task.setSubWorkflowId("sub-workflow-id"); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(2), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - eq(null))) - .thenReturn("workflow_1"); - when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(true))) - .thenReturn(subWorkflowInstance); - - workflowInstance.setStatus(WorkflowModel.Status.TIMED_OUT); - subWorkflow.cancel(workflowInstance, task, workflowExecutor); - - assertEquals(WorkflowModel.Status.TERMINATED, subWorkflowInstance.getStatus()); - } - - @Test - public void testCancelWithoutWorkflowId() { - WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - WorkflowModel subWorkflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - TaskModel task = new TaskModel(); - Map outputData = new HashMap<>(); - task.setOutputData(outputData); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq("UnitWorkFlow"), - eq(2), - eq(inputData), - eq(null), - any(), - any(), - any(), - eq(null), - eq(null))) - .thenReturn("workflow_1"); - when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(false))) - .thenReturn(subWorkflowInstance); - - subWorkflow.cancel(workflowInstance, task, workflowExecutor); - - assertEquals(WorkflowModel.Status.RUNNING, subWorkflowInstance.getStatus()); - } - - @Test - public void testIsAsync() { - assertTrue(subWorkflow.isAsync()); - } - - @Test - public void testStartSubWorkflowWithSubWorkflowDefinition() { 
- WorkflowDef workflowDef = new WorkflowDef(); - WorkflowModel workflowInstance = new WorkflowModel(); - workflowInstance.setWorkflowDefinition(workflowDef); - - WorkflowDef subWorkflowDef = new WorkflowDef(); - subWorkflowDef.setName("subWorkflow_1"); - - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - Map inputData = new HashMap<>(); - inputData.put("subWorkflowName", "UnitWorkFlow"); - inputData.put("subWorkflowVersion", 2); - inputData.put("subWorkflowDefinition", subWorkflowDef); - task.setInputData(inputData); - - when(workflowExecutor.startWorkflow( - eq(subWorkflowDef), - eq(inputData), - eq(null), - any(), - eq(0), - any(), - any(), - eq(null), - any())) - .thenReturn("workflow_1"); - - subWorkflow.start(workflowInstance, task, workflowExecutor); - assertEquals("workflow_1", task.getSubWorkflowId()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java deleted file mode 100644 index c04f2ff85..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.time.Duration; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.CountDownLatch; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.service.ExecutionService; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class TestSystemTaskWorker { - - private static final String TEST_TASK = "system_task"; - private static final String ISOLATED_TASK = "system_task-isolated"; - - private AsyncSystemTaskExecutor asyncSystemTaskExecutor; - private ExecutionService executionService; - private QueueDAO queueDAO; - private ConductorProperties properties; - - private SystemTaskWorker systemTaskWorker; - - @Before - public void setUp() { - asyncSystemTaskExecutor = mock(AsyncSystemTaskExecutor.class); - executionService = mock(ExecutionService.class); - queueDAO = mock(QueueDAO.class); - properties = mock(ConductorProperties.class); - - when(properties.getSystemTaskWorkerThreadCount()).thenReturn(10); - 
when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(10); - when(properties.getSystemTaskWorkerCallbackDuration()).thenReturn(Duration.ofSeconds(30)); - when(properties.getSystemTaskMaxPollCount()).thenReturn(1); - when(properties.getSystemTaskWorkerPollInterval()).thenReturn(Duration.ofSeconds(30)); - - systemTaskWorker = - new SystemTaskWorker( - queueDAO, asyncSystemTaskExecutor, properties, executionService); - systemTaskWorker.start(); - } - - @After - public void tearDown() { - systemTaskWorker.queueExecutionConfigMap.clear(); - systemTaskWorker.stop(); - } - - @Test - public void testGetExecutionConfigForSystemTask() { - when(properties.getSystemTaskWorkerThreadCount()).thenReturn(5); - systemTaskWorker = - new SystemTaskWorker( - queueDAO, asyncSystemTaskExecutor, properties, executionService); - assertEquals( - systemTaskWorker.getExecutionConfig("").getSemaphoreUtil().availableSlots(), 5); - } - - @Test - public void testGetExecutionConfigForIsolatedSystemTask() { - when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(7); - systemTaskWorker = - new SystemTaskWorker( - queueDAO, asyncSystemTaskExecutor, properties, executionService); - assertEquals( - systemTaskWorker.getExecutionConfig("test-iso").getSemaphoreUtil().availableSlots(), - 7); - } - - @Test - public void testPollAndExecuteSystemTask() throws Exception { - when(queueDAO.pop(anyString(), anyInt(), anyInt())) - .thenReturn(Collections.singletonList("taskId")); - - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(asyncSystemTaskExecutor) - .execute(any(), anyString()); - - systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK); - - latch.await(); - - verify(asyncSystemTaskExecutor).execute(any(), anyString()); - } - - @Test - public void testBatchPollAndExecuteSystemTask() throws Exception { - when(properties.getSystemTaskMaxPollCount()).thenReturn(2); - when(queueDAO.pop(anyString(), 
anyInt(), anyInt())).thenReturn(List.of("t1", "t1")); - - CountDownLatch latch = new CountDownLatch(2); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(asyncSystemTaskExecutor) - .execute(any(), eq("t1")); - - systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK); - - latch.await(); - - verify(asyncSystemTaskExecutor, Mockito.times(2)).execute(any(), eq("t1")); - } - - @Test - public void testPollAndExecuteIsolatedSystemTask() throws Exception { - when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenReturn(List.of("isolated_taskId")); - - CountDownLatch latch = new CountDownLatch(1); - doAnswer( - invocation -> { - latch.countDown(); - return null; - }) - .when(asyncSystemTaskExecutor) - .execute(any(), eq("isolated_taskId")); - - systemTaskWorker.pollAndExecute(new IsolatedTask(), ISOLATED_TASK); - - latch.await(); - - verify(asyncSystemTaskExecutor, Mockito.times(1)).execute(any(), eq("isolated_taskId")); - } - - @Test - public void testPollException() { - when(properties.getSystemTaskWorkerThreadCount()).thenReturn(1); - when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class); - - systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK); - - verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString()); - } - - @Test - public void testBatchPollException() { - when(properties.getSystemTaskWorkerThreadCount()).thenReturn(2); - when(properties.getSystemTaskMaxPollCount()).thenReturn(2); - when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class); - - systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK); - - verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString()); - } - - static class TestTask extends WorkflowSystemTask { - public TestTask() { - super(TEST_TASK); - } - } - - static class IsolatedTask extends WorkflowSystemTask { - public IsolatedTask() { - super(ISOLATED_TASK); - } - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorkerCoordinator.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorkerCoordinator.java deleted file mode 100644 index 346b15c5b..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorkerCoordinator.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.time.Duration; -import java.util.Collections; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.core.config.ConductorProperties; - -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class TestSystemTaskWorkerCoordinator { - - private static final String TEST_QUEUE = "test"; - private static final String EXECUTION_NAMESPACE_CONSTANT = "@exeNS"; - - private SystemTaskWorker systemTaskWorker; - private ConductorProperties properties; - - @Before - public void setUp() { - systemTaskWorker = mock(SystemTaskWorker.class); - properties = mock(ConductorProperties.class); - when(properties.getSystemTaskWorkerPollInterval()).thenReturn(Duration.ofMillis(50)); - when(properties.getSystemTaskWorkerExecutionNamespace()).thenReturn(""); - } - - @Test - public void testIsFromCoordinatorExecutionNameSpace() { - doReturn("exeNS").when(properties).getSystemTaskWorkerExecutionNamespace(); - SystemTaskWorkerCoordinator systemTaskWorkerCoordinator = - new SystemTaskWorkerCoordinator( - systemTaskWorker, properties, Collections.emptySet()); - assertTrue( - systemTaskWorkerCoordinator.isFromCoordinatorExecutionNameSpace( - new TaskWithExecutionNamespace())); - } - - static class TaskWithExecutionNamespace extends WorkflowSystemTask { - public TaskWithExecutionNamespace() { - super(TEST_QUEUE + EXECUTION_NAMESPACE_CONSTANT); - } - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java 
b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java deleted file mode 100644 index 7ecf0d9d2..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter; -import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationWorkflowOutputParameter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; - -public class TestTerminate { - - private final WorkflowExecutor executor = mock(WorkflowExecutor.class); - - @Test - public void should_fail_if_input_status_is_not_valid() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), "PAUSED"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - } - - @Test - public void should_fail_if_input_status_is_empty() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), ""); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - } - - @Test - public void 
should_fail_if_input_status_is_null() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), null); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - } - - @Test - public void should_complete_workflow_on_terminate_task_success() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}")); - - HashMap expectedOutput = - new HashMap<>() { - { - put("output", "${task0.output.value}"); - } - }; - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), "COMPLETED"); - input.put(getTerminationWorkflowOutputParameter(), "${task0.output.value}"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(expectedOutput, task.getOutputData()); - } - - @Test - public void should_fail_workflow_on_terminate_task_success() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}")); - - HashMap expectedOutput = - new HashMap<>() { - { - put("output", "${task0.output.value}"); - } - }; - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), "FAILED"); - input.put(getTerminationWorkflowOutputParameter(), "${task0.output.value}"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(expectedOutput, task.getOutputData()); - } - - @Test - public void 
should_fail_workflow_on_terminate_task_success_with_empty_output() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), "FAILED"); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertTrue(task.getOutputData().isEmpty()); - } - - @Test - public void should_fail_workflow_on_terminate_task_success_with_resolved_output() { - WorkflowModel workflow = new WorkflowModel(); - Terminate terminateTask = new Terminate(); - - HashMap expectedOutput = - new HashMap<>() { - { - put("result", 1); - } - }; - - Map input = new HashMap<>(); - input.put(getTerminationStatusParameter(), "FAILED"); - input.put(getTerminationWorkflowOutputParameter(), expectedOutput); - - TaskModel task = new TaskModel(); - task.getInputData().putAll(input); - terminateTask.execute(workflow, task, executor); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestWait.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestWait.java deleted file mode 100644 index e419d0795..000000000 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestWait.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.execution.tasks; - -import java.text.ParseException; -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import java.util.Date; - -import org.apache.commons.lang3.time.DateUtils; -import org.junit.Test; - -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.*; - -public class TestWait { - - private final Wait wait = new Wait(); - - @Test - public void testWaitForever() { - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.SCHEDULED); - WorkflowModel model = new WorkflowModel(); - - wait.start(model, task, null); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertTrue(task.getOutputData().isEmpty()); - } - - @Test - public void testWaitUntil() throws ParseException { - String dateFormat = "yyyy-MM-dd HH:mm"; - - WorkflowModel model = new WorkflowModel(); - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.SCHEDULED); - - DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormat); - LocalDateTime now = LocalDateTime.now(); - String formatted = formatter.format(now); - System.out.println(formatted); - - task.getInputData().put(Wait.UNTIL_INPUT, formatted); - Date parsed = DateUtils.parseDate(formatted, dateFormat); - - wait.start(model, task, null); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertEquals(parsed.getTime(), task.getWaitTimeout()); - - // Execute runs when checking if the task has completed - boolean updated = wait.execute(model, task, null); - assertTrue(updated); - 
assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - } - - @Test - public void testWaitDuration() throws ParseException { - WorkflowModel model = new WorkflowModel(); - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.SCHEDULED); - - task.getInputData().put(Wait.DURATION_INPUT, "1s"); - wait.start(model, task, null); - long now = System.currentTimeMillis(); - - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertEquals(now + 1000, task.getWaitTimeout()); - - try { - Thread.sleep(2_000); - } catch (InterruptedException e) { - } - - // Execute runs when checking if the task has completed - boolean updated = wait.execute(model, task, null); - assertTrue(updated); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - } - - @Test - public void testInvalidWaitConfig() throws ParseException { - WorkflowModel model = new WorkflowModel(); - - TaskModel task = new TaskModel(); - task.setStatus(TaskModel.Status.SCHEDULED); - - task.getInputData().put(Wait.DURATION_INPUT, "1s"); - task.getInputData().put(Wait.UNTIL_INPUT, "2022-12-12"); - wait.start(model, task, null); - assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, task.getStatus()); - assertTrue(!task.getReasonForIncompletion().isEmpty()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java deleted file mode 100644 index 1d9444c0f..000000000 --- a/core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.metadata; - -import java.util.List; -import java.util.Optional; -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.dao.MetadataDAO; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static 
org.mockito.Mockito.when; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class MetadataMapperServiceTest { - - @TestConfiguration - static class TestMetadataMapperServiceConfiguration { - - @Bean - public MetadataDAO metadataDAO() { - return mock(MetadataDAO.class); - } - - @Bean - public MetadataMapperService metadataMapperService(MetadataDAO metadataDAO) { - return new MetadataMapperService(metadataDAO); - } - } - - @Autowired private MetadataDAO metadataDAO; - - @Autowired private MetadataMapperService metadataMapperService; - - @After - public void cleanUp() { - reset(metadataDAO); - } - - @Test - public void testMetadataPopulationOnSimpleTask() { - String nameTaskDefinition = "task1"; - TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition); - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - - when(metadataDAO.getTaskDef(nameTaskDefinition)).thenReturn(taskDefinition); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - assertEquals(1, workflowDefinition.getTasks().size()); - WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0); - assertNotNull(populatedWorkflowTask.getTaskDefinition()); - verify(metadataDAO).getTaskDef(nameTaskDefinition); - } - - @Test - public void testNoMetadataPopulationOnEmbeddedTaskDefinition() { - String nameTaskDefinition = "task2"; - TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition); - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - workflowTask.setTaskDefinition(taskDefinition); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - 
assertEquals(1, workflowDefinition.getTasks().size()); - WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0); - assertNotNull(populatedWorkflowTask.getTaskDefinition()); - verifyNoInteractions(metadataDAO); - } - - @Test - public void testMetadataPopulationOnlyOnNecessaryWorkflowTasks() { - String nameTaskDefinition1 = "task4"; - TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1); - WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1); - workflowTask1.setTaskDefinition(taskDefinition); - - String nameTaskDefinition2 = "task5"; - WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask1, workflowTask2)); - - when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(taskDefinition); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - assertEquals(2, workflowDefinition.getTasks().size()); - List workflowTasks = workflowDefinition.getTasks(); - assertNotNull(workflowTasks.get(0).getTaskDefinition()); - assertNotNull(workflowTasks.get(1).getTaskDefinition()); - - verify(metadataDAO).getTaskDef(nameTaskDefinition2); - verifyNoMoreInteractions(metadataDAO); - } - - @Test(expected = ApplicationException.class) - public void testMetadataPopulationMissingDefinitions() { - String nameTaskDefinition1 = "task4"; - WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1); - - String nameTaskDefinition2 = "task5"; - WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2); - - TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask1, workflowTask2)); - - when(metadataDAO.getTaskDef(nameTaskDefinition1)).thenReturn(taskDefinition); - 
when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(null); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - } - - @Test - public void testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable() { - String nameTaskDefinition = "taskSubworkflow6"; - String workflowDefinitionName = "subworkflow"; - int version = 3; - - WorkflowDef subWorkflowDefinition = createWorkflowDefinition("workflowDefinitionName"); - subWorkflowDefinition.setVersion(version); - - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(workflowDefinitionName); - workflowTask.setSubWorkflowParam(subWorkflowParams); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - when(metadataDAO.getLatestWorkflowDef(workflowDefinitionName)) - .thenReturn(Optional.of(subWorkflowDefinition)); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - assertEquals(1, workflowDefinition.getTasks().size()); - List workflowTasks = workflowDefinition.getTasks(); - SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam(); - - assertEquals(workflowDefinitionName, params.getName()); - assertEquals(version, params.getVersion().intValue()); - - verify(metadataDAO).getLatestWorkflowDef(workflowDefinitionName); - verify(metadataDAO).getTaskDef(nameTaskDefinition); - verifyNoMoreInteractions(metadataDAO); - } - - @Test - public void testNoVersionPopulationForSubworkflowTaskIfAvailable() { - String nameTaskDefinition = "taskSubworkflow7"; - String workflowDefinitionName = "subworkflow"; - Integer version = 2; - - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - 
subWorkflowParams.setName(workflowDefinitionName); - subWorkflowParams.setVersion(version); - workflowTask.setSubWorkflowParam(subWorkflowParams); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - assertEquals(1, workflowDefinition.getTasks().size()); - List workflowTasks = workflowDefinition.getTasks(); - SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam(); - - assertEquals(workflowDefinitionName, params.getName()); - assertEquals(version, params.getVersion()); - - verify(metadataDAO).getTaskDef(nameTaskDefinition); - verifyNoMoreInteractions(metadataDAO); - } - - @Test(expected = TerminateWorkflowException.class) - public void testExceptionWhenWorkflowDefinitionNotAvailable() { - String nameTaskDefinition = "taskSubworkflow8"; - String workflowDefinitionName = "subworkflow"; - - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(workflowDefinitionName); - workflowTask.setSubWorkflowParam(subWorkflowParams); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - when(metadataDAO.getLatestWorkflowDef(workflowDefinitionName)).thenReturn(Optional.empty()); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - verify(metadataDAO).getLatestWorkflowDef(workflowDefinitionName); - } - - @Test(expected = IllegalArgumentException.class) - public void testLookupWorkflowDefinition() { - try { - String workflowName = "test"; - when(metadataDAO.getWorkflowDef(workflowName, 0)) - .thenReturn(Optional.of(new WorkflowDef())); - Optional optionalWorkflowDef = - metadataMapperService.lookupWorkflowDefinition(workflowName, 
0); - assertTrue(optionalWorkflowDef.isPresent()); - metadataMapperService.lookupWorkflowDefinition(null, 0); - } catch (ConstraintViolationException ex) { - Assert.assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowIds list cannot be null.")); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testLookupLatestWorkflowDefinition() { - String workflowName = "test"; - when(metadataDAO.getLatestWorkflowDef(workflowName)) - .thenReturn(Optional.of(new WorkflowDef())); - Optional optionalWorkflowDef = - metadataMapperService.lookupLatestWorkflowDefinition(workflowName); - assertTrue(optionalWorkflowDef.isPresent()); - - metadataMapperService.lookupLatestWorkflowDefinition(null); - } - - @Test - public void testShouldNotPopulateTaskDefinition() { - WorkflowTask workflowTask = createWorkflowTask(""); - assertFalse(metadataMapperService.shouldPopulateTaskDefinition(workflowTask)); - } - - @Test - public void testShouldPopulateTaskDefinition() { - WorkflowTask workflowTask = createWorkflowTask("test"); - assertTrue(metadataMapperService.shouldPopulateTaskDefinition(workflowTask)); - } - - @Test - public void testMetadataPopulationOnSimpleTaskDefMissing() { - String nameTaskDefinition = "task1"; - WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); - - when(metadataDAO.getTaskDef(nameTaskDefinition)).thenReturn(null); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); - workflowDefinition.setTasks(List.of(workflowTask)); - - metadataMapperService.populateTaskDefinitions(workflowDefinition); - - assertEquals(1, workflowDefinition.getTasks().size()); - WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0); - assertNotNull(populatedWorkflowTask.getTaskDefinition()); - } - - private WorkflowDef createWorkflowDefinition(String name) { - WorkflowDef 
workflowDefinition = new WorkflowDef(); - workflowDefinition.setName(name); - return workflowDefinition; - } - - private WorkflowTask createWorkflowTask(String name) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName(name); - workflowTask.setType(TaskType.SIMPLE.name()); - return workflowTask; - } - - private TaskDef createTaskDefinition(String name) { - return new TaskDef(name); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowRepairService.java b/core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowRepairService.java deleted file mode 100644 index dd7e891d6..000000000 --- a/core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowRepairService.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.reconciliation; - -import java.time.Duration; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.*; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.*; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class TestWorkflowRepairService { - - private QueueDAO queueDAO; - private ExecutionDAO executionDAO; - private ConductorProperties properties; - private WorkflowRepairService workflowRepairService; - private SystemTaskRegistry systemTaskRegistry; - - @Before - public void setUp() { - executionDAO = mock(ExecutionDAO.class); - queueDAO = mock(QueueDAO.class); - properties = mock(ConductorProperties.class); - systemTaskRegistry = mock(SystemTaskRegistry.class); - workflowRepairService = - new WorkflowRepairService(executionDAO, queueDAO, properties, systemTaskRegistry); - 
} - - @Test - public void verifyAndRepairSimpleTaskInScheduledState() { - TaskModel task = new TaskModel(); - task.setTaskType("SIMPLE"); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setTaskId("abcd"); - task.setCallbackAfterSeconds(60); - - when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false); - - assertTrue(workflowRepairService.verifyAndRepairTask(task)); - // Verify that a new queue message is pushed for sync system tasks that fails queue contains - // check. - verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong()); - } - - @Test - public void verifySimpleTaskInProgressState() { - TaskModel task = new TaskModel(); - task.setTaskType("SIMPLE"); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setTaskId("abcd"); - task.setCallbackAfterSeconds(60); - - when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false); - - assertFalse(workflowRepairService.verifyAndRepairTask(task)); - // Verify that queue message is never pushed for simple task in IN_PROGRESS state - verify(queueDAO, never()).containsMessage(anyString(), anyString()); - verify(queueDAO, never()).push(anyString(), anyString(), anyLong()); - } - - @Test - public void verifyAndRepairSystemTask() { - String taskType = "TEST_SYS_TASK"; - TaskModel task = new TaskModel(); - task.setTaskType(taskType); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setTaskId("abcd"); - task.setCallbackAfterSeconds(60); - - when(systemTaskRegistry.isSystemTask("TEST_SYS_TASK")).thenReturn(true); - when(systemTaskRegistry.get(taskType)) - .thenReturn( - new WorkflowSystemTask("TEST_SYS_TASK") { - @Override - public boolean isAsync() { - return true; - } - - @Override - public boolean isAsyncComplete(TaskModel task) { - return false; - } - - @Override - public void start( - WorkflowModel workflow, - TaskModel task, - WorkflowExecutor executor) { - super.start(workflow, task, executor); - } - }); - - when(queueDAO.containsMessage(anyString(), 
anyString())).thenReturn(false); - - assertTrue(workflowRepairService.verifyAndRepairTask(task)); - // Verify that a new queue message is pushed for tasks that fails queue contains check. - verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong()); - - // Verify a system task in IN_PROGRESS state can be recovered. - reset(queueDAO); - task.setStatus(TaskModel.Status.IN_PROGRESS); - assertTrue(workflowRepairService.verifyAndRepairTask(task)); - // Verify that a new queue message is pushed for async System task in IN_PROGRESS state that - // fails queue contains check. - verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong()); - } - - @Test - public void assertSyncSystemTasksAreNotCheckedAgainstQueue() { - // Return a Switch task object to init WorkflowSystemTask registry. - when(systemTaskRegistry.get(TASK_TYPE_DECISION)).thenReturn(new Decision()); - when(systemTaskRegistry.isSystemTask(TASK_TYPE_DECISION)).thenReturn(true); - when(systemTaskRegistry.get(TASK_TYPE_SWITCH)).thenReturn(new Switch()); - when(systemTaskRegistry.isSystemTask(TASK_TYPE_SWITCH)).thenReturn(true); - - TaskModel task = new TaskModel(); - task.setTaskType(TASK_TYPE_DECISION); - task.setStatus(TaskModel.Status.SCHEDULED); - - assertFalse(workflowRepairService.verifyAndRepairTask(task)); - // Verify that queue contains is never checked for sync system tasks - verify(queueDAO, never()).containsMessage(anyString(), anyString()); - // Verify that queue message is never pushed for sync system tasks - verify(queueDAO, never()).push(anyString(), anyString(), anyLong()); - - task = new TaskModel(); - task.setTaskType(TASK_TYPE_SWITCH); - task.setStatus(TaskModel.Status.SCHEDULED); - - assertFalse(workflowRepairService.verifyAndRepairTask(task)); - // Verify that queue contains is never checked for sync system tasks - verify(queueDAO, never()).containsMessage(anyString(), anyString()); - // Verify that queue message is never pushed for sync system tasks - verify(queueDAO, 
never()).push(anyString(), anyString(), anyLong()); - } - - @Test - public void assertAsyncCompleteInProgressSystemTasksAreNotCheckedAgainstQueue() { - TaskModel task = new TaskModel(); - task.setTaskType(TASK_TYPE_SUB_WORKFLOW); - task.setStatus(TaskModel.Status.IN_PROGRESS); - task.setTaskId("abcd"); - task.setCallbackAfterSeconds(60); - - WorkflowSystemTask workflowSystemTask = new SubWorkflow(new ObjectMapper()); - when(systemTaskRegistry.get(TASK_TYPE_SUB_WORKFLOW)).thenReturn(workflowSystemTask); - - assertTrue(workflowSystemTask.isAsyncComplete(task)); - - assertFalse(workflowRepairService.verifyAndRepairTask(task)); - // Verify that queue message is never pushed for async complete system tasks - verify(queueDAO, never()).containsMessage(anyString(), anyString()); - verify(queueDAO, never()).push(anyString(), anyString(), anyLong()); - } - - @Test - public void assertAsyncCompleteScheduledSystemTasksAreCheckedAgainstQueue() { - TaskModel task = new TaskModel(); - task.setTaskType(TASK_TYPE_SUB_WORKFLOW); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setTaskId("abcd"); - task.setCallbackAfterSeconds(60); - - WorkflowSystemTask workflowSystemTask = new SubWorkflow(new ObjectMapper()); - when(systemTaskRegistry.get(TASK_TYPE_SUB_WORKFLOW)).thenReturn(workflowSystemTask); - when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false); - - assertTrue(workflowSystemTask.isAsyncComplete(task)); - - assertTrue(workflowRepairService.verifyAndRepairTask(task)); - // Verify that queue message is never pushed for async complete system tasks - verify(queueDAO, times(1)).containsMessage(anyString(), anyString()); - verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong()); - } - - @Test - public void verifyAndRepairParentWorkflow() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowId("abcd"); - workflow.setParentWorkflowId("parentWorkflowId"); - - 
when(properties.getWorkflowOffsetTimeout()).thenReturn(Duration.ofSeconds(10)); - when(executionDAO.getWorkflow("abcd", true)).thenReturn(workflow); - when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false); - - workflowRepairService.verifyAndRepairWorkflowTasks("abcd"); - verify(queueDAO, times(1)).containsMessage(anyString(), anyString()); - verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java b/core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java deleted file mode 100644 index 25105423b..000000000 --- a/core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.sync.local; - -import java.util.concurrent.TimeUnit; - -import org.junit.Test; -import org.springframework.boot.test.context.runner.ApplicationContextRunner; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class LocalOnlyLockTest { - - // Lock can be global since it uses global cache internally - private final LocalOnlyLock localOnlyLock = new LocalOnlyLock(); - - @Test - public void testLockUnlock() { - localOnlyLock.acquireLock("a", 100, 1000, TimeUnit.MILLISECONDS); - assertEquals(localOnlyLock.cache().estimatedSize(), 1); - assertEquals(localOnlyLock.cache().get("a").availablePermits(), 0); - assertEquals(localOnlyLock.scheduledFutures().size(), 1); - localOnlyLock.releaseLock("a"); - assertEquals(localOnlyLock.scheduledFutures().size(), 0); - assertEquals(localOnlyLock.cache().get("a").availablePermits(), 1); - localOnlyLock.deleteLock("a"); - assertEquals(localOnlyLock.cache().estimatedSize(), 0); - } - - @Test(timeout = 10 * 1000) - public void testLockTimeout() { - localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS); - assertTrue(localOnlyLock.acquireLock("d", 100, 1000, TimeUnit.MILLISECONDS)); - assertFalse(localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS)); - assertEquals(localOnlyLock.scheduledFutures().size(), 2); - localOnlyLock.releaseLock("c"); - localOnlyLock.releaseLock("d"); - assertEquals(localOnlyLock.scheduledFutures().size(), 0); - } - - @Test(timeout = 10 * 1000) - public void testLockLeaseTime() { - for (int i = 
0; i < 10; i++) { - localOnlyLock.acquireLock("a", 1000, 100, TimeUnit.MILLISECONDS); - } - localOnlyLock.acquireLock("a"); - assertEquals(0, localOnlyLock.cache().get("a").availablePermits()); - localOnlyLock.releaseLock("a"); - } - - @Test(timeout = 10 * 1000) - public void testLockLeaseWithRelease() throws Exception { - localOnlyLock.acquireLock("b", 1000, 1000, TimeUnit.MILLISECONDS); - localOnlyLock.releaseLock("b"); - - // Wait for lease to run out and also call release - Thread.sleep(2000); - - localOnlyLock.acquireLock("b"); - assertEquals(0, localOnlyLock.cache().get("b").availablePermits()); - localOnlyLock.releaseLock("b"); - } - - @Test - public void testRelease() { - localOnlyLock.releaseLock("x54as4d2;23'4"); - localOnlyLock.releaseLock("x54as4d2;23'4"); - assertEquals(1, localOnlyLock.cache().get("x54as4d2;23'4").availablePermits()); - } - - @Test - public void testLockConfiguration() { - new ApplicationContextRunner() - .withPropertyValues("conductor.workflow-execution-lock.type=local_only") - .withUserConfiguration(LocalOnlyLockConfiguration.class) - .run( - context -> { - LocalOnlyLock lock = context.getBean(LocalOnlyLock.class); - assertNotNull(lock); - }); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtilsTest.java deleted file mode 100644 index ccd0bcbee..000000000 --- a/core/src/test/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtilsTest.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; - -import org.apache.commons.lang3.StringUtils; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.util.unit.DataSize; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.TerminateWorkflowException; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class ExternalPayloadStorageUtilsTest { - - private ExternalPayloadStorage externalPayloadStorage; - private ExternalStorageLocation location; - - @Autowired private ObjectMapper objectMapper; 
- - // Subject - private ExternalPayloadStorageUtils externalPayloadStorageUtils; - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void setup() { - externalPayloadStorage = mock(ExternalPayloadStorage.class); - ConductorProperties properties = mock(ConductorProperties.class); - location = new ExternalStorageLocation(); - location.setPath("some/test/path"); - - when(properties.getWorkflowInputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10L)); - when(properties.getMaxWorkflowInputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10240L)); - when(properties.getWorkflowOutputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10L)); - when(properties.getMaxWorkflowOutputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10240L)); - when(properties.getTaskInputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L)); - when(properties.getMaxTaskInputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10240L)); - when(properties.getTaskOutputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L)); - when(properties.getMaxTaskOutputPayloadSizeThreshold()) - .thenReturn(DataSize.ofKilobytes(10240L)); - - externalPayloadStorageUtils = - new ExternalPayloadStorageUtils(externalPayloadStorage, properties, objectMapper); - } - - @Test - public void testDownloadPayload() throws IOException { - String path = "test/payload"; - - Map payload = new HashMap<>(); - payload.put("key1", "value1"); - payload.put("key2", 200); - byte[] payloadBytes = objectMapper.writeValueAsString(payload).getBytes(); - when(externalPayloadStorage.download(path)) - .thenReturn(new ByteArrayInputStream(payloadBytes)); - - Map result = externalPayloadStorageUtils.downloadPayload(path); - assertNotNull(result); - assertEquals(payload, result); - } - - @SuppressWarnings("unchecked") - @Test - public void testUploadTaskPayload() throws IOException { - AtomicInteger uploadCount = new AtomicInteger(0); 
- - InputStream stream = - com.netflix.conductor.core.utils.ExternalPayloadStorageUtilsTest.class - .getResourceAsStream("/payload.json"); - Map payload = objectMapper.readValue(stream, Map.class); - - when(externalPayloadStorage.getLocation( - ExternalPayloadStorage.Operation.WRITE, - ExternalPayloadStorage.PayloadType.TASK_INPUT, - "")) - .thenReturn(location); - doAnswer( - invocation -> { - uploadCount.incrementAndGet(); - return null; - }) - .when(externalPayloadStorage) - .upload(anyString(), any(), anyLong()); - - TaskModel task = new TaskModel(); - task.setInputData(payload); - externalPayloadStorageUtils.verifyAndUpload( - task, ExternalPayloadStorage.PayloadType.TASK_INPUT); - assertTrue(StringUtils.isNotEmpty(task.getExternalInputPayloadStoragePath())); - assertFalse(task.getInputData().isEmpty()); - assertEquals(1, uploadCount.get()); - assertNotNull(task.getExternalInputPayloadStoragePath()); - } - - @SuppressWarnings("unchecked") - @Test - public void testUploadWorkflowPayload() throws IOException { - AtomicInteger uploadCount = new AtomicInteger(0); - - InputStream stream = - com.netflix.conductor.core.utils.ExternalPayloadStorageUtilsTest.class - .getResourceAsStream("/payload.json"); - Map payload = objectMapper.readValue(stream, Map.class); - - when(externalPayloadStorage.getLocation( - ExternalPayloadStorage.Operation.WRITE, - ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT, - "")) - .thenReturn(location); - doAnswer( - invocation -> { - uploadCount.incrementAndGet(); - return null; - }) - .when(externalPayloadStorage) - .upload(anyString(), any(), anyLong()); - - WorkflowModel workflow = new WorkflowModel(); - WorkflowDef def = new WorkflowDef(); - def.setName("name"); - def.setVersion(1); - workflow.setOutput(payload); - workflow.setWorkflowDefinition(def); - externalPayloadStorageUtils.verifyAndUpload( - workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT); - 
assertTrue(StringUtils.isNotEmpty(workflow.getExternalOutputPayloadStoragePath())); - assertFalse(workflow.getOutput().isEmpty()); - assertEquals(1, uploadCount.get()); - assertNotNull(workflow.getExternalOutputPayloadStoragePath()); - } - - @Test - public void testUploadHelper() { - AtomicInteger uploadCount = new AtomicInteger(0); - String path = "some/test/path.json"; - ExternalStorageLocation location = new ExternalStorageLocation(); - location.setPath(path); - - when(externalPayloadStorage.getLocation(any(), any(), any())).thenReturn(location); - doAnswer( - invocation -> { - uploadCount.incrementAndGet(); - return null; - }) - .when(externalPayloadStorage) - .upload(anyString(), any(), anyLong()); - - assertEquals( - path, - externalPayloadStorageUtils.uploadHelper( - new byte[] {}, 10L, ExternalPayloadStorage.PayloadType.TASK_OUTPUT)); - assertEquals(1, uploadCount.get()); - } - - @Test - public void testFailTaskWithInputPayload() { - TaskModel task = new TaskModel(); - task.setInputData(new HashMap<>()); - - expectedException.expect(TerminateWorkflowException.class); - externalPayloadStorageUtils.failTask( - task, ExternalPayloadStorage.PayloadType.TASK_INPUT, "error"); - assertNotNull(task); - assertTrue(task.getInputData().isEmpty()); - } - - @Test - public void testFailTaskWithOutputPayload() { - TaskModel task = new TaskModel(); - task.setOutputData(new HashMap<>()); - - expectedException.expect(TerminateWorkflowException.class); - externalPayloadStorageUtils.failTask( - task, ExternalPayloadStorage.PayloadType.TASK_OUTPUT, "error"); - assertNotNull(task); - assertTrue(task.getOutputData().isEmpty()); - } - - @Test - public void testFailWorkflowWithInputPayload() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setInput(new HashMap<>()); - - expectedException.expect(TerminateWorkflowException.class); - externalPayloadStorageUtils.failWorkflow( - workflow, ExternalPayloadStorage.PayloadType.TASK_INPUT, "error"); - assertNotNull(workflow); - 
assertTrue(workflow.getInput().isEmpty()); - assertEquals(WorkflowModel.Status.FAILED, workflow.getStatus()); - } - - @Test - public void testFailWorkflowWithOutputPayload() { - WorkflowModel workflow = new WorkflowModel(); - workflow.setOutput(new HashMap<>()); - - expectedException.expect(TerminateWorkflowException.class); - externalPayloadStorageUtils.failWorkflow( - workflow, ExternalPayloadStorage.PayloadType.TASK_OUTPUT, "error"); - assertNotNull(workflow); - assertTrue(workflow.getOutput().isEmpty()); - assertEquals(WorkflowModel.Status.FAILED, workflow.getStatus()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java deleted file mode 100644 index bc467b1a2..000000000 --- a/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class JsonUtilsTest { - - private JsonUtils jsonUtils; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setup() { - jsonUtils = new JsonUtils(objectMapper); - } - - @Test - public void testArray() { - List list = new LinkedList<>(); - Map map = new HashMap<>(); - map.put("externalId", "[{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}]"); - map.put("name", "conductor"); - map.put("version", 2); - list.add(map); - - //noinspection unchecked - map = (Map) list.get(0); - assertTrue(map.get("externalId") instanceof String); - - int before = list.size(); - jsonUtils.expand(list); - assertEquals(before, list.size()); - - //noinspection unchecked - map = (Map) list.get(0); - assertTrue(map.get("externalId") instanceof ArrayList); - } - - @Test - public void testMap() { - 
Map map = new HashMap<>(); - map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}"); - map.put("name", "conductor"); - map.put("version", 2); - - assertTrue(map.get("externalId") instanceof String); - - jsonUtils.expand(map); - - assertTrue(map.get("externalId") instanceof LinkedHashMap); - } - - @Test - public void testMultiLevelMap() { - Map parentMap = new HashMap<>(); - parentMap.put("requestId", "abcde"); - parentMap.put("status", "PROCESSED"); - - Map childMap = new HashMap<>(); - childMap.put("path", "test/path"); - childMap.put("type", "VIDEO"); - - Map grandChildMap = new HashMap<>(); - grandChildMap.put("duration", "370"); - grandChildMap.put("passed", "true"); - - childMap.put("metadata", grandChildMap); - parentMap.put("asset", childMap); - - Object jsonObject = jsonUtils.expand(parentMap); - assertNotNull(jsonObject); - } - - // This test verifies that the types of the elements in the input are maintained upon expanding - // the JSON object - @Test - public void testTypes() throws Exception { - String map = - "{\"requestId\":\"1375128656908832001\",\"workflowId\":\"fc147e1d-5408-4d41-b066-53cb2e551d0e\"," - + "\"inner\":{\"num\":42,\"status\":\"READY\"}}"; - jsonUtils.expand(map); - - Object jsonObject = jsonUtils.expand(map); - assertNotNull(jsonObject); - assertTrue(jsonObject instanceof LinkedHashMap); - assertTrue(((LinkedHashMap) jsonObject).get("requestId") instanceof String); - assertTrue(((LinkedHashMap) jsonObject).get("workflowId") instanceof String); - assertTrue(((LinkedHashMap) jsonObject).get("inner") instanceof LinkedHashMap); - assertTrue( - ((LinkedHashMap) ((LinkedHashMap) jsonObject).get("inner")).get("num") - instanceof Integer); - assertTrue( - ((LinkedHashMap) ((LinkedHashMap) jsonObject).get("inner")) - .get("status") - instanceof String); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java 
b/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java deleted file mode 100644 index 9e5234001..000000000 --- a/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java +++ /dev/null @@ -1,330 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicReference; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -@SuppressWarnings("rawtypes") -public class ParametersUtilsTest { - - private ParametersUtils parametersUtils; - private JsonUtils jsonUtils; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void setup() { - parametersUtils = new ParametersUtils(objectMapper); - jsonUtils = new JsonUtils(objectMapper); - } - - @Test - public void testReplace() throws Exception { - Map map = new HashMap<>(); - map.put("name", "conductor"); - map.put("version", 2); - 
map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}"); - - Map input = new HashMap<>(); - input.put("k1", "${$.externalId}"); - input.put("k4", "${name}"); - input.put("k5", "${version}"); - - Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class); - - Map replaced = parametersUtils.replace(input, jsonObj); - assertNotNull(replaced); - - assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1")); - assertEquals("conductor", replaced.get("k4")); - assertEquals(2, replaced.get("k5")); - } - - @Test - public void testReplaceWithArrayExpand() { - List list = new LinkedList<>(); - Map map = new HashMap<>(); - map.put("externalId", "[{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}]"); - map.put("name", "conductor"); - map.put("version", 2); - list.add(map); - jsonUtils.expand(list); - - Map input = new HashMap<>(); - input.put("k1", "${$..externalId}"); - input.put("k2", "${$[0].externalId[0].taskRefName}"); - input.put("k3", "${__json_externalId.taskRefName}"); - input.put("k4", "${$[0].name}"); - input.put("k5", "${$[0].version}"); - - Map replaced = parametersUtils.replace(input, list); - assertNotNull(replaced); - assertEquals(replaced.get("k2"), "t001"); - assertNull(replaced.get("k3")); - assertEquals(replaced.get("k4"), "conductor"); - assertEquals(replaced.get("k5"), 2); - } - - @Test - public void testReplaceWithMapExpand() { - Map map = new HashMap<>(); - map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}"); - map.put("name", "conductor"); - map.put("version", 2); - jsonUtils.expand(map); - - Map input = new HashMap<>(); - input.put("k1", "${$.externalId}"); - input.put("k2", "${externalId.taskRefName}"); - input.put("k4", "${name}"); - input.put("k5", "${version}"); - - Map replaced = parametersUtils.replace(input, map); - assertNotNull(replaced); - assertEquals("t001", replaced.get("k2")); - assertNull(replaced.get("k3")); - 
assertEquals("conductor", replaced.get("k4")); - assertEquals(2, replaced.get("k5")); - } - - @Test - public void testReplaceConcurrent() throws ExecutionException, InterruptedException { - ExecutorService executorService = Executors.newFixedThreadPool(2); - - AtomicReference generatedId = new AtomicReference<>("test-0"); - Map input = new HashMap<>(); - Map payload = new HashMap<>(); - payload.put("event", "conductor:TEST_EVENT"); - payload.put("someId", generatedId); - input.put("payload", payload); - input.put("name", "conductor"); - input.put("version", 2); - - Map inputParams = new HashMap<>(); - inputParams.put("k1", "${payload.someId}"); - inputParams.put("k2", "${name}"); - - CompletableFuture.runAsync( - () -> { - for (int i = 0; i < 10000; i++) { - generatedId.set("test-" + i); - payload.put("someId", generatedId.get()); - Object jsonObj = null; - try { - jsonObj = - objectMapper.readValue( - objectMapper.writeValueAsString(input), - Object.class); - } catch (JsonProcessingException e) { - e.printStackTrace(); - return; - } - Map replaced = - parametersUtils.replace(inputParams, jsonObj); - assertNotNull(replaced); - assertEquals(generatedId.get(), replaced.get("k1")); - assertEquals("conductor", replaced.get("k2")); - assertNull(replaced.get("k3")); - } - }, - executorService) - .get(); - - executorService.shutdown(); - } - - // Tests ParametersUtils with Map and List input values, and verifies input map is not mutated - // by ParametersUtils. 
- @Test - public void testReplaceInputWithMapAndList() throws Exception { - Map map = new HashMap<>(); - map.put("name", "conductor"); - map.put("version", 2); - map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}"); - - Map input = new HashMap<>(); - input.put("k1", "${$.externalId}"); - input.put("k2", "${name}"); - input.put("k3", "${version}"); - input.put("k4", "${}"); - input.put("k5", "${ }"); - - Map mapValue = new HashMap<>(); - mapValue.put("name", "${name}"); - mapValue.put("version", "${version}"); - input.put("map", mapValue); - - List listValue = new ArrayList<>(); - listValue.add("${name}"); - listValue.add("${version}"); - input.put("list", listValue); - - Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class); - - Map replaced = parametersUtils.replace(input, jsonObj); - assertNotNull(replaced); - - // Verify that values are replaced correctly. - assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1")); - assertEquals("conductor", replaced.get("k2")); - assertEquals(2, replaced.get("k3")); - assertEquals("", replaced.get("k4")); - assertEquals("", replaced.get("k5")); - - Map replacedMap = (Map) replaced.get("map"); - assertEquals("conductor", replacedMap.get("name")); - assertEquals(2, replacedMap.get("version")); - - List replacedList = (List) replaced.get("list"); - assertEquals(2, replacedList.size()); - assertEquals("conductor", replacedList.get(0)); - assertEquals(2, replacedList.get(1)); - - // Verify that input map is not mutated - assertEquals("${$.externalId}", input.get("k1")); - assertEquals("${name}", input.get("k2")); - assertEquals("${version}", input.get("k3")); - - Map inputMap = (Map) input.get("map"); - assertEquals("${name}", inputMap.get("name")); - assertEquals("${version}", inputMap.get("version")); - - List inputList = (List) input.get("list"); - assertEquals(2, inputList.size()); - assertEquals("${name}", inputList.get(0)); - 
assertEquals("${version}", inputList.get(1)); - } - - @Test - public void testReplaceWithEscapedTags() throws Exception { - Map map = new HashMap<>(); - map.put("someString", "conductor"); - map.put("someNumber", 2); - - Map input = new HashMap<>(); - input.put( - "k1", - "${$.someString} $${$.someNumber}${$.someNumber} ${$.someNumber}$${$.someString}"); - input.put("k2", "$${$.someString}afterText"); - input.put("k3", "beforeText$${$.someString}"); - input.put("k4", "$${$.someString} afterText"); - input.put("k5", "beforeText $${$.someString}"); - - Map mapValue = new HashMap<>(); - mapValue.put("a", "${someString}"); - mapValue.put("b", "${someNumber}"); - mapValue.put("c", "$${someString} ${someNumber}"); - input.put("map", mapValue); - - List listValue = new ArrayList<>(); - listValue.add("${someString}"); - listValue.add("${someNumber}"); - listValue.add("${someString} $${someNumber}"); - input.put("list", listValue); - - Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class); - - Map replaced = parametersUtils.replace(input, jsonObj); - assertNotNull(replaced); - - // Verify that values are replaced correctly. 
- assertEquals("conductor ${$.someNumber}2 2${$.someString}", replaced.get("k1")); - assertEquals("${$.someString}afterText", replaced.get("k2")); - assertEquals("beforeText${$.someString}", replaced.get("k3")); - assertEquals("${$.someString} afterText", replaced.get("k4")); - assertEquals("beforeText ${$.someString}", replaced.get("k5")); - - Map replacedMap = (Map) replaced.get("map"); - assertEquals("conductor", replacedMap.get("a")); - assertEquals(2, replacedMap.get("b")); - assertEquals("${someString} 2", replacedMap.get("c")); - - List replacedList = (List) replaced.get("list"); - assertEquals(3, replacedList.size()); - assertEquals("conductor", replacedList.get(0)); - assertEquals(2, replacedList.get(1)); - assertEquals("conductor ${someNumber}", replacedList.get(2)); - - // Verify that input map is not mutated - Map inputMap = (Map) input.get("map"); - assertEquals("${someString}", inputMap.get("a")); - assertEquals("${someNumber}", inputMap.get("b")); - assertEquals("$${someString} ${someNumber}", inputMap.get("c")); - - // Verify that input list is not mutated - List inputList = (List) input.get("list"); - assertEquals(3, inputList.size()); - assertEquals("${someString}", inputList.get(0)); - assertEquals("${someNumber}", inputList.get(1)); - assertEquals("${someString} $${someNumber}", inputList.get(2)); - } - - @Test - public void getWorkflowInputHandlesNullInputTemplate() { - WorkflowDef workflowDef = new WorkflowDef(); - Map inputParams = Map.of("key", "value"); - Map workflowInput = - parametersUtils.getWorkflowInput(workflowDef, inputParams); - assertEquals("value", workflowInput.get("key")); - } - - @Test - public void getWorkflowInputFillsInTemplatedFields() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setInputTemplate(Map.of("other_key", "other_value")); - Map inputParams = new HashMap<>(Map.of("key", "value")); - Map workflowInput = - parametersUtils.getWorkflowInput(workflowDef, inputParams); - assertEquals("value", 
workflowInput.get("key")); - assertEquals("other_value", workflowInput.get("other_key")); - } - - @Test - public void getWorkflowInputPreservesExistingFieldsIfPopulated() { - WorkflowDef workflowDef = new WorkflowDef(); - String keyName = "key"; - workflowDef.setInputTemplate(Map.of(keyName, "templated_value")); - Map inputParams = new HashMap<>(Map.of(keyName, "supplied_value")); - Map workflowInput = - parametersUtils.getWorkflowInput(workflowDef, inputParams); - assertEquals("supplied_value", workflowInput.get(keyName)); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/utils/QueueUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/QueueUtilsTest.java deleted file mode 100644 index 6633c5fa8..000000000 --- a/core/src/test/java/com/netflix/conductor/core/utils/QueueUtilsTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import org.junit.Assert; -import org.junit.Test; - -public class QueueUtilsTest { - - @Test - public void queueNameWithTypeAndIsolationGroup() { - String queueNameGenerated = QueueUtils.getQueueName("tType", null, "isolationGroup", null); - String queueNameGeneratedOnlyType = QueueUtils.getQueueName("tType", null, null, null); - String queueNameGeneratedWithAllValues = - QueueUtils.getQueueName("tType", "domain", "iso", "eN"); - - Assert.assertEquals("tType-isolationGroup", queueNameGenerated); - Assert.assertEquals("tType", queueNameGeneratedOnlyType); - Assert.assertEquals("domain:tType@eN-iso", queueNameGeneratedWithAllValues); - } - - @Test - public void notIsolatedIfSeparatorNotPresent() { - String notIsolatedQueue = "notIsolated"; - Assert.assertFalse(QueueUtils.isIsolatedQueue(notIsolatedQueue)); - } - - @Test - public void testGetExecutionNameSpace() { - String executionNameSpace = QueueUtils.getExecutionNameSpace("domain:queueName@eN-iso"); - Assert.assertEquals(executionNameSpace, "eN"); - } - - @Test - public void testGetQueueExecutionNameSpaceEmpty() { - Assert.assertEquals(QueueUtils.getExecutionNameSpace("queueName"), ""); - } - - @Test - public void testGetQueueExecutionNameSpaceWithIsolationGroup() { - Assert.assertEquals( - QueueUtils.getExecutionNameSpace("domain:test@executionNameSpace-isolated"), - "executionNameSpace"); - } - - @Test - public void testGetQueueName() { - Assert.assertEquals( - "domain:taskType@eN-isolated", - QueueUtils.getQueueName("taskType", "domain", "isolated", "eN")); - } - - @Test - public void testGetTaskType() { - Assert.assertEquals("taskType", 
QueueUtils.getTaskType("domain:taskType-isolated")); - } - - @Test - public void testGetTaskTypeWithoutDomain() { - Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType-isolated")); - } - - @Test - public void testGetTaskTypeWithoutDomainAndWithoutIsolationGroup() { - Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType")); - } - - @Test - public void testGetTaskTypeWithoutDomainAndWithExecutionNameSpace() { - Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType@eN")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/core/utils/SemaphoreUtilTest.java b/core/src/test/java/com/netflix/conductor/core/utils/SemaphoreUtilTest.java deleted file mode 100644 index 21b03e190..000000000 --- a/core/src/test/java/com/netflix/conductor/core/utils/SemaphoreUtilTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.utils; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.stream.IntStream; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -@SuppressWarnings("ToArrayCallWithZeroLengthArrayArgument") -public class SemaphoreUtilTest { - - @Test - public void testBlockAfterAvailablePermitsExhausted() throws Exception { - int threads = 5; - ExecutorService executorService = Executors.newFixedThreadPool(threads); - SemaphoreUtil semaphoreUtil = new SemaphoreUtil(threads); - - List> futuresList = new ArrayList<>(); - IntStream.range(0, threads) - .forEach( - t -> - futuresList.add( - CompletableFuture.runAsync( - () -> semaphoreUtil.acquireSlots(1), - executorService))); - - CompletableFuture allFutures = - CompletableFuture.allOf( - futuresList.toArray(new CompletableFuture[futuresList.size()])); - - allFutures.get(); - - assertEquals(0, semaphoreUtil.availableSlots()); - assertFalse(semaphoreUtil.acquireSlots(1)); - - executorService.shutdown(); - } - - @Test - public void testAllowsPollingWhenPermitBecomesAvailable() throws Exception { - int threads = 5; - ExecutorService executorService = Executors.newFixedThreadPool(threads); - SemaphoreUtil semaphoreUtil = new SemaphoreUtil(threads); - - List> futuresList = new ArrayList<>(); - IntStream.range(0, threads) - .forEach( - t -> - futuresList.add( - CompletableFuture.runAsync( - () -> 
semaphoreUtil.acquireSlots(1), - executorService))); - - CompletableFuture allFutures = - CompletableFuture.allOf( - futuresList.toArray(new CompletableFuture[futuresList.size()])); - allFutures.get(); - - assertEquals(0, semaphoreUtil.availableSlots()); - semaphoreUtil.completeProcessing(1); - - assertTrue(semaphoreUtil.availableSlots() > 0); - assertTrue(semaphoreUtil.acquireSlots(1)); - - executorService.shutdown(); - } -} diff --git a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java deleted file mode 100644 index 5133bfa0b..000000000 --- a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java +++ /dev/null @@ -1,440 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.builder.EqualsBuilder; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import static org.junit.Assert.*; - -public abstract class ExecutionDAOTest { - - protected abstract ExecutionDAO getExecutionDAO(); - - protected ConcurrentExecutionLimitDAO getConcurrentExecutionLimitDAO() { - return (ConcurrentExecutionLimitDAO) getExecutionDAO(); - } - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testTaskExceedsLimit() { - TaskDef taskDefinition = new TaskDef(); - taskDefinition.setName("task1"); - taskDefinition.setConcurrentExecLimit(1); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("task1"); - workflowTask.setTaskDefinition(taskDefinition); - workflowTask.setTaskDefinition(taskDefinition); - - List tasks = new LinkedList<>(); - for (int i = 0; i < 15; i++) { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(i + 1); - 
task.setTaskId("t_" + i); - task.setWorkflowInstanceId("workflow_" + i); - task.setReferenceTaskName("task1"); - task.setTaskDefName("task1"); - tasks.add(task); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setWorkflowTask(workflowTask); - } - - getExecutionDAO().createTasks(tasks); - assertFalse(getConcurrentExecutionLimitDAO().exceedsLimit(tasks.get(0))); - tasks.get(0).setStatus(TaskModel.Status.IN_PROGRESS); - getExecutionDAO().updateTask(tasks.get(0)); - - for (TaskModel task : tasks) { - assertTrue(getConcurrentExecutionLimitDAO().exceedsLimit(task)); - } - } - - @Test - public void testCreateTaskException() { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(UUID.randomUUID().toString()); - task.setTaskDefName("task1"); - - expectedException.expect(ApplicationException.class); - expectedException.expectMessage("Workflow instance id cannot be null"); - getExecutionDAO().createTasks(Collections.singletonList(task)); - - task.setWorkflowInstanceId(UUID.randomUUID().toString()); - expectedException.expect(ApplicationException.class); - expectedException.expectMessage("Task reference name cannot be null"); - getExecutionDAO().createTasks(Collections.singletonList(task)); - } - - @Test - public void testCreateTaskException2() { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(UUID.randomUUID().toString()); - task.setTaskDefName("task1"); - task.setWorkflowInstanceId(UUID.randomUUID().toString()); - - expectedException.expect(ApplicationException.class); - expectedException.expectMessage("Task reference name cannot be null"); - getExecutionDAO().createTasks(Collections.singletonList(task)); - } - - @Test - public void testTaskCreateDups() { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for (int i = 0; i < 3; i++) { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(i + 1); - 
task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("t" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + i); - task.setStatus(TaskModel.Status.IN_PROGRESS); - tasks.add(task); - } - - // Let's insert a retried task - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 2); - task.setReferenceTaskName("t" + 2); - task.setRetryCount(1); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + 2); - task.setStatus(TaskModel.Status.IN_PROGRESS); - tasks.add(task); - - // Duplicate task! - task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 1); - task.setReferenceTaskName("t" + 1); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + 1); - task.setStatus(TaskModel.Status.IN_PROGRESS); - tasks.add(task); - - List created = getExecutionDAO().createTasks(tasks); - assertEquals(tasks.size() - 1, created.size()); // 1 less - - Set srcIds = - tasks.stream() - .map(t -> t.getReferenceTaskName() + "." + t.getRetryCount()) - .collect(Collectors.toSet()); - Set createdIds = - created.stream() - .map(t -> t.getReferenceTaskName() + "." 
+ t.getRetryCount()) - .collect(Collectors.toSet()); - - assertEquals(srcIds, createdIds); - - List pending = getExecutionDAO().getPendingTasksByWorkflow("task0", workflowId); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0))); - - List found = getExecutionDAO().getTasks(tasks.get(0).getTaskDefName(), null, 1); - assertNotNull(found); - assertEquals(1, found.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0))); - } - - @Test - public void testTaskOps() { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for (int i = 0; i < 3; i++) { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(TaskModel.Status.IN_PROGRESS); - tasks.add(task); - } - - for (int i = 0; i < 3; i++) { - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("x" + workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId("x" + workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(TaskModel.Status.IN_PROGRESS); - getExecutionDAO().createTasks(Collections.singletonList(task)); - } - - List created = getExecutionDAO().createTasks(tasks); - assertEquals(tasks.size(), created.size()); - - List pending = - getExecutionDAO().getPendingTasksForTaskType(tasks.get(0).getTaskDefName()); - assertNotNull(pending); - assertEquals(2, pending.size()); - // Pending list can come in any order. 
finding the one we are looking for and then - // comparing - TaskModel matching = - pending.stream() - .filter(task -> task.getTaskId().equals(tasks.get(0).getTaskId())) - .findAny() - .get(); - assertTrue(EqualsBuilder.reflectionEquals(matching, tasks.get(0))); - - for (int i = 0; i < 3; i++) { - TaskModel found = getExecutionDAO().getTask(workflowId + "_t" + i); - assertNotNull(found); - found.getOutputData().put("updated", true); - found.setStatus(TaskModel.Status.COMPLETED); - getExecutionDAO().updateTask(found); - } - - List taskIds = - tasks.stream().map(TaskModel::getTaskId).collect(Collectors.toList()); - List found = getExecutionDAO().getTasks(taskIds); - assertEquals(taskIds.size(), found.size()); - found.forEach( - task -> { - assertTrue(task.getOutputData().containsKey("updated")); - assertEquals(true, task.getOutputData().get("updated")); - boolean removed = getExecutionDAO().removeTask(task.getTaskId()); - assertTrue(removed); - }); - - found = getExecutionDAO().getTasks(taskIds); - assertTrue(found.isEmpty()); - } - - @Test - public void testPending() { - WorkflowDef def = new WorkflowDef(); - def.setName("pending_count_test"); - - WorkflowModel workflow = createTestWorkflow(); - workflow.setWorkflowDefinition(def); - - List workflowIds = generateWorkflows(workflow, 10); - long count = getExecutionDAO().getPendingWorkflowCount(def.getName()); - assertEquals(10, count); - - for (int i = 0; i < 10; i++) { - getExecutionDAO().removeFromPendingWorkflow(def.getName(), workflowIds.get(i)); - } - - count = getExecutionDAO().getPendingWorkflowCount(def.getName()); - assertEquals(0, count); - } - - @Test - public void complexExecutionTest() { - WorkflowModel workflow = createTestWorkflow(); - int numTasks = workflow.getTasks().size(); - - String workflowId = getExecutionDAO().createWorkflow(workflow); - assertEquals(workflow.getWorkflowId(), workflowId); - - List created = getExecutionDAO().createTasks(workflow.getTasks()); - 
assertEquals(workflow.getTasks().size(), created.size()); - - WorkflowModel workflowWithTasks = - getExecutionDAO().getWorkflow(workflow.getWorkflowId(), true); - assertEquals(workflowId, workflowWithTasks.getWorkflowId()); - assertEquals(numTasks, workflowWithTasks.getTasks().size()); - - WorkflowModel found = getExecutionDAO().getWorkflow(workflowId, false); - assertTrue(found.getTasks().isEmpty()); - - workflow.getTasks().clear(); - assertEquals(workflow, found); - - workflow.getInput().put("updated", true); - getExecutionDAO().updateWorkflow(workflow); - found = getExecutionDAO().getWorkflow(workflowId); - assertNotNull(found); - assertTrue(found.getInput().containsKey("updated")); - assertEquals(true, found.getInput().get("updated")); - - List running = - getExecutionDAO() - .getRunningWorkflowIds( - workflow.getWorkflowName(), workflow.getWorkflowVersion()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - workflow.setStatus(WorkflowModel.Status.RUNNING); - getExecutionDAO().updateWorkflow(workflow); - - running = - getExecutionDAO() - .getRunningWorkflowIds( - workflow.getWorkflowName(), workflow.getWorkflowVersion()); - assertNotNull(running); - assertEquals(1, running.size()); - assertEquals(workflow.getWorkflowId(), running.get(0)); - - List pending = - getExecutionDAO() - .getPendingWorkflowsByType( - workflow.getWorkflowName(), workflow.getWorkflowVersion()); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertEquals(3, pending.get(0).getTasks().size()); - pending.get(0).getTasks().clear(); - assertEquals(workflow, pending.get(0)); - - workflow.setStatus(WorkflowModel.Status.COMPLETED); - getExecutionDAO().updateWorkflow(workflow); - running = - getExecutionDAO() - .getRunningWorkflowIds( - workflow.getWorkflowName(), workflow.getWorkflowVersion()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - List bytime = - getExecutionDAO() - .getWorkflowsByType( - workflow.getWorkflowName(), - 
System.currentTimeMillis(), - System.currentTimeMillis() + 100); - assertNotNull(bytime); - assertTrue(bytime.isEmpty()); - - bytime = - getExecutionDAO() - .getWorkflowsByType( - workflow.getWorkflowName(), - workflow.getCreateTime() - 10, - workflow.getCreateTime() + 10); - assertNotNull(bytime); - assertEquals(1, bytime.size()); - } - - protected WorkflowModel createTestWorkflow() { - WorkflowDef def = new WorkflowDef(); - def.setName("Junit Workflow"); - def.setVersion(3); - def.setSchemaVersion(2); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.setCorrelationId("correlationX"); - workflow.setCreatedBy("junit_tester"); - workflow.setEndTime(200L); - - Map input = new HashMap<>(); - input.put("param1", "param1 value"); - input.put("param2", 100); - workflow.setInput(input); - - Map output = new HashMap<>(); - output.put("ouput1", "output 1 value"); - output.put("op2", 300); - workflow.setOutput(output); - - workflow.setOwnerApp("workflow"); - workflow.setParentWorkflowId("parentWorkflowId"); - workflow.setParentWorkflowTaskId("parentWFTaskId"); - workflow.setReasonForIncompletion("missing recipe"); - workflow.setReRunFromWorkflowId("re-run from id1"); - workflow.setCreateTime(90L); - workflow.setStatus(WorkflowModel.Status.FAILED); - workflow.setWorkflowId(UUID.randomUUID().toString()); - - List tasks = new LinkedList<>(); - - TaskModel task = new TaskModel(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(UUID.randomUUID().toString()); - task.setReferenceTaskName("t1"); - task.setWorkflowInstanceId(workflow.getWorkflowId()); - task.setTaskDefName("task1"); - - TaskModel task2 = new TaskModel(); - task2.setScheduledTime(2L); - task2.setSeq(2); - task2.setTaskId(UUID.randomUUID().toString()); - task2.setReferenceTaskName("t2"); - task2.setWorkflowInstanceId(workflow.getWorkflowId()); - task2.setTaskDefName("task2"); - - TaskModel task3 = new TaskModel(); - task3.setScheduledTime(2L); - 
task3.setSeq(3); - task3.setTaskId(UUID.randomUUID().toString()); - task3.setReferenceTaskName("t3"); - task3.setWorkflowInstanceId(workflow.getWorkflowId()); - task3.setTaskDefName("task3"); - - tasks.add(task); - tasks.add(task2); - tasks.add(task3); - - workflow.setTasks(tasks); - - workflow.setUpdatedBy("junit_tester"); - workflow.setUpdatedTime(800L); - - return workflow; - } - - protected List generateWorkflows(WorkflowModel base, int count) { - List workflowIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - String workflowId = UUID.randomUUID().toString(); - base.setWorkflowId(workflowId); - base.setCorrelationId("corr001"); - base.setStatus(WorkflowModel.Status.RUNNING); - getExecutionDAO().createWorkflow(base); - workflowIds.add(workflowId); - } - return workflowIds; - } -} diff --git a/core/src/test/java/com/netflix/conductor/dao/PollDataDAOTest.java b/core/src/test/java/com/netflix/conductor/dao/PollDataDAOTest.java deleted file mode 100644 index 97f4406b3..000000000 --- a/core/src/test/java/com/netflix/conductor/dao/PollDataDAOTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao; - -import java.util.List; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.PollData; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public abstract class PollDataDAOTest { - - protected abstract PollDataDAO getPollDataDAO(); - - @Test - public void testPollData() { - getPollDataDAO().updateLastPollData("taskDef", null, "workerId1"); - PollData pollData = getPollDataDAO().getPollData("taskDef", null); - assertNotNull(pollData); - assertTrue(pollData.getLastPollTime() > 0); - assertEquals(pollData.getQueueName(), "taskDef"); - assertNull(pollData.getDomain()); - assertEquals(pollData.getWorkerId(), "workerId1"); - - getPollDataDAO().updateLastPollData("taskDef", "domain1", "workerId1"); - pollData = getPollDataDAO().getPollData("taskDef", "domain1"); - assertNotNull(pollData); - assertTrue(pollData.getLastPollTime() > 0); - assertEquals(pollData.getQueueName(), "taskDef"); - assertEquals(pollData.getDomain(), "domain1"); - assertEquals(pollData.getWorkerId(), "workerId1"); - - List pData = getPollDataDAO().getPollData("taskDef"); - assertEquals(pData.size(), 2); - - pollData = getPollDataDAO().getPollData("taskDef", "domain2"); - assertNull(pollData); - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/EventServiceTest.java b/core/src/test/java/com/netflix/conductor/service/EventServiceTest.java deleted file mode 100644 index 620bb4e30..000000000 --- a/core/src/test/java/com/netflix/conductor/service/EventServiceTest.java +++ 
/dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.core.events.EventQueues; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class EventServiceTest { - - @TestConfiguration - static class TestEventConfiguration { - - @Bean - public EventService eventService() { - MetadataService metadataService = mock(MetadataService.class); - EventQueues eventQueues = mock(EventQueues.class); - return new EventServiceImpl(metadataService, eventQueues); - } - } - - @Autowired private EventService eventService; - - @Test(expected = ConstraintViolationException.class) - public void testAddEventHandler() { - try { - eventService.addEventHandler(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("EventHandler 
cannot be null.")); - throw ex; - } - fail("eventService.addEventHandler did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateEventHandler() { - try { - eventService.updateEventHandler(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("EventHandler cannot be null.")); - throw ex; - } - fail("eventService.updateEventHandler did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testRemoveEventHandlerStatus() { - try { - eventService.removeEventHandlerStatus(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("EventHandler name cannot be null or empty.")); - throw ex; - } - fail("eventService.removeEventHandlerStatus did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testGetEventHandlersForEvent() { - try { - eventService.getEventHandlersForEvent(null, false); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Event cannot be null or empty.")); - throw ex; - } - fail("eventService.getEventHandlersForEvent did not throw ConstraintViolationException !"); - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/ExecutionServiceTest.java b/core/src/test/java/com/netflix/conductor/service/ExecutionServiceTest.java deleted file mode 100644 index 34bedeec3..000000000 --- a/core/src/test/java/com/netflix/conductor/service/ExecutionServiceTest.java +++ 
/dev/null @@ -1,303 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.time.Duration; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.dal.ExecutionDAOFacade; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.dao.QueueDAO; - -import static junit.framework.TestCase.assertEquals; -import static org.mockito.Mockito.when; - -@RunWith(SpringRunner.class) -public class ExecutionServiceTest { - - @Mock private WorkflowExecutor workflowExecutor; - @Mock private ExecutionDAOFacade executionDAOFacade; - @Mock private QueueDAO queueDAO; - @Mock private ConductorProperties conductorProperties; - @Mock private ExternalPayloadStorage externalPayloadStorage; - @Mock private SystemTaskRegistry systemTaskRegistry; - - private ExecutionService executionService; - - private Workflow workflow1; - private Workflow workflow2; - private Task taskWorkflow1; - private 
Task taskWorkflow2; - private final List sort = Collections.singletonList("Sort"); - - @Before - public void setup() { - when(conductorProperties.getTaskExecutionPostponeDuration()) - .thenReturn(Duration.ofSeconds(60)); - executionService = - new ExecutionService( - workflowExecutor, - executionDAOFacade, - queueDAO, - conductorProperties, - externalPayloadStorage, - systemTaskRegistry); - WorkflowDef workflowDef = new WorkflowDef(); - workflow1 = new Workflow(); - workflow1.setWorkflowId("wf1"); - workflow1.setWorkflowDefinition(workflowDef); - workflow2 = new Workflow(); - workflow2.setWorkflowId("wf2"); - workflow2.setWorkflowDefinition(workflowDef); - taskWorkflow1 = new Task(); - taskWorkflow1.setTaskId("task1"); - taskWorkflow1.setWorkflowInstanceId("wf1"); - taskWorkflow2 = new Task(); - taskWorkflow2.setTaskId("task2"); - taskWorkflow2.setWorkflowInstanceId("wf2"); - } - - @Test - public void workflowSearchTest() { - when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - workflow1.getWorkflowId(), workflow2.getWorkflowId()))); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenReturn(workflow2); - SearchResult searchResult = - executionService.search("query", "*", 0, 2, sort); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(2, searchResult.getResults().size()); - assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - assertEquals(workflow2.getWorkflowId(), searchResult.getResults().get(1).getWorkflowId()); - } - - @Test - public void workflowSearchExceptionTest() { - when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - workflow1.getWorkflowId(), workflow2.getWorkflowId()))); - 
when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenThrow(new RuntimeException()); - SearchResult searchResult = - executionService.search("query", "*", 0, 2, sort); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(1, searchResult.getResults().size()); - assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - } - - @Test - public void workflowSearchV2Test() { - when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - workflow1.getWorkflowId(), workflow2.getWorkflowId()))); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenReturn(workflow2); - SearchResult searchResult = executionService.searchV2("query", "*", 0, 2, sort); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(Arrays.asList(workflow1, workflow2), searchResult.getResults()); - } - - @Test - public void workflowSearchV2ExceptionTest() { - when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - workflow1.getWorkflowId(), workflow2.getWorkflowId()))); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenThrow(new RuntimeException()); - SearchResult searchResult = executionService.searchV2("query", "*", 0, 2, sort); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(Collections.singletonList(workflow1), searchResult.getResults()); - } - - @Test - public void workflowSearchByTasksTest() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - 
taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenReturn(workflow2); - SearchResult searchResult = - executionService.searchWorkflowByTasks("query", "*", 0, 2, sort); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(2, searchResult.getResults().size()); - assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - assertEquals(workflow2.getWorkflowId(), searchResult.getResults().get(1).getWorkflowId()); - } - - @Test - public void workflowSearchByTasksExceptionTest() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())) - .thenThrow(new RuntimeException()); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - SearchResult searchResult = - executionService.searchWorkflowByTasks("query", "*", 0, 2, sort); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(1, searchResult.getResults().size()); - assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - } - - @Test - public void workflowSearchByTasksV2Test() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - 
when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false)) - .thenReturn(workflow2); - SearchResult searchResult = - executionService.searchWorkflowByTasksV2("query", "*", 0, 2, sort); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(Arrays.asList(workflow1, workflow2), searchResult.getResults()); - } - - @Test - public void workflowSearchByTasksV2ExceptionTest() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())) - .thenThrow(new RuntimeException()); - when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false)) - .thenReturn(workflow1); - SearchResult searchResult = - executionService.searchWorkflowByTasksV2("query", "*", 0, 2, sort); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(Collections.singletonList(workflow1), searchResult.getResults()); - } - - @Test - public void TaskSearchTest() { - List taskList = Arrays.asList(taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()); - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn(new SearchResult<>(2, taskList)); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2); - SearchResult searchResult = - executionService.getSearchTasks("query", "*", 0, 2, "Sort"); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(2, searchResult.getResults().size()); - assertEquals(taskWorkflow1.getTaskId(), searchResult.getResults().get(0).getTaskId()); - 
assertEquals(taskWorkflow2.getTaskId(), searchResult.getResults().get(1).getTaskId()); - } - - @Test - public void TaskSearchExceptionTest() { - List taskList = Arrays.asList(taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()); - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn(new SearchResult<>(2, taskList)); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())) - .thenThrow(new RuntimeException()); - SearchResult searchResult = - executionService.getSearchTasks("query", "*", 0, 2, "Sort"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(1, searchResult.getResults().size()); - assertEquals(taskWorkflow1.getTaskId(), searchResult.getResults().get(0).getTaskId()); - } - - @Test - public void TaskSearchV2Test() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2); - SearchResult searchResult = - executionService.getSearchTasksV2("query", "*", 0, 2, "Sort"); - assertEquals(2, searchResult.getTotalHits()); - assertEquals(Arrays.asList(taskWorkflow1, taskWorkflow2), searchResult.getResults()); - } - - @Test - public void TaskSearchV2ExceptionTest() { - when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort)) - .thenReturn( - new SearchResult<>( - 2, - Arrays.asList( - taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId()))); - when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1); - when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())) - .thenThrow(new RuntimeException()); - SearchResult searchResult = - executionService.getSearchTasksV2("query", "*", 0, 2, "Sort"); - assertEquals(1, 
searchResult.getTotalHits()); - assertEquals(Collections.singletonList(taskWorkflow1), searchResult.getResults()); - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java b/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java deleted file mode 100644 index bc508a7b0..000000000 --- a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.dao.MetadataDAO; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class MetadataServiceTest { - - 
@TestConfiguration - static class TestMetadataConfiguration { - - @Bean - public MetadataDAO metadataDAO() { - return mock(MetadataDAO.class); - } - - @Bean - public ConductorProperties properties() { - ConductorProperties properties = mock(ConductorProperties.class); - when(properties.isOwnerEmailMandatory()).thenReturn(true); - return properties; - } - - @Bean - public MetadataService metadataService( - MetadataDAO metadataDAO, ConductorProperties properties) { - EventHandlerDAO eventHandlerDAO = mock(EventHandlerDAO.class); - return new MetadataServiceImpl(metadataDAO, eventHandlerDAO, properties); - } - } - - @Autowired private MetadataDAO metadataDAO; - - @Autowired private MetadataService metadataService; - - @Test(expected = ConstraintViolationException.class) - public void testRegisterTaskDefNoName() { - TaskDef taskDef = new TaskDef(); - try { - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskDef name cannot be null or empty")); - assertTrue(messages.contains("ownerEmail cannot be empty")); - throw ex; - } - fail("metadataService.registerTaskDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testRegisterTaskDefNull() { - try { - metadataService.registerTaskDef(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskDefList cannot be empty or null")); - throw ex; - } - fail("metadataService.registerTaskDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testRegisterTaskDefNoResponseTimeout() { - try { - TaskDef 
taskDef = new TaskDef(); - taskDef.setName("somename"); - taskDef.setOwnerEmail("sample@test.com"); - taskDef.setResponseTimeoutSeconds(0); - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue( - messages.contains( - "TaskDef responseTimeoutSeconds: 0 should be minimum 1 second")); - throw ex; - } - fail("metadataService.registerTaskDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateTaskDefNameNull() { - try { - TaskDef taskDef = new TaskDef(); - metadataService.updateTaskDef(taskDef); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskDef name cannot be null or empty")); - assertTrue(messages.contains("ownerEmail cannot be empty")); - throw ex; - } - fail("metadataService.updateTaskDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateTaskDefNull() { - try { - metadataService.updateTaskDef(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskDef cannot be null")); - throw ex; - } - fail("metadataService.updateTaskDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ApplicationException.class) - public void testUpdateTaskDefNotExisting() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setOwnerEmail("sample@test.com"); - when(metadataDAO.getTaskDef(any())).thenReturn(null); - 
metadataService.updateTaskDef(taskDef); - } - - @Test(expected = ApplicationException.class) - public void testUpdateTaskDefDaoException() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setOwnerEmail("sample@test.com"); - when(metadataDAO.getTaskDef(any())).thenReturn(null); - metadataService.updateTaskDef(taskDef); - } - - @Test - public void testRegisterTaskDef() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("somename"); - taskDef.setOwnerEmail("sample@test.com"); - taskDef.setResponseTimeoutSeconds(60 * 60); - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - verify(metadataDAO, times(1)).createTaskDef(any(TaskDef.class)); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateWorkflowDefNull() { - try { - List workflowDefList = null; - metadataService.updateWorkflowDef(workflowDefList); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowDef list name cannot be null or empty")); - throw ex; - } - fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateWorkflowDefEmptyList() { - try { - List workflowDefList = new ArrayList<>(); - metadataService.updateWorkflowDef(workflowDefList); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowDefList is empty")); - throw ex; - } - fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateWorkflowDefWithNullWorkflowDef() { - try { - List workflowDefList = new ArrayList<>(); - 
workflowDefList.add(null); - metadataService.updateWorkflowDef(workflowDefList); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowDef cannot be null")); - throw ex; - } - fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateWorkflowDefWithEmptyWorkflowDefName() { - try { - List workflowDefList = new ArrayList<>(); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(null); - workflowDef.setOwnerEmail(null); - workflowDefList.add(workflowDef); - metadataService.updateWorkflowDef(workflowDefList); - } catch (ConstraintViolationException ex) { - assertEquals(3, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(messages.contains("WorkflowTask list cannot be empty")); - assertTrue(messages.contains("ownerEmail cannot be empty")); - throw ex; - } - fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test - public void testUpdateWorkflowDef() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("somename"); - workflowDef.setOwnerEmail("sample@test.com"); - List tasks = new ArrayList<>(); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("hello"); - workflowTask.setName("hello"); - tasks.add(workflowTask); - workflowDef.setTasks(tasks); - when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef()); - metadataService.updateWorkflowDef(Collections.singletonList(workflowDef)); - verify(metadataDAO, times(1)).updateWorkflowDef(workflowDef); - } - - @Test(expected = ConstraintViolationException.class) - public void 
testRegisterWorkflowDefNoName() { - try { - WorkflowDef workflowDef = new WorkflowDef(); - metadataService.registerWorkflowDef(workflowDef); - } catch (ConstraintViolationException ex) { - assertEquals(3, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(messages.contains("WorkflowTask list cannot be empty")); - assertTrue(messages.contains("ownerEmail cannot be empty")); - throw ex; - } - fail("metadataService.registerWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testRegisterWorkflowDefInvalidName() { - try { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("invalid:name"); - workflowDef.setOwnerEmail("inavlid-email"); - metadataService.registerWorkflowDef(workflowDef); - } catch (ConstraintViolationException ex) { - assertEquals(3, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowTask list cannot be empty")); - assertTrue( - messages.contains( - "Workflow name cannot contain the following set of characters: ':'")); - assertTrue(messages.contains("ownerEmail should be valid email address")); - throw ex; - } - fail("metadataService.registerWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test - public void testRegisterWorkflowDef() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("somename"); - workflowDef.setSchemaVersion(2); - workflowDef.setOwnerEmail("sample@test.com"); - List tasks = new ArrayList<>(); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("hello"); - workflowTask.setName("hello"); - tasks.add(workflowTask); - workflowDef.setTasks(tasks); - when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef()); - 
metadataService.registerWorkflowDef(workflowDef); - verify(metadataDAO, times(1)).createWorkflowDef(workflowDef); - assertEquals(2, workflowDef.getSchemaVersion()); - } - - @Test(expected = ConstraintViolationException.class) - public void testUnregisterWorkflowDefNoName() { - try { - metadataService.unregisterWorkflowDef("", null); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Workflow name cannot be null or empty")); - assertTrue(messages.contains("Version cannot be null")); - throw ex; - } - fail("metadataService.unregisterWorkflowDef did not throw ConstraintViolationException !"); - } - - @Test - public void testUnregisterWorkflowDef() { - metadataService.unregisterWorkflowDef("somename", 111); - verify(metadataDAO, times(1)).removeWorkflowDef("somename", 111); - } - - @Test(expected = ConstraintViolationException.class) - public void testValidateEventNull() { - try { - metadataService.addEventHandler(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("EventHandler cannot be null")); - throw ex; - } - fail("metadataService.addEventHandler did not throw ConstraintViolationException !"); - } - - @Test(expected = ConstraintViolationException.class) - public void testValidateEventNoEvent() { - try { - EventHandler eventHandler = new EventHandler(); - metadataService.addEventHandler(eventHandler); - } catch (ConstraintViolationException ex) { - assertEquals(3, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Missing event handler name")); - assertTrue(messages.contains("Missing event location")); - assertTrue( - messages.contains("No 
actions specified. Please specify at-least one action")); - throw ex; - } - fail("metadataService.addEventHandler did not throw ConstraintViolationException !"); - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/TaskServiceTest.java b/core/src/test/java/com/netflix/conductor/service/TaskServiceTest.java deleted file mode 100644 index 2c54d3a31..000000000 --- a/core/src/test/java/com/netflix/conductor/service/TaskServiceTest.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.dao.QueueDAO; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class TaskServiceTest { - - @TestConfiguration - static class TestTaskConfiguration { - - @Bean - public ExecutionService executionService() { - return mock(ExecutionService.class); - } - - @Bean - public TaskService taskService(ExecutionService executionService) { - QueueDAO queueDAO = mock(QueueDAO.class); - return new TaskServiceImpl(executionService, queueDAO); - } - } - - @Autowired private TaskService taskService; - - @Autowired private ExecutionService executionService; - - @Test(expected = ConstraintViolationException.class) - public 
void testPoll() { - try { - taskService.poll(null, null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testBatchPoll() { - try { - taskService.batchPoll(null, null, null, null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testGetTasks() { - try { - taskService.getTasks(null, null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testGetPendingTaskForWorkflow() { - try { - taskService.getPendingTaskForWorkflow(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - assertTrue(messages.contains("TaskReferenceName cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateTask() { - try { - taskService.updateTask(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - 
assertTrue(messages.contains("TaskResult cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testUpdateTaskInValid() { - try { - TaskResult taskResult = new TaskResult(); - taskService.updateTask(taskResult); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Workflow Id cannot be null or empty")); - assertTrue(messages.contains("Task ID cannot be null or empty")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testAckTaskReceived() { - try { - taskService.ackTaskReceived(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskId cannot be null or empty.")); - throw ex; - } - } - - @Test - public void testAckTaskReceivedMissingWorkerId() { - String ack = taskService.ackTaskReceived("abc", null); - assertNotNull(ack); - } - - @Test(expected = ConstraintViolationException.class) - public void testLog() { - try { - taskService.log(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testGetTaskLogs() { - try { - taskService.getTaskLogs(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = 
ConstraintViolationException.class) - public void testGetTask() { - try { - taskService.getTask(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRemoveTaskFromQueue() { - try { - taskService.removeTaskFromQueue(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskId cannot be null or empty.")); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testGetPollData() { - try { - taskService.getPollData(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRequeuePendingTask() { - try { - taskService.requeuePendingTask(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("TaskType cannot be null or empty.")); - throw ex; - } - } - - @Test - public void testSearch() { - SearchResult searchResult = - new SearchResult<>(2, List.of(mock(TaskSummary.class), mock(TaskSummary.class))); - when(executionService.getSearchTasks("query", "*", 0, 2, "Sort")).thenReturn(searchResult); - assertEquals(searchResult, taskService.search(0, 2, "Sort", "*", 
"query")); - } - - @Test - public void testSearchV2() { - SearchResult searchResult = - new SearchResult<>(2, List.of(mock(Task.class), mock(Task.class))); - when(executionService.getSearchTasksV2("query", "*", 0, 2, "Sort")) - .thenReturn(searchResult); - assertEquals(searchResult, taskService.searchV2(0, 2, "Sort", "*", "query")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/WorkflowBulkServiceTest.java b/core/src/test/java/com/netflix/conductor/service/WorkflowBulkServiceTest.java deleted file mode 100644 index 20dcbfd4f..000000000 --- a/core/src/test/java/com/netflix/conductor/service/WorkflowBulkServiceTest.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.core.execution.WorkflowExecutor; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class WorkflowBulkServiceTest { - - @TestConfiguration - static class TestWorkflowBulkConfiguration { - - @Bean - WorkflowExecutor workflowExecutor() { - return mock(WorkflowExecutor.class); - } - - @Bean - public WorkflowBulkService workflowBulkService(WorkflowExecutor workflowExecutor) { - return new WorkflowBulkServiceImpl(workflowExecutor); - } - } - - @Autowired private WorkflowExecutor workflowExecutor; - - @Autowired private WorkflowBulkService workflowBulkService; - - @Test(expected = ConstraintViolationException.class) - public void testPauseWorkflowNull() { - try { - workflowBulkService.pauseWorkflow(null); - } catch 
(ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowIds list cannot be null.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testPauseWorkflowWithInvalidListSize() { - try { - List list = new ArrayList<>(1001); - for (int i = 0; i < 1002; i++) { - list.add("test"); - } - workflowBulkService.pauseWorkflow(list); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue( - messages.contains( - "Cannot process more than 1000 workflows. Please use multiple requests.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testResumeWorkflowNull() { - try { - workflowBulkService.resumeWorkflow(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowIds list cannot be null.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRestartWorkflowNull() { - try { - workflowBulkService.restart(null, false); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowIds list cannot be null.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRetryWorkflowNull() { - try { - workflowBulkService.retry(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - 
assertTrue(messages.contains("WorkflowIds list cannot be null.")); - throw ex; - } - } - - @Test - public void testRetryWorkflowSuccessful() { - // When - workflowBulkService.retry(Collections.singletonList("anyId")); - // Then - verify(workflowExecutor).retry("anyId", false); - } - - @Test(expected = ConstraintViolationException.class) - public void testTerminateNull() { - try { - workflowBulkService.terminate(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowIds list cannot be null.")); - throw ex; - } - } -} diff --git a/core/src/test/java/com/netflix/conductor/service/WorkflowServiceTest.java b/core/src/test/java/com/netflix/conductor/service/WorkflowServiceTest.java deleted file mode 100644 index b710e0e20..000000000 --- a/core/src/test/java/com/netflix/conductor/service/WorkflowServiceTest.java +++ /dev/null @@ -1,575 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.service; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.validation.ConstraintViolationException; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; - -import static com.netflix.conductor.TestUtils.getConstraintViolationMessages; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static 
org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@SuppressWarnings("SpringJavaAutowiredMembersInspection") -@RunWith(SpringRunner.class) -@EnableAutoConfiguration -public class WorkflowServiceTest { - - @TestConfiguration - static class TestWorkflowConfiguration { - - @Bean - public WorkflowExecutor workflowExecutor() { - return mock(WorkflowExecutor.class); - } - - @Bean - public ExecutionService executionService() { - return mock(ExecutionService.class); - } - - @Bean - public MetadataService metadataService() { - return mock(MetadataServiceImpl.class); - } - - @Bean - public WorkflowService workflowService( - WorkflowExecutor workflowExecutor, - ExecutionService executionService, - MetadataService metadataService) { - return new WorkflowServiceImpl(workflowExecutor, executionService, metadataService); - } - } - - @Autowired private WorkflowExecutor workflowExecutor; - - @Autowired private ExecutionService executionService; - - @Autowired private MetadataService metadataService; - - @Autowired private WorkflowService workflowService; - - @Test(expected = ConstraintViolationException.class) - public void testStartWorkflowNull() { - try { - workflowService.startWorkflow(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("StartWorkflowRequest cannot be null")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testStartWorkflowName() { - try { - Map input = new HashMap<>(); - input.put("1", "abc"); - workflowService.startWorkflow(null, 1, "abc", input); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = 
getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Workflow name cannot be null or empty")); - throw ex; - } - } - - @Test - public void testStartWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - workflowDef.setVersion(1); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName("test"); - startWorkflowRequest.setVersion(1); - - Map input = new HashMap<>(); - input.put("1", "abc"); - startWorkflowRequest.setInput(input); - String workflowID = "w112"; - - when(metadataService.getWorkflowDef("test", 1)).thenReturn(workflowDef); - when(workflowExecutor.startWorkflow( - anyString(), - anyInt(), - isNull(), - anyInt(), - anyMap(), - isNull(), - isNull(), - anyMap())) - .thenReturn(workflowID); - assertEquals("w112", workflowService.startWorkflow(startWorkflowRequest)); - } - - @Test - public void testStartWorkflowParam() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - workflowDef.setVersion(1); - - Map input = new HashMap<>(); - input.put("1", "abc"); - String workflowID = "w112"; - - when(metadataService.getWorkflowDef("test", 1)).thenReturn(workflowDef); - when(workflowExecutor.startWorkflow( - anyString(), anyInt(), anyString(), anyInt(), anyMap(), isNull())) - .thenReturn(workflowID); - assertEquals("w112", workflowService.startWorkflow("test", 1, "c123", input)); - } - - @Test(expected = ApplicationException.class) - public void testApplicationExceptionStartWorkflowMessageParam() { - try { - when(metadataService.getWorkflowDef("test", 1)).thenReturn(null); - - Map input = new HashMap<>(); - input.put("1", "abc"); - - workflowService.startWorkflow("test", 1, "c123", input); - } catch (ApplicationException ex) { - String message = "No such workflow found by name: test, version: 1"; - assertEquals(message, ex.getMessage()); - throw ex; - } - fail("ApplicationException did not throw!"); - } - - 
@Test(expected = ConstraintViolationException.class) - public void testGetWorkflowsNoName() { - try { - workflowService.getWorkflows("", "c123", true, true); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Workflow name cannot be null or empty")); - throw ex; - } - } - - @Test - public void testGetWorklfowsSingleCorrelationId() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - List workflowArrayList = Collections.singletonList(workflow); - - when(executionService.getWorkflowInstances( - anyString(), anyString(), anyBoolean(), anyBoolean())) - .thenReturn(workflowArrayList); - assertEquals(workflowArrayList, workflowService.getWorkflows("test", "c123", true, true)); - } - - @Test - public void testGetWorklfowsMultipleCorrelationId() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - List workflowArrayList = Collections.singletonList(workflow); - - List correlationIdList = Collections.singletonList("c123"); - - Map> workflowMap = new HashMap<>(); - workflowMap.put("c123", workflowArrayList); - - when(executionService.getWorkflowInstances( - anyString(), anyString(), anyBoolean(), anyBoolean())) - .thenReturn(workflowArrayList); - assertEquals( - workflowMap, workflowService.getWorkflows("test", true, true, correlationIdList)); - } - - @Test - public void testGetExecutionStatus() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - when(executionService.getExecutionStatus(anyString(), anyBoolean())).thenReturn(workflow); - assertEquals(workflow, workflowService.getExecutionStatus("w123", true)); - } - - @Test(expected = ConstraintViolationException.class) - public void testGetExecutionStatusNoWorkflowId() { - try { - workflowService.getExecutionStatus("", true); - } catch (ConstraintViolationException ex) { - 
assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ApplicationException.class) - public void testApplicationExceptionGetExecutionStatus() { - try { - when(executionService.getExecutionStatus(anyString(), anyBoolean())).thenReturn(null); - workflowService.getExecutionStatus("w123", true); - } catch (ApplicationException ex) { - String message = "Workflow with Id: w123 not found."; - assertEquals(message, ex.getMessage()); - throw ex; - } - fail("ApplicationException did not throw!"); - } - - @Test - public void testDeleteWorkflow() { - workflowService.deleteWorkflow("w123", true); - verify(executionService, times(1)).removeWorkflow(anyString(), anyBoolean()); - } - - @Test(expected = ConstraintViolationException.class) - public void testInvalidDeleteWorkflow() { - try { - workflowService.deleteWorkflow(null, true); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testInvalidPauseWorkflow() { - try { - workflowService.pauseWorkflow(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testInvalidResumeWorkflow() { - try { - workflowService.resumeWorkflow(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = 
getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testInvalidSkipTaskFromWorkflow() { - try { - SkipTaskRequest skipTaskRequest = new SkipTaskRequest(); - workflowService.skipTaskFromWorkflow(null, null, skipTaskRequest); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId name cannot be null or empty.")); - assertTrue(messages.contains("TaskReferenceName cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testInvalidWorkflowNameGetRunningWorkflows() { - try { - workflowService.getRunningWorkflows(null, 123, null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("Workflow name cannot be null or empty.")); - throw ex; - } - } - - @Test - public void testGetRunningWorkflowsTime() { - workflowService.getRunningWorkflows("test", 1, 100L, 120L); - verify(workflowExecutor, times(1)) - .getWorkflows(anyString(), anyInt(), anyLong(), anyLong()); - } - - @Test - public void testGetRunningWorkflows() { - workflowService.getRunningWorkflows("test", 1, null, null); - verify(workflowExecutor, times(1)).getRunningWorkflowIds(anyString(), anyInt()); - } - - @Test - public void testDecideWorkflow() { - workflowService.decideWorkflow("test"); - verify(workflowExecutor, times(1)).decide(anyString()); - } - - @Test - public void testPauseWorkflow() { - workflowService.pauseWorkflow("test"); - verify(workflowExecutor, times(1)).pauseWorkflow(anyString()); - } - - @Test - public void testResumeWorkflow() { - 
workflowService.resumeWorkflow("test"); - verify(workflowExecutor, times(1)).resumeWorkflow(anyString()); - } - - @Test - public void testSkipTaskFromWorkflow() { - workflowService.skipTaskFromWorkflow("test", "testTask", null); - verify(workflowExecutor, times(1)).skipTaskFromWorkflow(anyString(), anyString(), isNull()); - } - - @Test - public void testRerunWorkflow() { - RerunWorkflowRequest request = new RerunWorkflowRequest(); - workflowService.rerunWorkflow("test", request); - verify(workflowExecutor, times(1)).rerun(any(RerunWorkflowRequest.class)); - } - - @Test(expected = ConstraintViolationException.class) - public void testRerunWorkflowNull() { - try { - workflowService.rerunWorkflow(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(2, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - assertTrue(messages.contains("RerunWorkflowRequest cannot be null.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRestartWorkflowNull() { - try { - workflowService.restartWorkflow(null, false); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testRetryWorkflowNull() { - try { - workflowService.retryWorkflow(null, false); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void 
testResetWorkflowNull() { - try { - workflowService.resetWorkflow(null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test(expected = ConstraintViolationException.class) - public void testTerminateWorkflowNull() { - try { - workflowService.terminateWorkflow(null, null); - } catch (ConstraintViolationException ex) { - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue(messages.contains("WorkflowId cannot be null or empty.")); - throw ex; - } - } - - @Test - public void testRerunWorkflowReturnWorkflowId() { - RerunWorkflowRequest request = new RerunWorkflowRequest(); - String workflowId = "w123"; - when(workflowExecutor.rerun(any(RerunWorkflowRequest.class))).thenReturn(workflowId); - assertEquals(workflowId, workflowService.rerunWorkflow("test", request)); - } - - @Test - public void testRestartWorkflow() { - workflowService.restartWorkflow("w123", false); - verify(workflowExecutor, times(1)).restart(anyString(), anyBoolean()); - } - - @Test - public void testRetryWorkflow() { - workflowService.retryWorkflow("w123", false); - verify(workflowExecutor, times(1)).retry(anyString(), anyBoolean()); - } - - @Test - public void testResetWorkflow() { - workflowService.resetWorkflow("w123"); - verify(workflowExecutor, times(1)).resetCallbacksForWorkflow(anyString()); - } - - @Test - public void testTerminateWorkflow() { - workflowService.terminateWorkflow("w123", "test"); - verify(workflowExecutor, times(1)).terminateWorkflow(anyString(), anyString()); - } - - @Test - public void testSearchWorkflows() { - Workflow workflow = new Workflow(); - WorkflowDef def = new WorkflowDef(); - def.setName("name"); - def.setVersion(1); - 
workflow.setWorkflowDefinition(def); - workflow.setCorrelationId("c123"); - - WorkflowSummary workflowSummary = new WorkflowSummary(workflow); - List listOfWorkflowSummary = Collections.singletonList(workflowSummary); - - SearchResult searchResult = new SearchResult<>(100, listOfWorkflowSummary); - - when(executionService.search("*", "*", 0, 100, Collections.singletonList("asc"))) - .thenReturn(searchResult); - assertEquals(searchResult, workflowService.searchWorkflows(0, 100, "asc", "*", "*")); - assertEquals( - searchResult, - workflowService.searchWorkflows( - 0, 100, Collections.singletonList("asc"), "*", "*")); - } - - @Test - public void testSearchWorkflowsV2() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - List listOfWorkflow = Collections.singletonList(workflow); - SearchResult searchResult = new SearchResult<>(1, listOfWorkflow); - - when(executionService.searchV2("*", "*", 0, 100, Collections.singletonList("asc"))) - .thenReturn(searchResult); - assertEquals(searchResult, workflowService.searchWorkflowsV2(0, 100, "asc", "*", "*")); - assertEquals( - searchResult, - workflowService.searchWorkflowsV2( - 0, 100, Collections.singletonList("asc"), "*", "*")); - } - - @Test - public void testInvalidSizeSearchWorkflows() { - ConstraintViolationException ex = - assertThrows( - ConstraintViolationException.class, - () -> workflowService.searchWorkflows(0, 6000, "asc", "*", "*")); - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue( - messages.contains( - "Cannot return more than 5000 workflows. 
Please use pagination.")); - } - - @Test - public void testInvalidSizeSearchWorkflowsV2() { - ConstraintViolationException ex = - assertThrows( - ConstraintViolationException.class, - () -> workflowService.searchWorkflowsV2(0, 6000, "asc", "*", "*")); - assertEquals(1, ex.getConstraintViolations().size()); - Set messages = getConstraintViolationMessages(ex.getConstraintViolations()); - assertTrue( - messages.contains( - "Cannot return more than 5000 workflows. Please use pagination.")); - } - - @Test - public void testSearchWorkflowsByTasks() { - Workflow workflow = new Workflow(); - WorkflowDef def = new WorkflowDef(); - def.setName("name"); - def.setVersion(1); - workflow.setWorkflowDefinition(def); - workflow.setCorrelationId("c123"); - - WorkflowSummary workflowSummary = new WorkflowSummary(workflow); - List listOfWorkflowSummary = Collections.singletonList(workflowSummary); - SearchResult searchResult = new SearchResult<>(100, listOfWorkflowSummary); - - when(executionService.searchWorkflowByTasks( - "*", "*", 0, 100, Collections.singletonList("asc"))) - .thenReturn(searchResult); - assertEquals(searchResult, workflowService.searchWorkflowsByTasks(0, 100, "asc", "*", "*")); - assertEquals( - searchResult, - workflowService.searchWorkflowsByTasks( - 0, 100, Collections.singletonList("asc"), "*", "*")); - } - - @Test - public void testSearchWorkflowsByTasksV2() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - List listOfWorkflow = Collections.singletonList(workflow); - SearchResult searchResult = new SearchResult<>(1, listOfWorkflow); - - when(executionService.searchWorkflowByTasksV2( - "*", "*", 0, 100, Collections.singletonList("asc"))) - .thenReturn(searchResult); - assertEquals( - searchResult, workflowService.searchWorkflowsByTasksV2(0, 100, "asc", "*", "*")); - assertEquals( - searchResult, - workflowService.searchWorkflowsByTasksV2( - 0, 100, Collections.singletonList("asc"), "*", "*")); - } -} diff --git 
a/core/src/test/java/com/netflix/conductor/validations/WorkflowDefConstraintTest.java b/core/src/test/java/com/netflix/conductor/validations/WorkflowDefConstraintTest.java deleted file mode 100644 index 6e6f5d3b2..000000000 --- a/core/src/test/java/com/netflix/conductor/validations/WorkflowDefConstraintTest.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.validations; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -import org.apache.bval.jsr.ApacheValidationProvider; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.dao.MetadataDAO; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.when; - -public class WorkflowDefConstraintTest { - - private static Validator validator; - private static ValidatorFactory validatorFactory; - private MetadataDAO mockMetadataDao; - - @BeforeClass - public static void init() { - validatorFactory = - Validation.byProvider(ApacheValidationProvider.class) - .configure() - .buildValidatorFactory(); - validator = validatorFactory.getValidator(); - } - - @AfterClass - public static void close() { - validatorFactory.close(); - } - - @Before - public void setUp() { - mockMetadataDao = Mockito.mock(MetadataDAO.class); - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - 
ValidationContext.initialize(mockMetadataDao); - } - - @Test - public void testWorkflowTaskName() { - TaskDef taskDef = new TaskDef(); // name is null - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - Set> result = validator.validate(taskDef); - assertEquals(2, result.size()); - } - - @Test - public void testWorkflowTaskSimple() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("sampleWorkflow"); - workflowDef.setDescription("Sample workflow def"); - workflowDef.setOwnerEmail("sample@test.com"); - workflowDef.setVersion(2); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("fileLocation", "${workflow.input.fileLocation}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - Set> result = validator.validate(workflowDef); - assertEquals(0, result.size()); - } - - @Test - /*Testcase to check inputParam is not valid - */ - public void testWorkflowTaskInvalidInputParam() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("sampleWorkflow"); - workflowDef.setDescription("Sample workflow def"); - workflowDef.setOwnerEmail("sample@test.com"); - workflowDef.setVersion(2); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("fileLocation", "${work.input.fileLocation}"); - - workflowTask_1.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = 
Validation.buildDefaultValidatorFactory(); - validator = factory.getValidator(); - - when(mockMetadataDao.getTaskDef("work1")).thenReturn(new TaskDef()); - Set> result = validator.validate(workflowDef); - assertEquals(1, result.size()); - assertEquals( - result.iterator().next().getMessage(), - "taskReferenceName: work for given task: task_1 input value: fileLocation of input parameter: ${work.input.fileLocation} is not defined in workflow definition."); - } - - @Test - public void testWorkflowTaskReferenceNameNotUnique() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("sampleWorkflow"); - workflowDef.setDescription("Sample workflow def"); - workflowDef.setOwnerEmail("sample@test.com"); - workflowDef.setVersion(2); - - WorkflowTask workflowTask_1 = new WorkflowTask(); - workflowTask_1.setName("task_1"); - workflowTask_1.setTaskReferenceName("task_1"); - workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE); - - Map inputParam = new HashMap<>(); - inputParam.put("fileLocation", "${task_2.input.fileLocation}"); - - workflowTask_1.setInputParameters(inputParam); - - WorkflowTask workflowTask_2 = new WorkflowTask(); - workflowTask_2.setName("task_2"); - workflowTask_2.setTaskReferenceName("task_1"); - workflowTask_2.setType(TaskType.TASK_TYPE_SIMPLE); - - workflowTask_2.setInputParameters(inputParam); - - List tasks = new ArrayList<>(); - tasks.add(workflowTask_1); - tasks.add(workflowTask_2); - - workflowDef.setTasks(tasks); - - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - validator = factory.getValidator(); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - Set> result = validator.validate(workflowDef); - assertEquals(3, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "taskReferenceName: task_2 for given task: task_2 input value: fileLocation of input parameter: 
${task_2.input.fileLocation} is not defined in workflow definition.")); - assertTrue( - validationErrors.contains( - "taskReferenceName: task_2 for given task: task_1 input value: fileLocation of input parameter: ${task_2.input.fileLocation} is not defined in workflow definition.")); - assertTrue( - validationErrors.contains( - "taskReferenceName: task_1 should be unique across tasks for a given workflowDefinition: sampleWorkflow")); - } -} diff --git a/core/src/test/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraintTest.java b/core/src/test/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraintTest.java deleted file mode 100644 index e1172e0dc..000000000 --- a/core/src/test/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraintTest.java +++ /dev/null @@ -1,598 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.validations; - -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; -import javax.validation.executable.ExecutableValidator; - -import org.apache.bval.jsr.ApacheValidationProvider; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mockito; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.tasks.Terminate; -import com.netflix.conductor.dao.MetadataDAO; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.when; - -public class WorkflowTaskTypeConstraintTest { - - private static Validator validator; - private static ValidatorFactory validatorFactory; - private MetadataDAO mockMetadataDao; - - @BeforeClass - public static void init() { - validatorFactory = - Validation.byProvider(ApacheValidationProvider.class) - .configure() - .buildValidatorFactory(); - validator = validatorFactory.getValidator(); - } - - @AfterClass - public 
static void close() { - validatorFactory.close(); - } - - @Before - public void setUp() { - mockMetadataDao = Mockito.mock(MetadataDAO.class); - ValidationContext.initialize(mockMetadataDao); - } - - @Test - public void testWorkflowTaskMissingReferenceName() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setDynamicForkTasksParam("taskList"); - workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam"); - workflowTask.setTaskReferenceName(null); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - assertEquals( - result.iterator().next().getMessage(), - "WorkflowTask taskReferenceName name cannot be empty or null"); - } - - @Test - public void testWorkflowTaskTestSetType() throws NoSuchMethodException { - WorkflowTask workflowTask = createSampleWorkflowTask(); - - Method method = WorkflowTask.class.getMethod("setType", String.class); - Object[] parameterValues = {""}; - - ExecutableValidator executableValidator = validator.forExecutables(); - - Set> result = - executableValidator.validateParameters(workflowTask, method, parameterValues); - - assertEquals(1, result.size()); - assertEquals( - result.iterator().next().getMessage(), "WorkTask type cannot be null or empty"); - } - - @Test - public void testWorkflowTaskTypeEvent() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("EVENT"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - assertEquals( - result.iterator().next().getMessage(), - "sink field is required for taskType: EVENT taskName: encode"); - } - - @Test - public void testWorkflowTaskTypeDynamic() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("DYNAMIC"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, 
result.size()); - assertEquals( - result.iterator().next().getMessage(), - "dynamicTaskNameParam field is required for taskType: DYNAMIC taskName: encode"); - } - - @Test - public void testWorkflowTaskTypeDecision() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("DECISION"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(2, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "decisionCases should have atleast one task for taskType: DECISION taskName: encode")); - assertTrue( - validationErrors.contains( - "caseValueParam or caseExpression field is required for taskType: DECISION taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeDoWhile() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("DO_WHILE"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(2, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "loopExpression field is required for taskType: DO_WHILE taskName: encode")); - assertTrue( - validationErrors.contains( - "loopover field is required for taskType: DO_WHILE taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeDecisionWithCaseParam() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("DECISION"); - workflowTask.setCaseExpression("$.valueCheck == null ? 
'true': 'false'"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "decisionCases should have atleast one task for taskType: DECISION taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeForJoinDynamic() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(2, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "dynamicForkTasksInputParamName field is required for taskType: FORK_JOIN_DYNAMIC taskName: encode")); - assertTrue( - validationErrors.contains( - "dynamicForkTasksParam field is required for taskType: FORK_JOIN_DYNAMIC taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeForJoinDynamicLegacy() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - workflowTask.setDynamicForkJoinTasksParam("taskList"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeForJoinDynamicWithForJoinTaskParam() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - workflowTask.setDynamicForkJoinTasksParam("taskList"); - workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = 
validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: FORK_JOIN_DYNAMIC taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeForJoinDynamicValid() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - workflowTask.setDynamicForkTasksParam("ForkTasksParam"); - workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeForJoinDynamicWithForJoinTaskParamAndInputTaskParam() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - workflowTask.setDynamicForkJoinTasksParam("taskList"); - workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam"); - workflowTask.setDynamicForkTasksParam("ForkTasksParam"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: FORK_JOIN_DYNAMIC taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeHTTP() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("HTTP"); - workflowTask.getInputParameters().put("http_request", "http://www.netflix.com"); - - 
when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeHTTPWithHttpParamMissing() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("HTTP"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "inputParameters.http_request field is required for taskType: HTTP taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeHTTPWithHttpParamInTaskDef() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("HTTP"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("encode"); - taskDef.getInputTemplate().put("http_request", "http://www.netflix.com"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeHTTPWithHttpParamInTaskDefAndWorkflowTask() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("HTTP"); - workflowTask.getInputParameters().put("http_request", "http://www.netflix.com"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("encode"); - taskDef.getInputTemplate().put("http_request", "http://www.netflix.com"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeFork() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("FORK_JOIN"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - 
- Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "forkTasks should have atleast one task for taskType: FORK_JOIN taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeSubworkflowMissingSubworkflowParam() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("SUB_WORKFLOW"); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "subWorkflowParam field is required for taskType: SUB_WORKFLOW taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeSubworkflow() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("SUB_WORKFLOW"); - - SubWorkflowParams subWorkflowTask = new SubWorkflowParams(); - workflowTask.setSubWorkflowParam(subWorkflowTask); - - Set> result = validator.validate(workflowTask); - assertEquals(2, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue(validationErrors.contains("SubWorkflowParams name cannot be null")); - assertTrue(validationErrors.contains("SubWorkflowParams name cannot be empty")); - } - - @Test - public void testWorkflowTaskTypeTerminateWithoutTerminationStatus() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - workflowTask.setName("terminate_task"); - - workflowTask.setInputParameters( - Collections.singletonMap( - Terminate.getTerminationWorkflowOutputParameter(), "blah")); - List validationErrors = getErrorMessages(workflowTask); - - Assert.assertEquals(1, validationErrors.size()); - Assert.assertEquals( - "terminate 
task must have an terminationStatus parameter and must be set to COMPLETED or FAILED, taskName: terminate_task", - validationErrors.get(0)); - } - - @Test - public void testWorkflowTaskTypeTerminateWithInvalidStatus() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - workflowTask.setName("terminate_task"); - - workflowTask.setInputParameters( - Collections.singletonMap(Terminate.getTerminationStatusParameter(), "blah")); - - List validationErrors = getErrorMessages(workflowTask); - - Assert.assertEquals(1, validationErrors.size()); - Assert.assertEquals( - "terminate task must have an terminationStatus parameter and must be set to COMPLETED or FAILED, taskName: terminate_task", - validationErrors.get(0)); - } - - @Test - public void testWorkflowTaskTypeTerminateOptional() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - workflowTask.setName("terminate_task"); - - workflowTask.setInputParameters( - Collections.singletonMap(Terminate.getTerminationStatusParameter(), "COMPLETED")); - workflowTask.setOptional(true); - - List validationErrors = getErrorMessages(workflowTask); - - Assert.assertEquals(1, validationErrors.size()); - Assert.assertEquals( - "terminate task cannot be optional, taskName: terminate_task", - validationErrors.get(0)); - } - - @Test - public void testWorkflowTaskTypeTerminateValid() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - workflowTask.setName("terminate_task"); - - workflowTask.setInputParameters( - Collections.singletonMap(Terminate.getTerminationStatusParameter(), "COMPLETED")); - - List validationErrors = getErrorMessages(workflowTask); - - Assert.assertEquals(0, validationErrors.size()); - } - - @Test - public void testWorkflowTaskTypeKafkaPublish() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - 
workflowTask.setType("KAFKA_PUBLISH"); - workflowTask.getInputParameters().put("kafka_request", "testInput"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeKafkaPublishWithRequestParamMissing() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("KAFKA_PUBLISH"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "inputParameters.kafka_request field is required for taskType: KAFKA_PUBLISH taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeKafkaPublishWithKafkaParamInTaskDef() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("KAFKA_PUBLISH"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("encode"); - taskDef.getInputTemplate().put("kafka_request", "test_kafka_request"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeKafkaPublishWithRequestParamInTaskDefAndWorkflowTask() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("KAFKA_PUBLISH"); - workflowTask.getInputParameters().put("kafka_request", "http://www.netflix.com"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("encode"); - taskDef.getInputTemplate().put("kafka_request", "test Kafka Request"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void 
testWorkflowTaskTypeJSONJQTransform() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("JSON_JQ_TRANSFORM"); - workflowTask.getInputParameters().put("queryExpression", "."); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - @Test - public void testWorkflowTaskTypeJSONJQTransformWithQueryParamMissing() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("JSON_JQ_TRANSFORM"); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef()); - - Set> result = validator.validate(workflowTask); - assertEquals(1, result.size()); - - List validationErrors = new ArrayList<>(); - - result.forEach(e -> validationErrors.add(e.getMessage())); - - assertTrue( - validationErrors.contains( - "inputParameters.queryExpression field is required for taskType: JSON_JQ_TRANSFORM taskName: encode")); - } - - @Test - public void testWorkflowTaskTypeJSONJQTransformWithQueryParamInTaskDef() { - WorkflowTask workflowTask = createSampleWorkflowTask(); - workflowTask.setType("JSON_JQ_TRANSFORM"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("encode"); - taskDef.getInputTemplate().put("queryExpression", "."); - - when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef); - - Set> result = validator.validate(workflowTask); - assertEquals(0, result.size()); - } - - private List getErrorMessages(WorkflowTask workflowTask) { - Set> result = validator.validate(workflowTask); - List validationErrors = new ArrayList<>(); - result.forEach(e -> validationErrors.add(e.getMessage())); - - return validationErrors; - } - - private WorkflowTask createSampleWorkflowTask() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("encode"); - workflowTask.setTaskReferenceName("encode"); - workflowTask.setType("FORK_JOIN_DYNAMIC"); - Map inputParam = new HashMap<>(); - 
inputParam.put("fileLocation", "${workflow.input.fileLocation}"); - workflowTask.setInputParameters(inputParam); - return workflowTask; - } -} diff --git a/core/src/test/resources/completed.json b/core/src/test/resources/completed.json deleted file mode 100644 index 38baf37e9..000000000 --- a/core/src/test/resources/completed.json +++ /dev/null @@ -1,3788 +0,0 @@ -{ - "ownerApp": "cpeworkflowtests", - "createTime": 1547430586952, - "updateTime": 1547430613550, - "status": "COMPLETED", - "endTime": 1547430613550, - "workflowId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "tasks": [ - { - "taskType": "perf_task_1", - "status": "COMPLETED", - "inputData": { - "mod": "0", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_1", - "retryCount": 0, - "seq": 1, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_1", - "scheduledTime": 1547430586967, - "startTime": 1547430589848, - "endTime": 1547430589873, - "updateTime": 1547430613560, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "485fdbdf-9f49-4879-9471-4722225e5613", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-0618a1a5e9526c9a1", - "outputData": { - "mod": "8", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - 
"subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_1", - "taskReferenceName": "perf_task_1", - "inputParameters": { - "mod": "workflow.input.mod", - "oddEven": "workflow.input.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389709, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_1", - "description": "perf_task_1", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - 
"asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 2881, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:49:867 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_1,1", - "01/14/19, 01:49:49:867 : Starting to execute perf_task_1, id=485fdbdf-9f49-4879-9471-4722225e5613", - "01/14/19, 01:49:49:867 : failure probability is 0.3066777 against 0.0", - "01/14/19, 01:49:49:868 : Marking task completed" - ] - }, - { - "taskType": "perf_task_10", - "status": "COMPLETED", - "inputData": { - "taskToExecute": "perf_task_10" - }, - "referenceTaskName": "perf_task_2", - "retryCount": 0, - "seq": 2, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_10", - "scheduledTime": 1547430589900, - "startTime": 1547430590465, - "endTime": 1547430590499, - "updateTime": 1547430613572, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "14988072-378d-4b6c-a596-09db9c88c5d1", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-07f2166099c597efe", - "outputData": { - "mod": "0", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - 
"name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_10", - "taskReferenceName": "perf_task_2", - "inputParameters": { - "taskToExecute": "workflow.input.task2Name" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389226, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_10", - "description": "perf_task_10", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - 
"asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 565, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:50:489 : Starting to execute perf_task_10, id=14988072-378d-4b6c-a596-09db9c88c5d1", - "01/14/19, 01:49:50:489 : failure probability is 0.040783882 against 0.0", - "01/14/19, 01:49:50:489 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_2,1", - "01/14/19, 01:49:50:490 : Marking task completed" - ] - }, - { - "taskType": "perf_task_3", - "status": "COMPLETED", - "inputData": { - "mod": "0", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_3", - "retryCount": 0, - "seq": 3, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_3", - "scheduledTime": 1547430590531, - "startTime": 1547430591460, - "endTime": 1547430591488, - "updateTime": 1547430613582, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "91b6ba4c-c414-4cb1-a2e7-18edd7aa22fd", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-0618a1a5e9526c9a1", - "outputData": { - "mod": "9", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - 
"name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_3", - "taskReferenceName": "perf_task_3", - "inputParameters": { - "mod": "perf_task_2.output.mod", - "oddEven": "perf_task_2.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389814, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_3", - "description": "perf_task_3", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, 
- "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 929, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:51:477 : Starting to execute perf_task_3, id=91b6ba4c-c414-4cb1-a2e7-18edd7aa22fd", - "01/14/19, 01:49:51:477 : failure probability is 0.9401053 against 0.0", - "01/14/19, 01:49:51:477 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_3,1", - "01/14/19, 01:49:51:479 : Marking task completed" - ] - }, - { - "taskType": "HTTP", - "status": "COMPLETED", - "inputData": { - "http_request": { - "uri": "/wfe_perf/workflow/_search?q=status:RUNNING&size=0&devint", - "method": "GET", - "vipAddress": "es_conductor.netflix.com" - } - }, - "referenceTaskName": "get_es_1", - "retryCount": 0, - "seq": 4, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "get_from_es", - "scheduledTime": 1547430591524, - "startTime": 1547430591961, - "endTime": 1547430592238, - "updateTime": 1547430613601, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "b8095fef-0028-4fa3-a2a2-6e59c224bb7d", - "callbackAfterSeconds": 0, - "workerId": "i-01815a305a47fb626", - "outputData": { - "response": { - "headers": { - "Content-Length": [ - "121" - ], - "Content-Type": [ - "application/json; charset=UTF-8" - ] - }, - "reasonPhrase": "OK", - "body": { - "took": 2, - "timed_out": false, - "_shards": { - "total": 6, - "successful": 6, - "failed": 0 - }, - "hits": { - "total": 0, - "max_score": 0, - "hits": [] - } - }, - "statusCode": 200 - } - }, - "workflowTask": { - "name": "get_from_es", - "taskReferenceName": "get_es_1", - "type": "HTTP", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, 
- "workflowPriority": 0, - "queueWaitTime": 437, - "taskDefinition": { - "present": false - }, - "taskStatus": "COMPLETED", - "logs": [] - }, - { - "taskType": "DECISION", - "status": "COMPLETED", - "inputData": { - "hasChildren": "true", - "case": "1" - }, - "referenceTaskName": "oddEvenDecision", - "retryCount": 0, - "seq": 5, - "correlationId": "1547430586940", - "pollCount": 0, - "taskDefName": "DECISION", - "scheduledTime": 1547430592280, - "startTime": 1547430592292, - "endTime": 1547430592284, - "updateTime": 1547430613614, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "5c2d843a-8320-4b6c-9765-e91bff433dba", - "callbackAfterSeconds": 0, - "outputData": { - "caseOutput": [ - "1" - ] - }, - "workflowTask": { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390494, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_4", - "description": "perf_task_4", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "perf_task_4.output.dynamicTasks", - "input": "perf_task_4.output.inputs" - }, - "type": "FORK_JOIN_DYNAMIC", - 
"dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_5", - "taskReferenceName": "perf_task_5", - "inputParameters": { - "mod": "perf_task_4.output.mod", - "oddEven": "perf_task_4.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390611, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_5", - "description": "perf_task_5", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_6", - "taskReferenceName": "perf_task_6", - "inputParameters": { - "mod": "perf_task_5.output.mod", - "oddEven": "perf_task_5.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390789, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_6", - "description": "perf_task_6", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "1": [ - { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390955, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_7", - "description": "perf_task_7", - "retryCount": 2, - "timeoutSeconds": 
600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "perf_task_7.output.mod", - "oddEven": "perf_task_7.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391122, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_8", - "description": "perf_task_8", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "perf_task_8.output.mod", - "oddEven": "perf_task_8.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391291, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_9", - "description": "perf_task_9", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "perf_task_8.output.mod" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389427, - 
"createdBy": "CPEWORKFLOW", - "name": "perf_task_12", - "description": "perf_task_12", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389276, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_13", - "description": "perf_task_13", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388963, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_15", - "description": "perf_task_15", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": 
"perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "perf_task_15.output.mod", - "oddEven": "perf_task_15.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389067, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_16", - "description": "perf_task_16", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388904, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_18", - "description": "perf_task_18", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "perf_task_18.output.mod", - "oddEven": "perf_task_18.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389173, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_19", - "description": "perf_task_19", - 
"retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390669, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_21", - "description": "perf_task_21", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "perf_task_21.output.mod", - "oddEven": "perf_task_21.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391345, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_22", - "description": "perf_task_22", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - 
"inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391074, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_24", - "description": "perf_task_24", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "perf_task_24.output.mod", - "oddEven": "perf_task_24.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391177, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_25", - "description": "perf_task_25", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "startDelay": 0, - "optional": false, - "asyncComplete": false - } - ] - }, - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 12, - "taskDefinition": { - "present": false - }, - "taskStatus": "COMPLETED", - "logs": [] - }, - { - "taskType": "perf_task_7", - "status": "COMPLETED", - "inputData": { - "mod": "9", - 
"oddEven": "1" - }, - "referenceTaskName": "perf_task_7", - "retryCount": 0, - "seq": 6, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_7", - "scheduledTime": 1547430592287, - "startTime": 1547430593603, - "endTime": 1547430593641, - "updateTime": 1547430613624, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "10efe69b-691f-49c6-9bce-42ba08ff4d2e", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "5", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - 
"sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390955, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_7", - "description": "perf_task_7", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 1316, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:53:622 : Starting to execute perf_task_7, id=10efe69b-691f-49c6-9bce-42ba08ff4d2e", - "01/14/19, 01:49:53:622 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_7,1", - "01/14/19, 01:49:53:622 : failure probability is 0.62726057 against 0.0", - "01/14/19, 01:49:53:625 : Marking task completed" - ] - }, - { - "taskType": "perf_task_8", - "status": "COMPLETED", - "inputData": { - "mod": "5", - "oddEven": "1" - }, - 
"referenceTaskName": "perf_task_8", - "retryCount": 0, - "seq": 7, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_8", - "scheduledTime": 1547430593685, - "startTime": 1547430594976, - "endTime": 1547430595009, - "updateTime": 1547430613634, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "51020906-8fe0-4993-9020-66a081847bf3", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "5", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - 
"optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "perf_task_7.output.mod", - "oddEven": "perf_task_7.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391122, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_8", - "description": "perf_task_8", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 1291, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:54:994 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_8,1", - "01/14/19, 01:49:54:994 : failure probability is 0.017497659 against 0.0", - "01/14/19, 01:49:54:994 : Starting to execute perf_task_8, id=51020906-8fe0-4993-9020-66a081847bf3", - "01/14/19, 01:49:54:995 : Marking task completed" - ] - }, - { - "taskType": "perf_task_9", - "status": "COMPLETED", - "inputData": { - "mod": "5", - "oddEven": "1" - }, - "referenceTaskName": 
"perf_task_9", - "retryCount": 0, - "seq": 8, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_9", - "scheduledTime": 1547430595069, - "startTime": 1547430596047, - "endTime": 1547430596081, - "updateTime": 1547430613642, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "c82cf62f-9f48-46c0-ae32-9bbfad57e71f", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "5", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - 
"taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "perf_task_8.output.mod", - "oddEven": "perf_task_8.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391291, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_9", - "description": "perf_task_9", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 978, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:56:065 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_9,1", - "01/14/19, 01:49:56:065 : Marking task completed", - "01/14/19, 01:49:56:065 : Starting to execute perf_task_9, id=c82cf62f-9f48-46c0-ae32-9bbfad57e71f", - "01/14/19, 01:49:56:065 : failure probability is 0.7340754 against 0.0" - ] - }, - { - "taskType": "DECISION", - "status": "COMPLETED", - "inputData": { - "hasChildren": "true", - "case": "5" - }, - "referenceTaskName": "modDecision", - 
"retryCount": 0, - "seq": 9, - "correlationId": "1547430586940", - "pollCount": 0, - "taskDefName": "DECISION", - "scheduledTime": 1547430596122, - "startTime": 1547430596133, - "endTime": 1547430596125, - "updateTime": 1547430613650, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "597b18b6-6d99-4356-b205-dbe532fc7983", - "callbackAfterSeconds": 0, - "outputData": { - "caseOutput": [ - "5" - ] - }, - "workflowTask": { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "perf_task_8.output.mod" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389427, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_12", - "description": "perf_task_12", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389276, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_13", - "description": "perf_task_13", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 
300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388963, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_15", - "description": "perf_task_15", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "perf_task_15.output.mod", - "oddEven": "perf_task_15.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389067, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_16", - "description": "perf_task_16", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - 
"version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388904, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_18", - "description": "perf_task_18", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "perf_task_18.output.mod", - "oddEven": "perf_task_18.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389173, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_19", - "description": "perf_task_19", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390669, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_21", - "description": "perf_task_21", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - 
"asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "perf_task_21.output.mod", - "oddEven": "perf_task_21.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391345, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_22", - "description": "perf_task_22", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391074, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_24", - "description": "perf_task_24", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - 
}, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "perf_task_24.output.mod", - "oddEven": "perf_task_24.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391177, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_25", - "description": "perf_task_25", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 11, - "taskDefinition": { - "present": false - }, - "taskStatus": "COMPLETED", - "logs": [] - }, - { - "taskType": "perf_task_21", - "status": "COMPLETED", - "inputData": { - "mod": "5", - "oddEven": "1" - }, - "referenceTaskName": "perf_task_21", - "retryCount": 0, - "seq": 10, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_21", - "scheduledTime": 1547430596128, - "startTime": 1547430597361, - "endTime": 1547430597400, - "updateTime": 1547430613663, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "f44f4598-7623-46db-a513-75000ccf39b8", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "2", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - 
"description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { 
- "createTime": 1547069390669, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_21", - "description": "perf_task_21", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 1233, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:49:57:378 : Starting to execute perf_task_21, id=f44f4598-7623-46db-a513-75000ccf39b8", - "01/14/19, 01:49:57:378 : failure probability is 0.88135785 against 0.0", - "01/14/19, 01:49:57:378 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_21,1", - "01/14/19, 01:49:57:383 : Marking task completed" - ] - }, - { - "taskType": "SUB_WORKFLOW", - "status": "COMPLETED", - "inputData": { - "workflowInput": {}, - "subWorkflowId": "e18f09cb-9b3e-4296-bc77-87339d2eb34c", - "subWorkflowName": "sub_flow_1", - "subWorkflowVersion": 1 - }, - "referenceTaskName": "wf3", - "retryCount": 0, - "seq": 11, - "correlationId": "1547430586940", - "pollCount": 0, - "taskDefName": "sub_workflow_x", - "scheduledTime": 1547430606665, - "startTime": 1547430597443, - "endTime": 1547430606672, - "updateTime": 1547430613674, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "37514448-8b14-4d5e-8483-0eabd89b73f6", - "callbackAfterSeconds": 0, - "outputData": { - "subWorkflowId": "e18f09cb-9b3e-4296-bc77-87339d2eb34c", - "mod": null, - "oddEven": null, - "es2statuses": [] - }, - "workflowTask": { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": 
"perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": -9222, - "taskDefinition": { - "present": false - }, - "taskStatus": "COMPLETED", - "logs": [] - }, - { - "taskType": "perf_task_22", - "status": "COMPLETED", - "inputData": { - "mod": "2", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_22", - "retryCount": 0, - "seq": 12, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_22", - "scheduledTime": 1547430606701, - "startTime": 1547430607444, - "endTime": 1547430607481, - "updateTime": 1547430613684, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "f2448612-4960-4717-84f7-6686434733fe", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "2", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - 
"taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "perf_task_21.output.mod", - "oddEven": "perf_task_21.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391345, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_22", - "description": "perf_task_22", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - 
"queueWaitTime": 743, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:50:07:462 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_22,1", - "01/14/19, 01:50:07:462 : Marking task completed", - "01/14/19, 01:50:07:462 : Starting to execute perf_task_22, id=f2448612-4960-4717-84f7-6686434733fe", - "01/14/19, 01:50:07:462 : failure probability is 0.6165708 against 0.0" - ] - }, - { - "taskType": "perf_task_28", - "status": "COMPLETED", - "inputData": { - "mod": "9", - "oddEven": "1" - }, - "referenceTaskName": "perf_task_28", - "retryCount": 0, - "seq": 13, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_28", - "scheduledTime": 1547430607541, - "startTime": 1547430608584, - "endTime": 1547430608631, - "updateTime": 1547430613694, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "f44c0a56-ae5b-4aba-ac69-c9f48ad6ecfc", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-0618a1a5e9526c9a1", - "outputData": { - "mod": "8", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": 
null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_28", - "taskReferenceName": "perf_task_28", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390042, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_28", - "description": "perf_task_28", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 
1043, - "taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:50:08:605 : Starting to execute perf_task_28, id=f44c0a56-ae5b-4aba-ac69-c9f48ad6ecfc", - "01/14/19, 01:50:08:605 : failure probability is 0.8953033 against 0.0", - "01/14/19, 01:50:08:605 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_28,1", - "01/14/19, 01:50:08:608 : Marking task completed" - ] - }, - { - "taskType": "perf_task_29", - "status": "COMPLETED", - "inputData": { - "mod": "8", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_29", - "retryCount": 0, - "seq": 14, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_29", - "scheduledTime": 1547430608681, - "startTime": 1547430611220, - "endTime": 1547430611262, - "updateTime": 1547430613702, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "ff3961e9-a7cf-454e-a5a5-31d9582fc3be", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-075e5e67066be5d52", - "outputData": { - "mod": "0", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - 
"rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_29", - "taskReferenceName": "perf_task_29", - "inputParameters": { - "mod": "perf_task_28.output.mod", - "oddEven": "perf_task_28.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390098, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_29", - "description": "perf_task_29", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 2539, - 
"taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:50:11:238 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_29,1", - "01/14/19, 01:50:11:238 : Starting to execute perf_task_29, id=ff3961e9-a7cf-454e-a5a5-31d9582fc3be", - "01/14/19, 01:50:11:238 : failure probability is 0.3055073 against 0.0", - "01/14/19, 01:50:11:240 : Marking task completed" - ] - }, - { - "taskType": "perf_task_30", - "status": "COMPLETED", - "inputData": { - "mod": "0", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_30", - "retryCount": 0, - "seq": 15, - "correlationId": "1547430586940", - "pollCount": 1, - "taskDefName": "perf_task_30", - "scheduledTime": 1547430611308, - "startTime": 1547430613454, - "endTime": 1547430613496, - "updateTime": 1547430613712, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 300, - "workflowInstanceId": "1be75865-00a1-4e2b-95c0-573c444d98d7", - "workflowType": "performance_test_1", - "taskId": "603a164f-3198-40ed-a5b6-7dd439349c25", - "callbackAfterSeconds": 0, - "workerId": "cpeworkflowtests-devint-i-0618a1a5e9526c9a1", - "outputData": { - "mod": "6", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": 
null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_30", - "taskReferenceName": "perf_task_30", - "inputParameters": { - "mod": "perf_task_29.output.mod", - "oddEven": "perf_task_29.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069392094, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_30", - "description": "perf_task_30", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 0, - "workflowPriority": 0, - "queueWaitTime": 2146, - 
"taskDefinition": { - "present": true - }, - "taskStatus": "COMPLETED", - "logs": [ - "01/14/19, 01:50:13:473 : Starting to execute perf_task_30, id=603a164f-3198-40ed-a5b6-7dd439349c25", - "01/14/19, 01:50:13:473 : Attempt 1be75865-00a1-4e2b-95c0-573c444d98d7,perf_task_30,1", - "01/14/19, 01:50:13:473 : failure probability is 0.4859264 against 0.0", - "01/14/19, 01:50:13:476 : Marking task completed" - ] - } - ], - "input": { - "mod": "0", - "oddEven": "0", - "task2Name": "perf_task_10" - }, - "output": { - "mod": "6", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - 
"inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": {}, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - }, - "joinOn": [], - "sink": null, - "optional": false, - "taskDefinition": null, - "rateLimited": null - } - ], - "attempt": 1 - }, - "workflowType": "performance_test_1", - "version": 1, - "correlationId": "1547430586940", - "schemaVersion": 1, - "workflowDefinition": { - "createTime": 1477681181098, - "updateTime": 1484162039528, - "name": "performance_test_1", - "description": "performance_test_1", - "version": 1, - "tasks": [ - { - "name": "perf_task_1", - "taskReferenceName": "perf_task_1", - "inputParameters": { - "mod": "workflow.input.mod", - "oddEven": "workflow.input.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389709, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_1", - "description": "perf_task_1", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_10", - "taskReferenceName": "perf_task_2", - "inputParameters": { - "taskToExecute": "workflow.input.task2Name" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389226, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_10", - "description": "perf_task_10", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - 
"responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_3", - "taskReferenceName": "perf_task_3", - "inputParameters": { - "mod": "perf_task_2.output.mod", - "oddEven": "perf_task_2.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389814, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_3", - "description": "perf_task_3", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "get_from_es", - "taskReferenceName": "get_es_1", - "type": "HTTP", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390494, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_4", - "description": "perf_task_4", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "perf_task_4.output.dynamicTasks", - "input": "perf_task_4.output.inputs" - }, - "type": "FORK_JOIN_DYNAMIC", - 
"dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_5", - "taskReferenceName": "perf_task_5", - "inputParameters": { - "mod": "perf_task_4.output.mod", - "oddEven": "perf_task_4.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390611, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_5", - "description": "perf_task_5", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_6", - "taskReferenceName": "perf_task_6", - "inputParameters": { - "mod": "perf_task_5.output.mod", - "oddEven": "perf_task_5.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390789, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_6", - "description": "perf_task_6", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "1": [ - { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390955, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_7", - "description": "perf_task_7", - "retryCount": 2, - "timeoutSeconds": 
600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "perf_task_7.output.mod", - "oddEven": "perf_task_7.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391122, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_8", - "description": "perf_task_8", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "perf_task_8.output.mod", - "oddEven": "perf_task_8.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391291, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_9", - "description": "perf_task_9", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "perf_task_8.output.mod" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389427, - 
"createdBy": "CPEWORKFLOW", - "name": "perf_task_12", - "description": "perf_task_12", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389276, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_13", - "description": "perf_task_13", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388963, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_15", - "description": "perf_task_15", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": 
"perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "perf_task_15.output.mod", - "oddEven": "perf_task_15.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389067, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_16", - "description": "perf_task_16", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069388904, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_18", - "description": "perf_task_18", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "perf_task_18.output.mod", - "oddEven": "perf_task_18.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069389173, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_19", - "description": "perf_task_19", - 
"retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390669, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_21", - "description": "perf_task_21", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "perf_task_21.output.mod", - "oddEven": "perf_task_21.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391345, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_22", - "description": "perf_task_22", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - 
"inputParameters": { - "mod": "perf_task_9.output.mod", - "oddEven": "perf_task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391074, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_24", - "description": "perf_task_24", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "perf_task_12.output.mod", - "oddEven": "perf_task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - }, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "perf_task_24.output.mod", - "oddEven": "perf_task_24.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069391177, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_25", - "description": "perf_task_25", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "startDelay": 0, - "optional": false, - "asyncComplete": false - } - ] - }, - "startDelay": 0, - "optional": false, - "asyncComplete": false - }, - { - "name": "perf_task_28", - "taskReferenceName": "perf_task_28", - "inputParameters": { - "mod": "perf_task_3.output.mod", - "oddEven": "perf_task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390042, - "createdBy": 
"CPEWORKFLOW", - "name": "perf_task_28", - "description": "perf_task_28", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_29", - "taskReferenceName": "perf_task_29", - "inputParameters": { - "mod": "perf_task_28.output.mod", - "oddEven": "perf_task_28.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069390098, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_29", - "description": "perf_task_29", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - }, - { - "name": "perf_task_30", - "taskReferenceName": "perf_task_30", - "inputParameters": { - "mod": "perf_task_29.output.mod", - "oddEven": "perf_task_29.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false, - "taskDefinition": { - "createTime": 1547069392094, - "createdBy": "CPEWORKFLOW", - "name": "perf_task_30", - "description": "perf_task_30", - "retryCount": 2, - "timeoutSeconds": 600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 300, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "asyncComplete": false - } - ], - "schemaVersion": 1, - "restartable": true, - "workflowStatusListenerEnabled": false - }, - "priority": 0, - "workflowName": "performance_test_1", - "workflowVersion": 1, - "startTime": 1547430586952 -} \ No newline at end of file diff --git a/core/src/test/resources/conditional_flow.json b/core/src/test/resources/conditional_flow.json deleted file mode 100644 index 
ae03402cb..000000000 --- a/core/src/test/resources/conditional_flow.json +++ /dev/null @@ -1,211 +0,0 @@ -{ - "name": "ConditionalTaskWF", - "description": "ConditionalTaskWF", - "version": 1, - "tasks": [{ - "name": "conditional", - "taskReferenceName": "conditional", - "inputParameters": { - "case": "${workflow.input.param1}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "nested": [{ - "name": "conditional2", - "taskReferenceName": "conditional2", - "inputParameters": { - "case": "${workflow.input.param2}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "one": [{ - "name": "junit_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_1", - "description": "junit_task_1", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "junit_task_3", - "taskReferenceName": "t3", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_3", - "description": "junit_task_3", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ], - "two": [{ - "name": "junit_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp3": "${workflow.input.param2}" - }, - 
"type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_2", - "description": "junit_task_2", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }] - }, - "startDelay": 0 - }], - "three": [{ - "name": "junit_task_3", - "taskReferenceName": "t31", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_3", - "description": "junit_task_3", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }] - }, - "defaultCase": [{ - "name": "junit_task_2", - "taskReferenceName": "t21", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp3": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_2", - "description": "junit_task_2", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }], - "startDelay": 0 - }, - { - "name": "finalcondition", - "taskReferenceName": "tf", - "inputParameters": { - "finalCase": "{workflow.input.finalCase}" - }, - "type": "DECISION", - "caseValueParam": "finalCase", - "decisionCases": { - "notify": [{ - 
"name": "junit_task_4", - "taskReferenceName": "junit_task_4", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_4", - "description": "junit_task_4", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }] - }, - "startDelay": 0 - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "schemaVersion": 2, - "ownerEmail": "unit@test.com" -} diff --git a/core/src/test/resources/conditional_flow_with_switch.json b/core/src/test/resources/conditional_flow_with_switch.json deleted file mode 100644 index 53d3482bd..000000000 --- a/core/src/test/resources/conditional_flow_with_switch.json +++ /dev/null @@ -1,226 +0,0 @@ -{ - "name": "ConditionalTaskWF", - "description": "ConditionalTaskWF", - "version": 1, - "tasks": [ - { - "name": "conditional", - "taskReferenceName": "conditional", - "inputParameters": { - "case": "${workflow.input.param1}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "nested": [ - { - "name": "conditional2", - "taskReferenceName": "conditional2", - "inputParameters": { - "case": "${workflow.input.param2}" - }, - "type": "SWITCH", - "evaluatorType": "javascript", - "expression": "$.case == 'one' ? 'one' : ($.case == 'two' ? 'two' : ($.case == 'three' ? 
'three' : 'other'))", - "decisionCases": { - "one": [ - { - "name": "junit_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_1", - "description": "junit_task_1", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "junit_task_3", - "taskReferenceName": "t3", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_3", - "description": "junit_task_3", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ], - "two": [ - { - "name": "junit_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp3": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_2", - "description": "junit_task_2", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ] - }, - "startDelay": 0 - } - ], - "three": [ - { - "name": "junit_task_3", - 
"taskReferenceName": "t31", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_3", - "description": "junit_task_3", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ] - }, - "defaultCase": [ - { - "name": "junit_task_2", - "taskReferenceName": "t21", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp3": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_2", - "description": "junit_task_2", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ], - "startDelay": 0 - }, - { - "name": "finalcondition", - "taskReferenceName": "tf", - "inputParameters": { - "finalCase": "{workflow.input.finalCase}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "finalCase", - "decisionCases": { - "notify": [ - { - "name": "junit_task_4", - "taskReferenceName": "junit_task_4", - "type": "SIMPLE", - "startDelay": 0, - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "junit_task_4", - "description": "junit_task_4", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, 
- "inputTemplate": {} - } - } - ] - }, - "startDelay": 0 - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "schemaVersion": 2, - "ownerEmail": "unit@test.com" -} diff --git a/core/src/test/resources/payload.json b/core/src/test/resources/payload.json deleted file mode 100644 index c13bc5d2b..000000000 --- a/core/src/test/resources/payload.json +++ /dev/null @@ -1,423 +0,0 @@ -{ - "imageType": "TEST_SAMPLE", - "filteredSourceList": { - "TEST_SAMPLE": [ - { - "sourceId": "1413900_10830", - "url": "file/location/a0bdc4d0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_50241", - "url": "file/location/cd4e00a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-55ee8663-85c2-42d3-aca2-4076707e6d4e", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-14056154-1544-4350-81db-b3751fe44777", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-0b0ae5ea-d5c5-410c-adc9-bf16d2909c2e", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-08869779-614d-417c-bfea-36a3f8f199da", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-e117db45-1c48-45d0-b751-89386eb2d81d", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0221421-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/4a009209-002f-4b58-8b96-cb2198f8ba3c" - }, - { - "sourceId": "f0252161-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/55b56298-5e7a-4949-b919-88c5c9557e8e" - }, - { - "sourceId": "f038d070-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/3c4804f4-e826-436f-90c9-52b8d9266d52" - }, - { - "sourceId": "f04e0621-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/689283a1-1816-48ef-83da-7f9ac874bf45" - }, - { - "sourceId": "f04ddf10-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/586666ae-7321-445a-80b6-323c8c241ecd" - }, - { - "sourceId": "f05950c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/31795cc4-2590-4b20-a617-deaa18301f99" - }, - { - "sourceId": "1413900_46819", - "url": "file/location/c74497a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_11177", - "url": "file/location/a231c730-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48713", - "url": "file/location/ca638ae0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48525", - "url": "file/location/ca0c9140-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_73303", - "url": "file/location/d5943a40-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_55202", - "url": "file/location/d1a4d7a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-61413adf-3c10-4484-b25d-e238df898f45", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-addca397-f050-4339-ae86-9ba8c4e1b0d5", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e4de9810-0f69-4593-8926-01ed82cbebcb", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e16e2074-7af6-4700-ab05-ca41ba9c9ab4", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-341c86f8-57a5-40e1-8842-3eb41dd9f528", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-88c2ea9b-cef7-4120-8043-b92713d8fade", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3f6a731f-3c92-4677-9923-f80b8a6be632", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-1508b871-64de-47ce-8b07-76c5cb3f3e1e", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": 
"generated-1406dce8-7b9c-4956-a7e8-78721c476ce9", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": "f0206671-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35ebee36-3072-44c5-abb5-702a5a3b1a91" - }, - { - "sourceId": "f01f5501-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d3a9133d-c681-4910-a769-8195526ae634" - }, - { - "sourceId": "f022b060-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8fc1413d-170e-4644-a554-5e0c596b225c" - }, - { - "sourceId": "f02fa8b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35bed0a2-7def-457b-bded-4f4d7d94f76e" - }, - { - "sourceId": "f031f2a0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a5a2ea1f-8d13-429c-a44d-3057d21f608a" - }, - { - "sourceId": "f0424650-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/1c599ffc-4f10-4c0b-8d9a-ae41c7256113" - }, - { - "sourceId": "f04ec970-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8404a421-e1a6-41cf-af63-a35ccb474457" - }, - { - "sourceId": "1413900_47197", - "url": "file/location/c81b6fa0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-2a63c0c8-62ea-44a4-a33b-f0b3047e8b00", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-b27face7-3589-4209-944a-5153b20c5996", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-144675b3-9321-48d2-8b5b-e19a40d30ef2", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-8cbe821e-b1fb-48ce-beb5-735319af4db6", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-ecc4ea47-9bad-4b91-97c7-35f4ea6fb479", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-c1eb9ed0-8560-4e09-a748-f926edb7cdc2", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - 
"sourceId": "generated-6bed81fd-c777-4c61-8da1-0bb7f7cf0082", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-852e5510-dd5d-4900-a614-854148fcc716", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-f4dedcb7-37c9-4ba9-ab37-64ec9be7c882", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0259691-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/721bc0de-e75f-4386-8b2e-ca84eb653596" - }, - { - "sourceId": "f02b3be1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d2043b17-8ce5-42ee-a5e4-81c68f0c4838" - }, - { - "sourceId": "f02b62f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/63931561-3b5b-4ffe-af47-da2c9de94684" - }, - { - "sourceId": "f0315660-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d99ed629-2885-4e4a-8a1b-22e487b875fa" - }, - { - "sourceId": "f0306c00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6f8e673a-7003-44aa-96b9-e2ed8a4654ff" - }, - { - "sourceId": "f033c760-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/627c00f9-14b3-4057-b6e2-0f962ad0308e" - }, - { - "sourceId": "f03526f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fafabaf9-fe58-4a9a-b555-026521aeb2fe" - }, - { - "sourceId": "f03acc41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6c9fed2c-558a-4db3-8360-659b5e8c46e4" - }, - { - "sourceId": "f0463df1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e9fb83d2-5f14-4442-92b5-67e613f2e35f" - }, - { - "sourceId": "f04fb3d0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e7a0f82f-be8d-4ada-a4b1-13e8165e08be" - }, - { - "sourceId": "f05272f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9aba488a-22b3-4932-85a7-52c461203541" - }, - { - "sourceId": "f0581841-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/457415f6-6d0c-4304-8533-0d5b43fac564" - }, - { - "sourceId": 
"generated-8fefb48c-6fde-4fd6-8f33-a1f3f3b62105", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-30c61aa5-f5bd-4077-8c32-336b87acbe96", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-d5da37db-d486-46d4-8f7d-1e0710a77eb5", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-77af26fe-9e22-48af-99e3-f63f10fbe6de", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-2e807016-3d11-4b60-bec7-c380a608b67d", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-615d02e9-62c2-43ab-9df7-753b6b8e2c22", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3e1600fd-a626-4ee6-972b-5f0187e96c38", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-1dcb208c-6a58-4334-a60c-6fb54c8a2af5", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f024ac30-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0af2107b-4231-4d23-bef3-4e417ac6c5d3" - }, - { - "sourceId": "f0282ea1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0f592681-fd23-4194-ae43-42f61c664485" - }, - { - "sourceId": "f02c4d50-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/ec46b9a3-99af-410a-af7d-726f8854909f" - }, - { - "sourceId": "f02b8a00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aed7e5da-b524-4d41-b264-28ce615ec826" - }, - { - "sourceId": "f02b14d1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/b88c9055-ab0d-4d27-a405-265ba2a15f0c" - }, - { - "sourceId": "f03044f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb8c4df9-d59e-4ac3-880e-4ea94cd880a4" - }, - { - "sourceId": "f034ffe1-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/59f3fbe8-b300-4861-9b2f-dac7b15aea7d" - }, - { - "sourceId": "f03c2bd0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/19a06d54-41ed-419d-9947-f10cd5f0d85c" - }, - { - "sourceId": "f03fae41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a9a48a62-7d62-4f67-b281-cc6fdc1e722c" - }, - { - "sourceId": "f0455390-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0aeffc0a-a5ad-46ff-abab-1b3bc6a5840a" - }, - { - "sourceId": "f04b1ff1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9a08aaed-c125-48f7-9d1d-fd11266c2b12" - }, - { - "sourceId": "f04cf4b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/17a6e0f9-aa64-411f-9af7-837c84f7443f" - }, - { - "sourceId": "f0511360-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb633c73-cb33-4806-bc08-049024644856" - }, - { - "sourceId": "f0538460-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a7012248-6769-42da-a6c8-d4b831f6efce" - }, - { - "sourceId": "f058db91-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/bcf71522-6168-48c4-86c9-995bca60ae51" - }, - { - "sourceId": "generated-adf005c4-95c1-4904-9968-09cc19a26bfe", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-c4d367a4-4cdc-412e-af79-09b227f2e3ba", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-48dba018-f884-49db-b87e-67274e244c8f", - "url": "file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "generated-26700b83-4892-420e-8b46-1ee21eba75fb", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-632f3198-c0dc-4348-974f-51684d4e443e", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-86e2dd1d-1aa4-4dbe-b37b-b488f5dd1c70", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": 
"f04134e0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/ff8f59bf-7757-4d51-a7e4-619f3e8ffaf2" - }, - { - "sourceId": "f04f65b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d66467d1-3ac6-4041-8d15-e722ee07231f" - }, - { - "sourceId": "1413900_15255", - "url": "file/location/a9e20260-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-e953493b-cbe3-4319-885e-00c82089c76c", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-65c54676-3adb-4ef0-b65e-8e2a49533cbf", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "f02ac6b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/21568877-07a5-411f-9715-5e92806c4448" - }, - { - "sourceId": "f02fcfc1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/f3b1f1a2-48d3-475d-a607-2e5a1fe532e7" - }, - { - "sourceId": "f03526f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/84a40c66-d925-4a4a-ba62-8491d26e29e9" - }, - { - "sourceId": "f03e75c1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e84c00e8-a148-46cf-9a0b-431c4c2aeb08" - }, - { - "sourceId": "f0429471-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/178de9fa-7cc8-457a-8fb6-5c080e6163ea" - }, - { - "sourceId": "f047eba0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/18d153aa-e13b-4264-ae03-f3da75eb425b" - }, - { - "sourceId": "f04fdae0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/7c843e53-8d87-47cf-bca5-1a02e7f5e33f" - }, - { - "sourceId": "f0553210-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/26bacd65-9082-4d83-9506-90e5f1ccd16a" - }, - { - "sourceId": "1413900_84904", - "url": "file/location/d8f7b090-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-84adc784-8d7d-4088-ba51-16fde57fbc21", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-9e49c58b-0b33-4daf-a39a-8fc91e302328", - "url": 
"file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "f02dd3f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8937b328-8f0d-4762-8d1f-7d7bc80c3d2e" - }, - { - "sourceId": "f03240c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aab6e386-4d59-4b40-b257-9aed12a45446" - } - ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/test.json b/core/src/test/resources/test.json deleted file mode 100644 index e2c1a8b5d..000000000 --- a/core/src/test/resources/test.json +++ /dev/null @@ -1,1277 +0,0 @@ -{ - "ownerApp": "cpeworkflowtests", - "createTime": 1505587453961, - "updateTime": 1505588471071, - "status": "RUNNING", - "endTime": 0, - "workflowId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "tasks": [ - { - "taskType": "perf_task_1", - "status": "COMPLETED", - "inputData": { - "mod": "0", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_1", - "retryCount": 0, - "seq": 1, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_1", - "scheduledTime": 1505587453972, - "startTime": 1505587455481, - "endTime": 1505587455539, - "updateTime": 1505587455539, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "3a54e268-0054-4eab-aea2-e54d1b89896c", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "5", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - 
"dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_1", - "taskReferenceName": "perf_task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 1509, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_10", - "status": "COMPLETED", - "inputData": { - "taskToExecute": "perf_task_10" - }, - "referenceTaskName": "perf_task_2", - "retryCount": 0, - "seq": 2, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_10", - "scheduledTime": 1505587455517, - "startTime": 1505587457017, - "endTime": 1505587457075, - "updateTime": 
1505587457075, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "3731c3ee-f918-42b7-8bb3-fb016fc0ecae", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "1", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": 
[], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_10", - "taskReferenceName": "perf_task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0 - }, - "queueWaitTime": 1500, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_3", - "status": "COMPLETED", - "inputData": { - "mod": "1", - "oddEven": "1" - }, - "referenceTaskName": "perf_task_3", - "retryCount": 0, - "seq": 3, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_3", - "scheduledTime": 1505587457064, - "startTime": 1505587459498, - "endTime": 1505587459560, - "updateTime": 1505587459560, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "738370d6-596f-4ae5-95bf-ca635c7f10dd", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "6", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, 
- "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_3", - "taskReferenceName": "perf_task_3", - "inputParameters": { - "mod": "${perf_task_2.output.mod}", - "oddEven": "${perf_task_2.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 2434, - "taskStatus": "COMPLETED" - }, - { - "taskType": "HTTP", - "status": "COMPLETED", - "inputData": { - "http_request": { - "uri": "/wfe_perf/workflow/_search?q=status:RUNNING&size=0&beta", - "method": "GET", - "vipAddress": "es_cpe_wfe.us-east-1.cloud.netflix.com" - } - }, - "referenceTaskName": "get_es_1", - "retryCount": 0, - "seq": 4, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "get_from_es", - "scheduledTime": 1505587459547, - "startTime": 1505587459996, - "endTime": 1505587460250, - "updateTime": 1505587460250, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": 
"46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "64b49d62-1dfb-4290-94d4-971b4d033f33", - "callbackAfterSeconds": 0, - "workerId": "i-04c53d07aba5b5e9c", - "outputData": { - "response": { - "headers": { - "Content-Length": [ - "121" - ], - "Content-Type": [ - "application/json; charset=UTF-8" - ] - }, - "reasonPhrase": "OK", - "body": { - "took": 1, - "timed_out": false, - "_shards": { - "total": 6, - "successful": 6, - "failed": 0 - }, - "hits": { - "total": 1, - "max_score": 0.0, - "hits": [] - } - }, - "statusCode": 200 - } - }, - "workflowTask": { - "name": "get_from_es", - "taskReferenceName": "get_es_1", - "type": "HTTP", - "startDelay": 0 - }, - "queueWaitTime": 449, - "taskStatus": "COMPLETED" - }, - { - "taskType": "DECISION", - "status": "COMPLETED", - "inputData": { - "hasChildren": "true", - "case": "0" - }, - "referenceTaskName": "oddEvenDecision", - "retryCount": 0, - "seq": 5, - "correlationId": "1505587453950", - "pollCount": 0, - "taskDefName": "DECISION", - "scheduledTime": 1505587460216, - "startTime": 1505587460241, - "endTime": 1505587460274, - "updateTime": 1505587460274, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "5a596a36-09eb-4a11-a952-01ab5a7c362f", - "callbackAfterSeconds": 0, - "outputData": { - "caseOutput": [ - "0" - ] - }, - "workflowTask": { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - 
"dynamicTasks": "${perf_task_4.output.dynamicTasks}", - "input": "${perf_task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0 - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0 - }, - { - "name": "perf_task_5", - "taskReferenceName": "perf_task_5", - "inputParameters": { - "mod": "${perf_task_4.output.mod}", - "oddEven": "${perf_task_4.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_6", - "taskReferenceName": "perf_task_6", - "inputParameters": { - "mod": "${perf_task_5.output.mod}", - "oddEven": "${perf_task_5.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "1": [ - { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "${perf_task_7.output.mod}", - "oddEven": "${perf_task_7.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "${perf_task_8.output.mod}", - "oddEven": "${perf_task_8.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "${perf_task_8.output.mod}" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": 
"${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "${perf_task_15.output.mod}", - "oddEven": "${perf_task_15.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "${perf_task_18.output.mod}", - "oddEven": "${perf_task_18.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - 
"oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "${perf_task_21.output.mod}", - "oddEven": "${perf_task_21.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "${perf_task_24.output.mod}", - "oddEven": "${perf_task_24.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "startDelay": 0 - } - ] - }, - "startDelay": 0 - }, - "queueWaitTime": 25, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_4", - "status": "COMPLETED", - "inputData": { - "mod": "6", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_4", - "retryCount": 0, - "seq": 6, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_4", - "scheduledTime": 1505587460234, - "startTime": 1505587463699, - "endTime": 1505587463718, - "updateTime": 1505587463718, - "startDelayInSeconds": 0, - "retried": false, - "executed": false, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "1bf3da08-9d16-4f8a-98c3-4a6efee0e03a", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "9", - "oddEven": "1", - 
"inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": 
"${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 3465, - "taskStatus": "COMPLETED" - } - ], - "input": { - "mod": "0", - "oddEven": "0", - "task2Name": "perf_task_10" - }, - "workflowType": "performance_test_1", - "version": 1, - "correlationId": "1505587453950", - "schemaVersion": 2, - "taskToDomain": { - "*": "beta" - }, - "startTime": 1505587453961, - "workflowDefinition": { - "createTime": 1477681181098, - "updateTime": 1502738273998, - "name": "performance_test_1", - "description": "performance_test_1", - "version": 1, - "tasks": [ - { - "name": "perf_task_1", - "taskReferenceName": "perf_task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "dyntask", - "taskReferenceName": "perf_task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0 - }, - { - "name": "perf_task_3", - "taskReferenceName": "perf_task_3", - "inputParameters": { - "mod": "${perf_task_2.output.mod}", - "oddEven": "${perf_task_2.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "get_from_es", - "taskReferenceName": "get_es_1", - "type": "HTTP", - "startDelay": 0 - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${perf_task_4.output.dynamicTasks}", - "input": "${perf_task_4.output.inputs}" - 
}, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0 - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0 - }, - { - "name": "perf_task_5", - "taskReferenceName": "perf_task_5", - "inputParameters": { - "mod": "${perf_task_4.output.mod}", - "oddEven": "${perf_task_4.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_6", - "taskReferenceName": "perf_task_6", - "inputParameters": { - "mod": "${perf_task_5.output.mod}", - "oddEven": "${perf_task_5.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "1": [ - { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "${perf_task_7.output.mod}", - "oddEven": "${perf_task_7.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "${perf_task_8.output.mod}", - "oddEven": "${perf_task_8.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "${perf_task_8.output.mod}" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": 
"SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "${perf_task_15.output.mod}", - "oddEven": "${perf_task_15.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "${perf_task_18.output.mod}", - "oddEven": "${perf_task_18.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - 
"subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "${perf_task_21.output.mod}", - "oddEven": "${perf_task_21.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "${perf_task_24.output.mod}", - "oddEven": "${perf_task_24.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "startDelay": 0 - } - ] - }, - "startDelay": 0 - }, - { - "name": "perf_task_28", - "taskReferenceName": "perf_task_28", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_29", - "taskReferenceName": "perf_task_29", - "inputParameters": { - "mod": "${perf_task_28.output.mod}", - "oddEven": "${perf_task_28.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_30", - "taskReferenceName": "perf_task_30", - "inputParameters": { - "mod": "${perf_task_29.output.mod}", - "oddEven": "${perf_task_29.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "schemaVersion": 2 - } -} diff --git a/dependencies.gradle b/dependencies.gradle deleted file mode 100644 index 7f49e20bf..000000000 --- a/dependencies.gradle +++ /dev/null @@ -1,55 +0,0 @@ -/* - * 
Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -/* - * Common place to define all the version dependencies - */ -ext { - revActivation = '2.0.0' - revAwaitility = '3.1.6' - revAwsSdk = '1.11.86' - revBval = '2.0.5' - revCassandra = '3.10.2' - revCassandraUnit = '3.11.2.0' - revCommonsIo = '2.7' - revDynoQueues = '2.0.20' - revElasticSearch6 = '6.8.12' - revEmbeddedRedis = '0.6' - revEurekaClient = '1.10.10' - revGroovy = '2.5.13' - revGrpc = '1.+' - revGuava = '30.0-jre' - revHamcrestAllMatchers = '1.8' - revHealth = '1.1.+' - revJAXB = '2.3.3' - revJAXRS = '2.1.1' - revJedis = '3.3.0' - revJersey = '1.19.4' - revJerseyCommon = '2.22.2' - revJsonPath = '2.4.0' - revJq = '0.0.13' - revJsr311Api = '1.1.1' - revMockServerClient = '5.12.0' - revOpenapi = '1.6.+' - revPowerMock = '2.0.9' - revProtoBuf = '3.13.0' - revProtogenAnnotations = '1.0.0' - revProtogenCodegen = '1.4.0' - revRarefiedRedis = '0.0.17' - revRedisson = '3.13.3' - revRxJava = '1.2.2' - revSpectator = '0.122.0' - revSpock = '1.3-groovy-2.5' - revSpotifyCompletableFutures = '0.3.3' - revTestContainer = '1.15.3' -} diff --git a/dependencies.lock b/dependencies.lock deleted file mode 100644 index 181a46d98..000000000 --- a/dependencies.lock +++ /dev/null @@ -1,103 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "jacocoAgent": { - "org.jacoco:org.jacoco.agent": { - "locked": "0.8.7" - } - }, - "jacocoAnt": { - "org.jacoco:org.jacoco.ant": { - "locked": "0.8.7" - } - }, - "runtimeClasspath": { - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/docker/README.md b/docker/README.md deleted file mode 100644 index 538ebae35..000000000 --- a/docker/README.md +++ /dev/null @@ -1 +0,0 @@ -[Docker 
Instructions](/docs/docs/gettingstarted/docker.md) \ No newline at end of file diff --git a/docker/ci/Dockerfile b/docker/ci/Dockerfile deleted file mode 100644 index 19a0287cf..000000000 --- a/docker/ci/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM openjdk:11-jdk - -WORKDIR /workspace/conductor -COPY . /workspace/conductor - -RUN ./gradlew clean build diff --git a/docker/docker-compose-dynomite.yaml b/docker/docker-compose-dynomite.yaml deleted file mode 100644 index 99e15af93..000000000 --- a/docker/docker-compose-dynomite.yaml +++ /dev/null @@ -1,31 +0,0 @@ -version: '2.3' - -services: - conductor-server: - environment: - - CONFIG_PROP=config.properties - links: - - dynomite:dyno1 - depends_on: - dynomite: - condition: service_healthy - - dynomite: - image: v1r3n/dynomite - networks: - - internal - ports: - - 8102:8102 - healthcheck: - test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/8102' - interval: 5s - timeout: 5s - retries: 12 - logging: - driver: "json-file" - options: - max-size: "1k" - max-file: "3" - -networks: - internal: diff --git a/docker/docker-compose-postgres.yaml b/docker/docker-compose-postgres.yaml deleted file mode 100644 index 74bcf0fa3..000000000 --- a/docker/docker-compose-postgres.yaml +++ /dev/null @@ -1,40 +0,0 @@ -version: '2.3' - -services: - conductor-server: - environment: - - CONFIG_PROP=config-postgres.properties - links: - - postgres:postgresdb - depends_on: - postgres: - condition: service_healthy - - postgres: - image: postgres - environment: - - POSTGRES_USER=conductor - - POSTGRES_PASSWORD=conductor - volumes: - - pgdata-conductor:/var/lib/postgresql/data - networks: - - internal - ports: - - 5432:5432 - healthcheck: - test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/5432' - interval: 5s - timeout: 5s - retries: 12 - logging: - driver: "json-file" - options: - max-size: "1k" - max-file: "3" - -volumes: - pgdata-conductor: - driver: local - -networks: - internal: diff --git 
a/docker/docker-compose-prometheus.yaml b/docker/docker-compose-prometheus.yaml deleted file mode 100644 index 10f8d80e4..000000000 --- a/docker/docker-compose-prometheus.yaml +++ /dev/null @@ -1,20 +0,0 @@ -version: '3' - -services: - - prometheus: - image: prom/prometheus - volumes: - - ./prometheus/:/etc/prometheus/ - command: - - '--config.file=/etc/prometheus/prometheus.yml' - ports: - - 9090:9090 - external_links: - - conductor-server:conductor-server - networks: - - internal - restart: always - -networks: - internal: \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml deleted file mode 100644 index 8b665b09f..000000000 --- a/docker/docker-compose.yaml +++ /dev/null @@ -1,76 +0,0 @@ -version: '2.3' - -services: - conductor-server: - environment: - - CONFIG_PROP=config-local.properties - image: conductor:server - build: - context: ../ - dockerfile: docker/server/Dockerfile - networks: - - internal - ports: - - 8080:8080 - healthcheck: - test: ["CMD", "curl","-I" ,"-XGET", "http://localhost:8080/health"] - interval: 60s - timeout: 30s - retries: 12 - links: - - elasticsearch:es - depends_on: - elasticsearch: - condition: service_healthy - logging: - driver: "json-file" - options: - max-size: "1k" - max-file: "3" - - conductor-ui: - environment: - - WF_SERVER=http://conductor-server:8080 - image: conductor:ui - build: - context: ../ - dockerfile: docker/ui/Dockerfile - networks: - - internal - ports: - - 5000:5000 - links: - - conductor-server - stdin_open: true - - elasticsearch: - image: elasticsearch:6.8.15 - environment: - - "ES_JAVA_OPTS=-Xms512m -Xmx1024m" - - transport.host=0.0.0.0 - - discovery.type=single-node - - xpack.security.enabled=false - volumes: - - esdata-conductor:/usr/share/elasticsearch/data - networks: - - internal - ports: - - 9200:9200 - - 9300:9300 - healthcheck: - test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/9300' - interval: 5s - timeout: 5s - retries: 12 - logging: - 
driver: "json-file" - options: - max-size: "1k" - max-file: "3" - -volumes: - esdata-conductor: - driver: local - -networks: - internal: diff --git a/docker/grpc/Makefile b/docker/grpc/Makefile deleted file mode 100644 index e111f0c63..000000000 --- a/docker/grpc/Makefile +++ /dev/null @@ -1,18 +0,0 @@ - -clean-db: - docker volume rm grpc_conductor_mysql - -compose-build: - docker-compose build - -dependencies-up: - docker-compose up -d mysql elasticsearch - -dependencies-down: - docker-compose down - -stack-up: - docker-compose up - -stack-down: - docker-compose down diff --git a/docker/grpc/docker-compose.yaml b/docker/grpc/docker-compose.yaml deleted file mode 100644 index a212bc889..000000000 --- a/docker/grpc/docker-compose.yaml +++ /dev/null @@ -1,87 +0,0 @@ -version: '2.3' - -services: - - conductor-server: - environment: - - CONFIG_PROP=config-mysql-grpc.properties - image: conductor:server - build: - context: ../../ - dockerfile: docker/server/Dockerfile - networks: - - internal - ports: - - 8080:8080 - - 8090:8090 - links: - - elasticsearch:es - depends_on: - elasticsearch: - condition: service_healthy - mysql: - condition: service_healthy - - conductor-ui: - environment: - - WF_SERVER=http://conductor-server:8080 - image: conductor:ui - build: - context: ../../ - dockerfile: docker/ui/Dockerfile - networks: - - internal - ports: - - 5000:5000 - depends_on: - - conductor-server - - mysql: - image: mysql:5.7 - environment: - MYSQL_ROOT_PASSWORD: 12345 - MYSQL_DATABASE: conductor - MYSQL_USER: conductor - MYSQL_PASSWORD: conductor - volumes: - - type: volume - source: conductor_mysql - target: /var/lib/mysql - networks: - - internal - ports: - - 3306:3306 - healthcheck: - test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/3306' - interval: 5s - timeout: 5s - retries: 12 - - elasticsearch: - image: elasticsearch:6.8.15 - environment: - - "ES_JAVA_OPTS=-Xms512m -Xmx1024m" - - transport.host=0.0.0.0 - - discovery.type=single-node - - 
xpack.security.enabled=false - networks: - - internal - ports: - - 9200:9200 - - 9300:9300 - healthcheck: - test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/9300' - interval: 5s - timeout: 5s - retries: 12 - logging: - driver: "json-file" - options: - max-size: "1k" - max-file: "3" - -volumes: - conductor_mysql: - -networks: - internal: diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile deleted file mode 100644 index 15a6d6eac..000000000 --- a/docker/server/Dockerfile +++ /dev/null @@ -1,40 +0,0 @@ -# -# conductor:server - Netflix conductor server -# - -# =========================================================================================================== -# 0. Builder stage -# =========================================================================================================== -FROM openjdk:11-jdk AS builder - -LABEL maintainer="Netflix OSS " - -# Copy the project directly onto the image -COPY . /conductor -WORKDIR /conductor - -# Build the server on run -RUN ./gradlew build -x test --stacktrace - -# =========================================================================================================== -# 1. 
Bin stage -# =========================================================================================================== -FROM openjdk:11-jre - -LABEL maintainer="Netflix OSS " - -# Make app folders -RUN mkdir -p /app/config /app/logs /app/libs - -# Copy the compiled output to new image -COPY --from=builder /conductor/docker/server/bin /app -COPY --from=builder /conductor/docker/server/config /app/config -COPY --from=builder /conductor/server/build/libs/conductor-server-*-boot.jar /app/libs - -# Copy the files for the server into the app folders -RUN chmod +x /app/startup.sh - -HEALTHCHECK --interval=60s --timeout=30s --retries=10 CMD curl -I -XGET http://localhost:8080/health || exit 1 - -CMD [ "/app/startup.sh" ] -ENTRYPOINT [ "/bin/sh"] diff --git a/docker/server/README.md b/docker/server/README.md deleted file mode 100644 index 8baafbfc0..000000000 --- a/docker/server/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Docker -## Conductor server -This Dockerfile create the conductor:server image - -## Building the image - -Run the following commands from the project root. - -`docker build -f docker/server/Dockerfile -t conductor:server .` - -## Running the conductor server - - Standalone server (interal DB): `docker run -p 8080:8080 -d -t conductor:server` - - Server (external DB required): `docker run -p 8080:8080 -d -t -e "CONFIG_PROP=config.properties" conductor:server` diff --git a/docker/server/bin/startup.sh b/docker/server/bin/startup.sh deleted file mode 100755 index 9d1b98cba..000000000 --- a/docker/server/bin/startup.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# Copyright 2021 Netflix, Inc. -#

    -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -#

    -# http://www.apache.org/licenses/LICENSE-2.0 -#

    -# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. -# - -# startup.sh - startup script for the server docker image - -echo "Starting Conductor server" - -# Start the server -cd /app/libs -echo "Property file: $CONFIG_PROP" -echo $CONFIG_PROP -export config_file= - -if [ -z "$CONFIG_PROP" ]; - then - echo "Using an in-memory instance of conductor"; - export config_file=/app/config/config-local.properties - else - echo "Using '$CONFIG_PROP'"; - export config_file=/app/config/$CONFIG_PROP -fi - -echo "Using java options config: $JAVA_OPTS" - -java ${JAVA_OPTS} -jar -DCONDUCTOR_CONFIG_FILE=$config_file conductor-server-*-boot.jar 2>&1 | tee -a /app/logs/server.log diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties deleted file mode 100755 index 6e5071ea7..000000000 --- a/docker/server/config/config-local.properties +++ /dev/null @@ -1,36 +0,0 @@ -# Servers. -conductor.grpc-server.enabled=false - -# Database persistence model. -conductor.db.type=memory - -# Dynomite Cluster details. -# format is host:port:rack separated by semicolon -conductor.redis.hosts=dyno1:8102:us-east-1c - -# Namespace for the keys stored in Dynomite/Redis -conductor.redis.workflowNamespacePrefix=conductor - -# Namespace prefix for the dyno queues -conductor.redis.queueNamespacePrefix=conductor_queues - -# No. of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# By default with dynomite, we want the repairservice enabled -conductor.app.workflowRepairServiceEnabled=true - -# Non-quorum port used to connect to local redis. Used by dyno-queues. 
-# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. -conductor.redis.queuesNonQuorumPort=22122 - -# Elastic search instance indexing is disabled. -conductor.indexing.enabled=false -conductor.elasticsearch.url=http://es:9200 -conductor.elasticsearch.indexReplicasCount=0 - -# Load sample kitchen sink workflow -loadSample=true - -conductor.elasticsearch.clusterHealthColor=yellow diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties deleted file mode 100755 index 2582b4d89..000000000 --- a/docker/server/config/config-mysql-grpc.properties +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright 2021 Netflix, Inc. -#

    -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -#

    -# http://www.apache.org/licenses/LICENSE-2.0 -#

    -# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. -# - -# Servers. -conductor.grpc-server.enabled=true - -# Database persistence model. -conductor.db.type=mysql - -spring.datasource.url=jdbc:mysql://mysql:3306/conductor -spring.datasource.username=conductor -spring.datasource.password=conductor - -# Hikari pool sizes are -1 by default and prevent startup -spring.datasource.hikari.maximum-pool-size=10 -spring.datasource.hikari.minimum-idle=2 - -# Elastic search instance indexing is enabled. -conductor.indexing.enabled=true - -# Transport address to elasticsearch -conductor.elasticsearch.url=http://es:9200 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties deleted file mode 100755 index 760039830..000000000 --- a/docker/server/config/config-mysql.properties +++ /dev/null @@ -1,25 +0,0 @@ -# Servers. -conductor.grpc-server.enabled=false - -# Database persistence type. -conductor.db.type=mysql - -spring.datasource.url=jdbc:mysql://mysql:3306/conductor -spring.datasource.username=conductor -spring.datasource.password=conductor - -# Hikari pool sizes are -1 by default and prevent startup -spring.datasource.hikari.maximum-pool-size=10 -spring.datasource.hikari.minimum-idle=2 - -# Elastic search instance indexing is enabled. 
-conductor.indexing.enabled=true - -# Transport address to elasticsearch -conductor.elasticsearch.url=http://es:9200 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/server/config/config-postgres.properties b/docker/server/config/config-postgres.properties deleted file mode 100755 index 2a27a5cac..000000000 --- a/docker/server/config/config-postgres.properties +++ /dev/null @@ -1,25 +0,0 @@ -# Servers. -conductor.grpc-server.enabled=false - -# Database persistence type. -conductor.db.type=postgres - -spring.datasource.url=jdbc:postgresql://postgres:5432/conductor -spring.datasource.username=conductor -spring.datasource.password=conductor - -# Hikari pool sizes are -1 by default and prevent startup -spring.datasource.hikari.maximum-pool-size=10 -spring.datasource.hikari.minimum-idle=2 - -# Elastic search instance indexing is enabled. -conductor.indexing.enabled=true - -# Transport address to elasticsearch -conductor.elasticsearch.url=http://es:9200 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties deleted file mode 100755 index 55124c78c..000000000 --- a/docker/server/config/config.properties +++ /dev/null @@ -1,54 +0,0 @@ -# Servers. -conductor.grpc-server.enabled=false - -# Database persistence type. -conductor.db.type=dynomite - -# Dynomite Cluster details. -# format is host:port:rack separated by semicolon -conductor.redis.hosts=dyno1:8102:us-east-1c - -# Dynomite cluster name -conductor.redis.clusterName=dyno1 - -# Namespace for the keys stored in Dynomite/Redis -conductor.redis.workflowNamespacePrefix=conductor - -# Namespace prefix for the dyno queues -conductor.redis.queueNamespacePrefix=conductor_queues - -# No. 
of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# By default with dynomite, we want the repairservice enabled -conductor.app.workflowRepairServiceEnabled=true - -# Non-quorum port used to connect to local redis. Used by dyno-queues. -# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. -conductor.redis.queuesNonQuorumPort=22122 - -# Elastic search instance indexing is enabled. -conductor.indexing.enabled=true - -# Transport address to elasticsearch -conductor.elasticsearch.url=http://es:9200 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor -#conductor.event-queues.amqp.queueType=classic -#conductor.event-queues.amqp.sequentialMsgProcessing=true - -# Additional modules for metrics collection exposed via logger (optional) -# conductor.metrics-logger.enabled=true -# conductor.metrics-logger.reportPeriodSeconds=15 - -# Additional modules for metrics collection exposed to Prometheus (optional) -# conductor.metrics-prometheus.enabled=true -# management.endpoints.web.exposure.include=prometheus - -# To enable Workflow/Task Summary Input/Output JSON Serialization, use the following: -# conductor.app.summary-input-output-json-serialization.enabled=true - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/server/config/log4j-file-appender.properties b/docker/server/config/log4j-file-appender.properties deleted file mode 100644 index 99405bdfd..000000000 --- a/docker/server/config/log4j-file-appender.properties +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright 2020 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -log4j.rootLogger=INFO,console,file - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - -log4j.appender.file=org.apache.log4j.RollingFileAppender -log4j.appender.file.File=/app/logs/conductor.log -log4j.appender.file.MaxFileSize=10MB -log4j.appender.file.MaxBackupIndex=10 -log4j.appender.file.layout=org.apache.log4j.PatternLayout -log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - -# Dedicated file appender for metrics -log4j.appender.fileMetrics=org.apache.log4j.RollingFileAppender -log4j.appender.fileMetrics.File=/app/logs/metrics.log -log4j.appender.fileMetrics.MaxFileSize=10MB -log4j.appender.fileMetrics.MaxBackupIndex=10 -log4j.appender.fileMetrics.layout=org.apache.log4j.PatternLayout -log4j.appender.fileMetrics.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - -log4j.logger.ConductorMetrics=INFO,console,fileMetrics -log4j.additivity.ConductorMetrics=false - diff --git a/docker/server/config/log4j.properties b/docker/server/config/log4j.properties deleted file mode 100644 index bb249b00d..000000000 --- a/docker/server/config/log4j.properties +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Set root logger level to DEBUG and its only appender to A1. -log4j.rootLogger=INFO, A1 - -# A1 is set to be a ConsoleAppender. -log4j.appender.A1=org.apache.log4j.ConsoleAppender - -# A1 uses PatternLayout. -log4j.appender.A1.layout=org.apache.log4j.PatternLayout -log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n diff --git a/docker/serverAndUI/Dockerfile b/docker/serverAndUI/Dockerfile deleted file mode 100644 index 6d3cbbdbd..000000000 --- a/docker/serverAndUI/Dockerfile +++ /dev/null @@ -1,63 +0,0 @@ -# -# conductor:serverAndUI - Combined Netflix conductor server & UI -# -# =========================================================================================================== -# 0. Builder stage -# =========================================================================================================== -FROM openjdk:11-jdk AS builder -LABEL maintainer="Netflix OSS " - -# Install Node -SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - \ - && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \ - && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \ - && apt-get update -qq \ - && apt-get install -qq --no-install-recommends \ - build-essential \ - nodejs \ - yarn \ - && apt-get upgrade -qq \ - && rm -rf /var/lib/apt/lists/* - -# Copy the project onto the builder image -COPY . 
/conductor - -# Build the server -WORKDIR /conductor -RUN ./gradlew build -x test - -# Build the client -WORKDIR /conductor/ui -RUN yarn install && yarn build - -# =========================================================================================================== -# 1. Bin stage -# =========================================================================================================== - -FROM nginx:alpine -RUN apk add openjdk11-jre - -LABEL maintainer="Netflix OSS " - -# Make app folders -RUN mkdir -p /app/config /app/logs /app/libs - -# Copy the compiled output to new image -COPY --from=builder /conductor/docker/server/bin /app -COPY --from=builder /conductor/docker/server/config /app/config -COPY --from=builder /conductor/server/build/libs/conductor-server-*-boot.jar /app/libs - -# Copy compiled UI assets to nginx www directory -WORKDIR /usr/share/nginx/html -RUN rm -rf ./* -COPY --from=builder /conductor/ui/build . -COPY --from=builder /conductor/docker/serverAndUI/nginx/nginx.conf /etc/nginx/conf.d/default.conf - -# Copy the files for the server into the app folders -RUN chmod +x /app/startup.sh - -HEALTHCHECK --interval=60s --timeout=30s --retries=10 CMD curl -I -XGET http://localhost:8080/health || exit 1 - -CMD [ "/app/startup.sh" ] -ENTRYPOINT [ "/bin/sh"] diff --git a/docker/serverAndUI/README.md b/docker/serverAndUI/README.md deleted file mode 100644 index 275d74add..000000000 --- a/docker/serverAndUI/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Docker -## Conductor server and UI -This Dockerfile create the conductor:serverAndUI image - -## Building the image -`docker build -t conductor:serverAndUI .` - -## Running the conductor server - - Standalone server (interal DB): `docker run -p 8080:8080 -p 80:5000 -d -t conductor:serverAndUI` - - Server (external DB required): `docker run -p 8080:8080 -p 80:5000 -d -t -e "CONFIG_PROP=config.properties" conductor:serverAndUI` diff --git a/docker/serverAndUI/bin/startup.sh 
b/docker/serverAndUI/bin/startup.sh deleted file mode 100755 index 0070cd0b9..000000000 --- a/docker/serverAndUI/bin/startup.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# -# Copyright 2021 Netflix, Inc. -#

    -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -#

    -# http://www.apache.org/licenses/LICENSE-2.0 -#

    -# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. -# - -echo "Starting Conductor Server and UI" -echo "Running Nginx in background" -# Start nginx as daemon -nginx - -# Start the server -cd /app/libs -echo "Property file: $CONFIG_PROP" -echo $CONFIG_PROP -export config_file= - -if [ -z "$CONFIG_PROP" ]; - then - echo "Using an in-memory instance of conductor"; - export config_file=/app/config/config-local.properties - else - echo "Using '$CONFIG_PROP'"; - export config_file=/app/config/$CONFIG_PROP -fi - -nohup java -jar -DCONDUCTOR_CONFIG_FILE=$config_file conductor-server-*-boot.jar 1>&2 > /app/logs/server.log diff --git a/docker/serverAndUI/config/config-local.properties b/docker/serverAndUI/config/config-local.properties deleted file mode 100755 index d725130e8..000000000 --- a/docker/serverAndUI/config/config-local.properties +++ /dev/null @@ -1,33 +0,0 @@ -# Database persistence type. -conductor.db.type=memory - -# Dynomite Cluster details. -# format is host:port:rack separated by semicolon -conductor.redis.hosts=dyno1:8102:us-east-1c - -# Namespace for the keys stored in Dynomite/Redis -conductor.redis.workflowNamespacePrefix=conductor - -# Namespace prefix for the dyno queues -conductor.redis.queueNamespacePrefix=conductor_queues - -# No. of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# By default with dynomite, we want the repairservice enabled -conductor.app.workflowRepairServiceEnabled=true - - -# Non-quorum port used to connect to local redis. Used by dyno-queues. -# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. 
-conductor.redis.queuesNonQuorumPort=22122 - -# Transport address to elasticsearch -conductor.elasticsearch.url=localhost:9300 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/serverAndUI/config/config.properties b/docker/serverAndUI/config/config.properties deleted file mode 100755 index c596c6f10..000000000 --- a/docker/serverAndUI/config/config.properties +++ /dev/null @@ -1,35 +0,0 @@ -# Database persistence model. -conductor.db.type=dynomite - -# Dynomite Cluster details. -# format is host:port:rack separated by semicolon -conductor.redis.hosts=dyno1:8102:us-east-1c - -# Dynomite cluster name -conductor.redis.clusterName=dyno1 - -# Namespace for the keys stored in Dynomite/Redis -conductor.redis.workflowNamespacePrefix=conductor - -# Namespace prefix for the dyno queues -conductor.redis.queueNamespacePrefix=conductor_queues - -# No. of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# By default with dynomite, we want the repairservice enabled -conductor.app.workflowRepairServiceEnabled=true - -# Non-quorum port used to connect to local redis. Used by dyno-queues. -# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. 
-conductor.redis.queuesNonQuorumPort=22122 - -# Transport address to elasticsearch -conductor.elasticsearch.url=es:9300 - -# Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/serverAndUI/nginx/nginx.conf b/docker/serverAndUI/nginx/nginx.conf deleted file mode 100644 index 74e0ec2e6..000000000 --- a/docker/serverAndUI/nginx/nginx.conf +++ /dev/null @@ -1,20 +0,0 @@ -server { - listen 5000; - server_name conductor; - location / { - # This would be the directory where your React app's static files are stored at - root /usr/share/nginx/html; - try_files $uri /index.html; - } - - location /api { - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-NginX-Proxy true; - proxy_pass http://localhost:8080/api; - proxy_ssl_session_reuse off; - proxy_set_header Host $http_host; - proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } -} \ No newline at end of file diff --git a/docker/ui/Dockerfile b/docker/ui/Dockerfile deleted file mode 100644 index 02a87f498..000000000 --- a/docker/ui/Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -# -# conductor:ui - Netflix Conductor UI -# -FROM node:14-alpine -LABEL maintainer="Netflix OSS " - -# Install the required packages for the node build -# to run on alpine -RUN apk update && apk add --no-cache python3 py3-pip make g++ - -# A directory within the virtualized Docker environment -# Becomes more relevant when using Docker Compose later -WORKDIR /usr/src/app - -# Copies package.json to Docker environment in a separate layer as a performance optimization -COPY ./ui/package.json ./ - -# Installs all node packages. Cached unless package.json changes -RUN yarn install - -# Copies everything else over to Docker environment -# node_modules excluded in .dockerignore. -COPY ./ui . 
- -CMD [ "yarn", "start" ] diff --git a/docker/ui/README.md b/docker/ui/README.md deleted file mode 100644 index 960340e22..000000000 --- a/docker/ui/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Docker -## Conductor UI -This Dockerfile create the conductor:ui image - -## Building the image - -Run the following commands from the project root. - -`docker build -f docker/ui/Dockerfile -t conductor:ui .` - -## Running the conductor server - - With localhost conductor server: `docker run -p 5000:5000 -d -t conductor:ui` - - With external conductor server: `docker run -p 5000:5000 -d -t -e "WF_SERVER=http://conductor-server:8080" conductor:ui` diff --git a/docs/docs/apispec.md b/docs/docs/apispec.md deleted file mode 100644 index 94df57fca..000000000 --- a/docs/docs/apispec.md +++ /dev/null @@ -1,173 +0,0 @@ -# API Specification - -## Task & Workflow Metadata -| Endpoint | Description | Input | -|------------------------------------------|:---------------------------------|-------------------------------------------------------------| -| `GET /metadata/taskdefs` | Get all the task definitions | n/a | -| `GET /metadata/taskdefs/{taskType}` | Retrieve task definition | Task Name | -| `POST /metadata/taskdefs` | Register new task definitions | List of [Task Definitions](/configuration/taskdef.html) | -| `PUT /metadata/taskdefs` | Update a task definition | A [Task Definition](/configuration/taskdef.html) | -| `DELETE /metadata/taskdefs/{taskType}` | Delete a task definition | Task Name | -||| -| `GET /metadata/workflow` | Get all the workflow definitions | n/a | -| `POST /metadata/workflow` | Register new workflow | [Workflow Definition](/configuration/workflowdef.html) | -| `PUT /metadata/workflow` | Register/Update new workflows | List of [Workflow Definition](/configuration/workflowdef.html) | -| `GET /metadata/workflow/{name}?version=` | Get the workflow definitions | workflow name, version (optional) | -||| - -## Start A Workflow -### With Input only -See [Start 
Workflow Request](/gettingstarted/startworkflow.html). - -#### Output -Id of the workflow (GUID) - -### With Input and Task Domains -``` -POST /workflow -{ - //JSON payload for Start workflow request -} -``` -#### Start workflow request -JSON for start workflow request -``` -{ - "name": "myWorkflow", // Name of the workflow - "version": 1, // Version - “correlationId”: “corr1”, // correlation Id - "priority": 1, // Priority - "input": { - // Input map. - }, - "taskToDomain": { - // Task to domain map - } -} -``` - -#### Output -Id of the workflow (GUID) - - -## Retrieve Workflows -| Endpoint | Description | -|-----------------------------------------------------------------------------|-----------------------------------------------| -| `GET /workflow/{workflowId}?includeTasks=true | false` |Get Workflow State by workflow Id. If includeTasks is set, then also includes all the tasks executed and scheduled.| -| `GET /workflow/running/{name}` | Get all the running workflows of a given type | -| `GET /workflow/running/{name}/correlated/{correlationId}?includeClosed=true | false&includeTasks=true |false`|Get all the running workflows filtered by correlation Id. If includeClosed is set, also includes workflows that have completed running.| -| `GET /workflow/search` | Search for workflows. See Below. | - - -## Search for Workflows -Conductor uses Elasticsearch for indexing workflow execution and is used by search APIs. - -`GET /workflow/search?start=&size=&sort=&freeText=&query=` - -| Parameter | Description | -|-----------|------------------------------------------------------------------------------------------------------------------| -| start | Page number. Defaults to 0 | -| size | Number of results to return | -| sort | Sorting. Format is: `ASC:` or `DESC:` to sort in ascending or descending order by a field | -| freeText | Elasticsearch supported query. e.g. workflowType:"name_of_workflow" | -| query | SQL like where clause. e.g. workflowType = 'name_of_workflow'. 
Optional if freeText is provided. | - -### Output -Search result as described below: -```json -{ - "totalHits": 0, - "results": [ - { - "workflowType": "string", - "version": 0, - "workflowId": "string", - "correlationId": "string", - "startTime": "string", - "updateTime": "string", - "endTime": "string", - "status": "RUNNING", - "input": "string", - "output": "string", - "reasonForIncompletion": "string", - "executionTime": 0, - "event": "string" - } - ] -} -``` - -## Manage Workflows -| Endpoint | Description | -|-----------------------------------------------------------|----------------------------------------------------------------------------------------------------| -| `PUT /workflow/{workflowId}/pause` | Pause. No further tasks will be scheduled until resumed. Currently running tasks are not paused. | -| `PUT /workflow/{workflowId}/resume` | Resume normal operations after a pause. | -| `POST /workflow/{workflowId}/rerun` | See Below. | -| `POST /workflow/{workflowId}/restart` | Restart workflow execution from the start. Current execution history is wiped out. | -| `POST /workflow/{workflowId}/retry` | Retry the last failed task. | -| `PUT /workflow/{workflowId}/skiptask/{taskReferenceName}` | See below. | -| `DELETE /workflow/{workflowId}` | Terminates the running workflow. | -| `DELETE /workflow/{workflowId}/remove` | Deletes the workflow from system. Use with caution. | - -### Rerun -Re-runs a completed workflow from a specific task. - -`POST /workflow/{workflowId}/rerun` - -```json -{ - "reRunFromWorkflowId": "string", - "workflowInput": {}, - "reRunFromTaskId": "string", - "taskInput": {} -} -``` - -###Skip Task - -Skips a task execution (specified as `taskReferenceName` parameter) in a running workflow and continues forward. -Optionally updating task's input and output as specified in the payload. 
-`PUT /workflow/{workflowId}/skiptask/{taskReferenceName}?workflowId=&taskReferenceName=` -```json -{ - "taskInput": {}, - "taskOutput": {} -} -``` - -## Manage Tasks -| Endpoint | Description | -|-------------------------------------------------------|-------------------------------------------------------| -| `GET /tasks/{taskId}` | Get task details. | -| `GET /tasks/queue/all` | List the pending task sizes. | -| `GET /tasks/queue/all/verbose` | Same as above, includes the size per shard | -| `GET /tasks/queue/sizes?taskType=&taskType=&taskType` | Return the size of pending tasks for given task types | -||| - -## Polling, Ack and Update Task -These are critical endpoints used to poll for task, send ack (after polling) and finally updating the task result by worker. - - -| Endpoint | Description | -|---------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `GET /tasks/poll/{taskType}?workerid=&domain=` | Poll for a task. `workerid` identifies the worker that polled for the job and `domain` allows the poller to poll for a task in a specific domain | -| `GET /tasks/poll/batch/{taskType}?count=&timeout=&workerid=&domain` | Poll for a task in a batch specified by `count`. This is a long poll and the connection will wait until `timeout` or if there is at-least 1 item available, whichever comes first.`workerid` identifies the worker that polled for the job and `domain` allows the poller to poll for a task in a specific domain | -| `POST /tasks` | Update the result of task execution. See the schema below. 
| - - -### Schema for updating Task Result -```json -{ - "workflowInstanceId": "Workflow Instance Id", - "taskId": "ID of the task to be updated", - "reasonForIncompletion" : "If failed, reason for failure", - "callbackAfterSeconds": 0, - "status": "IN_PROGRESS|FAILED|COMPLETED", - "outputData": { - //JSON document representing Task execution output - } - -} -``` -!!!Info "Acknowledging tasks after poll" - If the worker fails to ack the task after polling, the task is re-queued and put back in queue and is made available during subsequent poll. diff --git a/docs/docs/architecture/overview.md b/docs/docs/architecture/overview.md deleted file mode 100644 index 81bd00e38..000000000 --- a/docs/docs/architecture/overview.md +++ /dev/null @@ -1,21 +0,0 @@ -# Overview - -![Architecture diagram](/img/conductor-architecture.png) - -The API and storage layers are pluggable and provide ability to work with different backends and queue service providers. - -## Runtime Model -Conductor follows RPC based communication model where workers are running on a separate machine from the server. Workers communicate with server over HTTP based endpoints and employs polling model for managing work queues. - -![Runtime Model of Conductor](/img/overview.png) - -**Notes** - -* Workers are remote systems that communicate over HTTP with the conductor servers. -* Task Queues are used to schedule tasks for workers. We use [dyno-queues][1] internally but it can easily be swapped with SQS or similar pub-sub mechanism. -* conductor-redis-persistence module uses [Dynomite][2] for storing the state and metadata along with [Elasticsearch][3] for indexing backend. -* See section under extending backend for implementing support for different databases for storage and indexing. 
- -[1]: https://github.com/Netflix/dyno-queues -[2]: https://github.com/Netflix/dynomite -[3]: https://www.elastic.co diff --git a/docs/docs/architecture/tasklifecycle.md b/docs/docs/architecture/tasklifecycle.md deleted file mode 100644 index 57f99512e..000000000 --- a/docs/docs/architecture/tasklifecycle.md +++ /dev/null @@ -1,47 +0,0 @@ -## Task state transitions -The figure below depicts the state transitions that a task can go through within a workflow execution. - -![Task_States](/img/task_states.png) - -## Retries and Failure Scenarios - -### Task failure and retries -Retries for failed task executions of each task can be configured independently. retryCount, retryDelaySeconds and retryLogic can be used to configure the retry mechanism. - -![Task Failure](/img/TaskFailure.png) - -1. Worker (W1) polls for task T1 from the Conductor server and receives the task. -2. Upon processing this task, the worker determines that the task execution is a failure and reports this to the server with FAILED status after 10 seconds. -3. The server will persist this FAILED execution of T1. A new execution of task T1 will be created and scheduled to be polled. This task will be available to be polled after 5 (retryDelaySeconds) seconds. - - -### Timeout seconds -Timeout is the maximum amount of time that the task must reach a terminal state in, else the task will be marked as TIMED_OUT. - -![Task Timeout](/img/TimeoutSeconds.png) - -**0 seconds** -> Worker polls for task T1 fom the Conductor server and receives the task. T1 is put into IN_PROGRESS status by the server. -Worker starts processing the task but is unable to process the task at this time. Worker updates the server with T1 set to IN_PROGRESS status and a callback of 9 seconds. -Server puts T1 back in the queue but makes it invisible and the worker continues to poll for the task but does not receive T1 for 9 seconds. 
- -**9,18 seconds** -> Worker receives T1 from the server and is still unable to process the task and updates the server with a callback of 9 seconds. - -**27 seconds** -> Worker polls and receives task T1 from the server and is now able to process this task. - -**30 seconds** (T1 timeout) -> Server marks T1 as TIMED_OUT because it is not in a terminal state after first being moved to IN_PROGRESS status. Server schedules a new task based on the retry count. - -**32 seconds** -> Worker completes processing of T1 and updates the server with COMPLETED status. Server will ignore this update since T1 has already been moved to a terminal status (TIMED_OUT). - - -### Response timeout seconds -Response timeout is the time within which the worker must respond to the server with an update for the task, else the task will be marked as TIMED_OUT. - -![Response Timeout](/img/ResponseTimeoutSeconds.png) - -**0 seconds** -> Worker polls for the task T1 from the Conductor server and receives the task. T1 is put into IN_PROGRESS status by the server. - -Worker starts processing the task but the worker instance dies during this execution. - -**20 seconds** (T1 responseTimeout) -> Server marks T1 as TIMED_OUT since the task has not been updated by the worker within the configured responseTimeoutSeconds (20). A new instance of task T1 is scheduled as per the retry configuration. - -**25 seconds** -> The retried instance of T1 is available to be polled by the worker, after the retryDelaySeconds (5) has elapsed. diff --git a/docs/docs/bestpractices.md b/docs/docs/bestpractices.md deleted file mode 100644 index 3889bddaf..000000000 --- a/docs/docs/bestpractices.md +++ /dev/null @@ -1,8 +0,0 @@ -## Response Timeout -- Configure the responseTimeoutSeconds of each task to be > 0. -- Should be less than or equal to timeoutSeconds. - -## Payload sizes -- Configure your workflows such that conductor is not used as a persistence store. 
-- Ensure that the output data in the task result set in your worker is used by your workflow for execution. If the values in the output payloads are not used by subsequent tasks in your workflow, this data should not be sent back to conductor in the task result. -- In cases where the output data of your task is used within subsequent tasks in your workflow but is substantially large (> 100KB), consider uploading this data to an object store (S3 or similar) and set the location to the object in your task output. The subsequent tasks can then download this data from the given location and use it during execution. diff --git a/docs/docs/configuration/eventhandlers.md b/docs/docs/configuration/eventhandlers.md deleted file mode 100644 index 8417d67c0..000000000 --- a/docs/docs/configuration/eventhandlers.md +++ /dev/null @@ -1,122 +0,0 @@ -# Event Handlers -Eventing in Conductor provides for loose coupling between workflows and support for producing and consuming events from external systems. - -This includes: - -1. Being able to produce an event (message) in an external system like SQS or internal to Conductor. -2. Start a workflow when a specific event occurs that matches the provided criteria. - -Conductor provides SUB_WORKFLOW task that can be used to embed a workflow inside parent workflow. Eventing supports provides similar capability without explicitly adding dependencies and provides **fire-and-forget** style integrations. - -## Event Task -Event task provides ability to publish an event (message) to either Conductor or an external eventing system like SQS. Event tasks are useful for creating event based dependencies for workflows and tasks. - -See [Event Task](/reference-docs/event-task.html) for documentation. - -## Event Handler -Event handlers are listeners registered that executes an action when a matching event occurs. The supported actions are: - -1. Start a Workflow -2. Fail a Task -3. 
Complete a Task - -Event Handlers can be configured to listen to Conductor Events or an external event like SQS. - -### Configuration -Event Handlers are configured via ```/event/``` APIs. - -#### Structure: -```json -{ - "name" : "descriptive unique name", - "event": "event_type:event_location", - "condition": "boolean condition", - "actions": ["see examples below"] -} -``` -#### Condition -Condition is an expression that MUST evaluate to a boolean value. A Javascript like syntax is supported that can be used to evaluate condition based on the payload. -Actions are executed only when the condition evaluates to `true`. - -**Examples** - -Given the following payload in the message: - -```json -{ - "fileType": "AUDIO", - "version": 3, - "metadata": { - "length": 300, - "codec": "aac" - } -} -``` - -|Expression|Result| -|---|---| -|`$.version > 1`|true| -|`$.version > 10`|false| -|`$.metadata.length == 300`|true| - - -### Actions - -**Start A Workflow** - -```json -{ - "action": "start_workflow", - "start_workflow": { - "name": "WORKFLOW_NAME", - "version": "", - "input": { - "param1": "${param1}" - } - } -} -``` - -**Complete Task*** - -```json -{ - "action": "complete_task", - "complete_task": { - "workflowId": "${workflowId}", - "taskRefName": "task_1", - "output": { - "response": "${result}" - } - }, - "expandInlineJSON": true -} -``` - -**Fail Task*** - -```json -{ - "action": "fail_task", - "fail_task": { - "workflowId": "${workflowId}", - "taskRefName": "task_1", - "output": { - "response": "${result}" - } - }, - "expandInlineJSON": true -} -``` -Input for starting a workflow and output when completing / failing task follows the same [expressions](/configuration/workflowdef.html#wiring-inputs-and-outputs) used for wiring workflow inputs. 
- -!!!info "Expanding stringified JSON elements in payload" - `expandInlineJSON` property, when set to true will expand the inlined stringified JSON elements in the payload to JSON documents and replace the string value with JSON document. - This feature allows such elements to be used with JSON path expressions. - -## Extending - -Provide the implementation of [EventQueueProvider](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/events/EventQueueProvider.java). - -SQS Queue Provider: -[SQSEventQueueProvider.java ](https://github.com/Netflix/conductor/blob/master/contribs/src/main/java/com/netflix/conductor/core/events/sqs/SQSEventQueueProvider.java) diff --git a/docs/docs/configuration/isolationgroups.md b/docs/docs/configuration/isolationgroups.md deleted file mode 100644 index 1d2320261..000000000 --- a/docs/docs/configuration/isolationgroups.md +++ /dev/null @@ -1,153 +0,0 @@ -# Isolation Groups - -Consider an HTTP task where the latency of an API is high, task queue piles up effecting execution of other HTTP tasks which have low latency. - -We can isolate the execution of such tasks to have predictable performance using `isolationgroupId`, a property of task definition. - -When we set isolationGroupId, the executor `SystemTaskWorkerCoordinator` will allocate an isolated queue and an isolated thread pool for execution of those tasks. - -If no `isolationgroupId` is specified in task definition, then fallback is default behaviour where the executor executes the task in shared thread-pool for all tasks. 
- -## Example - -** Task Definition ** -```json -{ - "name": "encode_task", - "retryCount": 3, - - "timeoutSeconds": 1200, - "inputKeys": [ - "sourceRequestId", - "qcElementType" - ], - "outputKeys": [ - "state", - "skipped", - "result" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - "rateLimitPerFrequency": 50, - "isolationgroupId": "myIsolationGroupId" -} -``` -** Workflow Definition ** -```json -{ - "name": "encode_and_deploy", - "description": "Encodes a file and deploys to CDN", - "version": 1, - "tasks": [ - { - "name": "encode", - "taskReferenceName": "encode", - "type": "HTTP", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - } - } - ], - "outputParameters": { - "cdn_url": "${d1.output.location}" - }, - "failureWorkflow": "cleanup_encode_resources", - "restartable": true, - "workflowStatusListenerEnabled": true, - "schemaVersion": 2 -} -``` - - -- puts `encode` in `HTTP-myIsolationGroupId` queue, and allocates a new thread pool for this for execution. - -Note: To enable this feature, the `workflow.isolated.system.task.enable` property needs to be made `true`,its default value is `false` - -The property `workflow.isolated.system.task.worker.thread.count` sets the thread pool size for isolated tasks; default is `1`. - -isolationGroupId is currently supported only in HTTP and kafka Task. - -### Execution Name Space - -`executionNameSpace` A property of taskdef can be used to provide JVM isolation to task execution and scale executor deployments horizontally. - -Limitation of using isolationGroupId is that we need to scale executors vertically as the executor allocates a new thread pool per `isolationGroupId`. Also, since the executor runs the tasks in the same JVM, task execution is not isolated completely. 
- -To support JVM isolation, and also allow the executors to scale horizontally, we can use `executionNameSpace` property in taskdef. - -Executor consumes tasks whose executionNameSpace matches with the configuration property `workflow.system.task.worker.executionNameSpace` - -If the property is not set, the executor executes tasks without any executionNameSpace set. - - -```json -{ - "name": "encode_task", - "retryCount": 3, - - "timeoutSeconds": 1200, - "inputKeys": [ - "sourceRequestId", - "qcElementType" - ], - "outputKeys": [ - "state", - "skipped", - "result" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - "rateLimitPerFrequency": 50, - "executionNameSpace": "myExecutionNameSpace" -} -``` - -#### Example Workflow task - -```json -{ - "name": "encode_and_deploy", - "description": "Encodes a file and deploys to CDN", - "version": 1, - "tasks": [ - { - "name": "encode", - "taskReferenceName": "encode", - "type": "HTTP", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - } - } - ], - "outputParameters": { - "cdn_url": "${d1.output.location}" - }, - "failureWorkflow": "cleanup_encode_resources", - "restartable": true, - "workflowStatusListenerEnabled": true, - "schemaVersion": 2 -} -``` - -- `encode` task is executed by the executor deployment whose `workflow.system.task.worker.executionNameSpace` property is `myExecutionNameSpace` - -`executionNameSpace` can be used along with `isolationGroupId` - -If the above task contains a isolationGroupId `myIsolationGroupId`, the tasks will be scheduled in a queue HTTP@myExecutionNameSpace-myIsolationGroupId, and have a new threadpool for execution in the deployment group with myExecutionNameSpace - - - diff --git a/docs/docs/configuration/sysoperator.md b/docs/docs/configuration/sysoperator.md deleted 
file mode 100644 index 181d72fd3..000000000 --- a/docs/docs/configuration/sysoperator.md +++ /dev/null @@ -1,20 +0,0 @@ -# System Operators - -Operators are built-in primitives in Conductor that allow you to define the control flow in the workflow. -Operators are similar to programming constructs such as for loops, decisions, etc. -Conductor has support for most of the programing primitives allowing you to define the most advanced workflows. - -## Supported Operators -Conductor supports the following programming language constructs: - -| Language Construct | Conductor Operator | -|----------------------------------|-------------------------------------------------------------| -| Do/While or Loops | [Do While Task](/reference-docs/do-while-task.html) | -| Dynamic Fork | [Dynamic Fork Task](/reference-docs/dynamic-fork-task.html) | -| Fork / Parallel execution | [Fork Task](/reference-docs/fork-task.html) | -| Join | [Join Task](/reference-docs/join-task.html) | -| Sub Process / Sub-Flow | [Sub Workflow Task](/reference-docs/sub-workflow-task.html) | -| Switch//Decision/if..then...else | [Switch Task](/reference-docs/switch-task.html) | -| Terminate | [Terminate Task](/reference-docs/terminate-task.html) | -| Variables | [Variable Task](/reference-docs/set-variable-task.html) | -| Wait | [Wait Task](/reference-docs/wait-task.html) | diff --git a/docs/docs/configuration/systask.md b/docs/docs/configuration/systask.md deleted file mode 100644 index 4b2f964a6..000000000 --- a/docs/docs/configuration/systask.md +++ /dev/null @@ -1,256 +0,0 @@ -# System Tasks - -System Tasks (Workers) are built-in tasks that are general purpose and re-usable. They run on the Conductor servers. -Such tasks allow you to get started without having to write custom workers. - -## Available System Tasks - -Conductor has the following set of system tasks available. 
- -| Task | Description | Use Case | -|-----------------------|--------------------------------------------------------|------------------------------------------------------------------------------------| -| Event Publishing | [Event Task](/reference-docs/event-task.html) | External eventing system integration. e.g. amqp, sqs, nats | -| HTTP | [HTTP Task](/reference-docs/http-task.html) | Invoke any HTTP(S) endpoints | -| Inline Code Execution | [Inline Task](/reference-docs/inline-task.html) | Execute arbitrary lightweight javascript code | -| JQ Transform | [JQ Task](/reference-docs/json-jq-transform-task.html) | Use JQ to transform task input/output | -| Kafka Publish | [Kafka Task](/reference-docs/kafka-publish-task.html) | Publish messages to Kafka | - -| Name | Description | -|--------------------------|-------------------------------------------------------------------------------------------------------------------------------------------| -| joinOn | List of task reference names, which the EXCLUSIVE_JOIN will lookout for to capture output. From above example, this could be ["T2", "T3"] | -| defaultExclusiveJoinTask | Task reference name, whose output should be used incase the decision case is undefined. From above example, this could be ["T1"] | - - -**Example** - -``` json -{ - "name": "exclusive_join", - "taskReferenceName": "exclusiveJoin", - "type": "EXCLUSIVE_JOIN", - "joinOn": [ - "task2", - "task3" - ], - "defaultExclusiveJoinTask": [ - "task1" - ] -} -``` - - -## Wait -A wait task is implemented as a gate that remains in ```IN_PROGRESS``` state unless marked as ```COMPLETED``` or ```FAILED``` by an external trigger. -To use a wait task, set the task type as ```WAIT``` - -**Parameters:** -None required. - -**External Triggers for Wait Task** - -Task Resource endpoint can be used to update the status of a task to a terminate state. 
- -Contrib module provides SQS integration where an external system can place a message in a pre-configured queue that the server listens on. As the messages arrive, they are marked as ```COMPLETED``` or ```FAILED```. - -**SQS Queues** - -* SQS queues used by the server to update the task status can be retrieve using the following API: -``` -GET /queue -``` -* When updating the status of the task, the message needs to conform to the following spec: - * Message has to be a valid JSON string. - * The message JSON should contain a key named ```externalId``` with the value being a JSONified string that contains the following keys: - * ```workflowId```: Id of the workflow - * ```taskRefName```: Task reference name that should be updated. - * Each queue represents a specific task status and tasks are marked accordingly. e.g. message coming to a ```COMPLETED``` queue marks the task status as ```COMPLETED```. - * Tasks' output is updated with the message. - -**Example SQS Payload:** - -```json -{ - "some_key": "valuex", - "externalId": "{\"taskRefName\":\"TASK_REFERENCE_NAME\",\"workflowId\":\"WORKFLOW_ID\"}" -} -``` - - -## Dynamic Task - -Dynamic Task allows to execute one of the registered Tasks dynamically at run-time. It accepts the task name to execute in inputParameters. - -**Parameters:** - -|name|description| -|---|---| -| dynamicTaskNameParam|Name of the parameter from the task input whose value is used to schedule the task. e.g. if the value of the parameter is ABC, the next task scheduled is of type 'ABC'.| - -**Example** -``` json -{ - "name": "user_task", - "taskReferenceName": "t1", - "inputParameters": { - "files": "${workflow.input.files}", - "taskToExecute": "${workflow.input.user_supplied_task}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" -} -``` -If the workflow is started with input parameter user_supplied_task's value as __user_task_2__, Conductor will schedule __user_task_2__ when scheduling this dynamic task. 
- -## Inline Task - -Inline Task helps execute ad-hoc logic at Workflow run-time, using any evaluator engine. Supported evaluators -are `value-param` evaluator which simply translates the input parameter to output and `javascript` evaluator that -evaluates Javascript expression. - -This is particularly helpful in running simple evaluations in Conductor server, over creating Workers. - -**Parameters:** - -|name|type|description|notes| -|---|---|---|---| -|evaluatorType|String|Type of the evaluator. Supported evaluators: `value-param`, `javascript` which evaluates javascript expression.| -|expression|String|Expression associated with the type of evaluator. For `javascript` evaluator, Javascript evaluation engine is used to evaluate expression defined as a string. Must return a value.|Must be non-empty.| - -Besides `expression`, any value is accessible as `$.value` for the `expression` to evaluate. - -**Outputs:** - -|name|type|description| -|---|---|---| -|result|Map|Contains the output returned by the evaluator based on the `expression`| - -The task output can then be referenced in downstream tasks like: -```"${inline_test.output.result.testvalue}"``` - -**Example** -``` json -{ - "name": "INLINE_TASK", - "taskReferenceName": "inline_test", - "type": "INLINE", - "inputParameters": { - "inlineValue": "${workflow.input.inlineValue}", - "evaluatorType": "javascript", - "expression": "function scriptFun(){if ($.inlineValue == 1){ return {testvalue: true} } else { return - {testvalue: false} }} scriptFun();" - } -} -``` - -## Terminate Task - -Task that can terminate a workflow with a given status and modify the workflow's output with a given parameter. It can act as a "return" statement for conditions where you simply want to terminate your workflow. - -For example, if you have a decision where the first condition is met, you want to execute some tasks, otherwise you want to finish your workflow. 
- -**Parameters:** - -|name|type| description | notes | -|---|---|---------------------------------------------------|-------------------------| -|terminationStatus|String| can only accept "COMPLETED" or "FAILED" | task cannot be optional | - |terminationReason|String| reason for incompletion to be set in the workflow | optional | -|workflowOutput|Any| Expected workflow output | optional | - -**Outputs:** - -|name|type|description| -|---|---|---| -|output|Map|The content of `workflowOutput` from the inputParameters. An empty object if `workflowOutput` is not set.| - -```json -{ - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "COMPLETED", - "terminationReason": "", - "workflowOutput": "${task0.output}" - }, - "type": "TERMINATE", - "startDelay": 0, - "optional": false -} -``` - - -## Kafka Publish Task - -A kafka Publish task is used to push messages to another microservice via kafka - -**Parameters:** - -The task expects an input parameter named ```kafka_request``` as part of the task's input with the following details: - -|name|description| -|---|---| -| bootStrapServers |bootStrapServers for connecting to given kafka.| -|key|Key to be published| -|keySerializer | Serializer used for serializing the key published to kafka. One of the following can be set :
    1. org.apache.kafka.common.serialization.IntegerSerializer
    2. org.apache.kafka.common.serialization.LongSerializer
    3. org.apache.kafka.common.serialization.StringSerializer.
    Default is String serializer | -|value| Value published to kafka| -|requestTimeoutMs| Request timeout while publishing to kafka. If this value is not given the value is read from the property `kafka.publish.request.timeout.ms`. If the property is not set the value defaults to 100 ms | -|maxBlockMs| maxBlockMs while publishing to kafka. If this value is not given the value is read from the property `kafka.publish.max.block.ms`. If the property is not set the value defaults to 500 ms | -|headers|A map of additional kafka headers to be sent along with the request.| -|topic|Topic to publish| - -The producer created in the kafka task is cached. By default the cache size is 10 and expiry time is 120000 ms. To change the defaults following can be modified kafka.publish.producer.cache.size,kafka.publish.producer.cache.time.ms respectively. - -**Kafka Task Output** - -Task status transitions to COMPLETED - -**Example** - -Task sample - -```json -{ - "name": "call_kafka", - "taskReferenceName": "call_kafka", - "inputParameters": { - "kafka_request": { - "topic": "userTopic", - "value": "Message to publish", - "bootStrapServers": "localhost:9092", - "headers": { - "x-Auth":"Auth-key" - }, - "key": "123", - "keySerializer": "org.apache.kafka.common.serialization.IntegerSerializer" - } - }, - "type": "KAFKA_PUBLISH" -} -``` - -The task is marked as ```FAILED``` if the message could not be published to the Kafka queue. - - -## Do While Task - -Sequentially execute a list of task as long as a condition is true. The list of tasks is executed first, before the condition is -checked (even for the first iteration). - -When scheduled, each task of this loop will see its `taskReferenceName` concatenated with `__i`, with `i` being the -iteration number, starting at 1. Warning: `taskReferenceName` containing arithmetic operators must not be used. 
- -Each task output is stored as part of the `DO_WHILE` task, indexed by the iteration value (see example below), allowing -the condition to reference the output of a task for a specific iteration (eg. ```$.LoopTask['iteration]['first_task']```) - -The `DO_WHILE` task is set to `FAILED` as soon as one of the loopTask fails. In such case retry, iteration starts from 1. - -Limitations: - - Domain or isolation group execution is unsupported; - - Nested `DO_WHILE` is unsupported; - - `SUB_WORKFLOW` is unsupported; - - Since loopover tasks will be executed in loop inside scope of parent do while task, crossing branching outside of DO_WHILE - task is not respected. Branching inside loopover task is supported. - -**Parameters:** - -|name|type|description| - diff --git a/docs/docs/configuration/taskdef.md b/docs/docs/configuration/taskdef.md deleted file mode 100644 index 075912ff4..000000000 --- a/docs/docs/configuration/taskdef.md +++ /dev/null @@ -1,112 +0,0 @@ -# Task Definition - -Tasks are the building blocks of workflow in Conductor. A task can be an operator, system task or custom code written in any programming language. - -A typical Conductor workflow is a list of tasks that are executed until completion or the termination of the workflow. - -Conductor maintains a registry of worker tasks. A task MUST be registered before being used in a workflow. 
- -**Example** -``` json -{ - "name": "encode_task", - "retryCount": 3, - - "timeoutSeconds": 1200, - "pollTimeoutSeconds": 3600, - "inputKeys": [ - "sourceRequestId", - "qcElementType" - ], - "outputKeys": [ - "state", - "skipped", - "result" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 1200, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - "rateLimitPerFrequency": 50, - "ownerEmail": "foo@bar.com", - "description": "Sample Encoding task" -} -``` - -| Field | Description | Notes | -|----------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------| -| name | Task Type. Unique name of the Task that resonates with it's function. | Unique | -| description | Description of the task | optional | -| retryCount | No. of retries to attempt when a Task is marked as failure | defaults to 3 | -| retryLogic | Mechanism for the retries | [Retry Logic values](#retry-logic) | -| retryDelaySeconds | Time to wait before retries | defaults to 60 seconds | -| timeoutPolicy | Task's timeout policy | [timeout policy values](#timeout-policy) | -| timeoutSeconds | Time in seconds, after which the task is marked as `TIMED_OUT` if not completed after transitioning to `IN_PROGRESS` status for the first time | No timeouts if set to 0 | -| pollTimeoutSeconds | Time in seconds, after which the task is marked as `TIMED_OUT` if not polled by a worker | No timeouts if set to 0 | -| responseTimeoutSeconds | Must be greater than 0 and less than timeoutSeconds. The task is rescheduled if not updated with a status after this time (heartbeat mechanism). 
Useful when the worker polls for the task but fails to complete due to errors/network failure. | defaults to 3600 | -| backoffScaleFactor | Must be greater than 0. Scale factor for linearity of the backoff | defaults to 1 | -| inputKeys | Array of keys of task's expected input. Used for documenting task's input. See [Using inputKeys and outputKeys](#using-inputkeys-and-outputkeys). | optional | -| outputKeys | Array of keys of task's expected output. Used for documenting task's output | optional | -| inputTemplate | See [Using inputTemplate](#using-inputtemplate) below. | optional | -| concurrentExecLimit | Number of tasks that can be executed at any given time. | optional | -| rateLimitFrequencyInSeconds, rateLimitPerFrequency | See [Task Rate limits](#task-rate-limits) below. | optional | - - -### Retry Logic - -* FIXED : Reschedule the task after the ```retryDelaySeconds``` -* EXPONENTIAL_BACKOFF : Reschedule after ```retryDelaySeconds 2^(attemptNumber)``` -* LINEAR_BACKOFF : Reschedule after ```retryDelaySeconds * backoffRate * attemptNumber``` - -### Timeout Policy - -* RETRY : Retries the task again -* TIME_OUT_WF : Workflow is marked as TIMED_OUT and terminated -* ALERT_ONLY : Registers a counter (task_timeout) - -### Task Concurrent Execution Limits - -* `concurrentExecLimit` limits the number of simultaneous Task executions at any point. -**Example:** -If you have 1000 task executions waiting in the queue, and 1000 workers polling this queue for tasks, but if you have set `concurrentExecLimit` to 10, only 10 tasks would be given to workers (which would lead to starvation). If any of the workers finishes execution, a new task(s) will be removed from the queue, while still keeping the current execution count to 10. - -### Task Rate limits - -> Note: Rate limiting is only supported for the Redis-persistence module and is not available with other persistence layers. - -* `rateLimitFrequencyInSeconds` and `rateLimitPerFrequency` should be used together. 
-* `rateLimitFrequencyInSeconds` sets the "frequency window", i.e the `duration` to be used in `events per duration`. Eg: 1s, 5s, 60s, 300s etc. -* `rateLimitPerFrequency`defines the number of Tasks that can be given to Workers per given "frequency window". - -**Example:** -Let's set `rateLimitFrequencyInSeconds = 5`, and `rateLimitPerFrequency = 12`. This means, our frequency window is of 5 seconds duration, and for each frequency window, Conductor would only give 12 tasks to workers. So, in a given minute, Conductor would only give 12*(60/5) = 144 tasks to workers irrespective of the number of workers that are polling for the task. -Note that unlike `concurrentExecLimit`, rate limiting doesn't take into account tasks already in progress/completed. Even if all the previous tasks are executed within 1 sec, or would take a few days, the new tasks are still given to workers at configured frequency, 144 tasks per minute in above example. - - -### Using inputKeys and outputKeys - -* `inputKeys` and `outputKeys` can be considered as parameters and return values for the Task. -* Consider the task Definition as being represented by an interface: ```(value1, value2 .. valueN) someTaskDefinition(key1, key2 .. keyN);``` -* However, these parameters are not strictly enforced at the moment. Both `inputKeys` and `outputKeys` act as a documentation for task re-use. The tasks in workflow need not define all of the keys in the task definition. -* In the future, this can be extended to be a strict template that all task implementations must adhere to, just like interfaces in programming languages. - -### Using inputTemplate - -* `inputTemplate` allows to define default values, which can be overridden by values provided in Workflow. 
-* Eg: In your Task Definition, you can define your inputTemplate as: - -```json -"inputTemplate": { - "url": "https://some_url:7004" -} -``` - -* Now, in your workflow Definition, when using the above task, you can use the default `url` or override with something else in the task's `inputParameters`. - -```json -"inputParameters": { - "url": "${workflow.input.some_new_url}" -} -``` diff --git a/docs/docs/configuration/taskdomains.md b/docs/docs/configuration/taskdomains.md deleted file mode 100644 index 2d3f3d7da..000000000 --- a/docs/docs/configuration/taskdomains.md +++ /dev/null @@ -1,92 +0,0 @@ -# Task Domains -Task domains help support task development. The idea is that the same “task definition” can be implemented in different “domains”. A domain is some arbitrary name that the developer controls. So when the workflow is started, the caller can specify, out of all the tasks in the workflow, which tasks need to run in a specific domain; this domain is then used to poll for the task on the client side to execute it. - -As an example, if a workflow (WF1) has 3 tasks T1, T2, T3. The workflow is deployed and working fine, which means there are T2 workers polling and executing. If you modify T2 and run it locally, there is no guarantee that your modified T2 worker will get the task that you are looking for, as it is coming from the general T2 queue. The “Task Domain” feature solves this problem by splitting the T2 queue by domains, so when the app polls for task T2 in a specific domain, it gets the correct task. - -When starting a workflow, multiple domains can be specified as fallbacks, for example "domain1,domain2". Conductor keeps track of the last polling time for each task, so in this case it checks if there are any active workers for "domain1"; if so, the task is put in "domain1", if not then the same check is done for the next domain in sequence, "domain2", and so on. 
- -If no workers are active for the domains provided: - -- If `NO_DOMAIN` is provided as the last token in the list of domains, then no domain is set. -- Else, the task will be added to the last inactive domain in the list of domains, hoping that workers will soon be available for that domain. - -Also, a `*` token can be used to apply domains for all tasks. This can be overridden by providing task specific mappings along with `*`. - -For example, the below configuration: - -```json -"taskToDomain": { - "*": "mydomain", - "some_task_x":"NO_DOMAIN", - "some_task_y": "someDomain, NO_DOMAIN", - "some_task_z": "someInactiveDomain1, someInactiveDomain2" -} -``` - -- puts `some_task_x` in the default queue (no domain). -- puts `some_task_y` in the `someDomain` domain, if available, or in the default otherwise. -- puts `some_task_z` in `someInactiveDomain2`, even though workers are not available yet. -- and puts all other tasks in `mydomain` (even if workers are not available). - - -Note that these "fall back" type domain strings can only be used when starting the workflow; when polling from the client, only one domain is used. Also, the `NO_DOMAIN` token should be used last. - -## How to use Task Domains -### Change the poll call -The poll call must now specify the domain. - -#### Java Client -If you are using the Java client then a simple property change will force TaskRunnerConfigurer to pass the domain to the poller. -``` - conductor.worker.T2.domain=mydomain //Task T2 needs to poll for domain "mydomain" -``` -#### REST call -`GET /tasks/poll/batch/T2?workerid=myworker&domain=mydomain` -`GET /tasks/poll/T2?workerid=myworker&domain=mydomain` - -### Change the start workflow call -When starting the workflow, make sure the task to domain mapping is passed - -#### Java Client -``` - Map input = new HashMap<>(); - input.put("wf_input1", "one"); - - Map taskToDomain = new HashMap<>(); - taskToDomain.put("T2", "mydomain"); - - // Other options ... 
- // taskToDomain.put("*", "mydomain, NO_DOMAIN") - // taskToDomain.put("T2", "mydomain, fallbackDomain1, fallbackDomain2") - - StartWorkflowRequest swr = new StartWorkflowRequest(); - swr.withName("myWorkflow") - .withCorrelationId("corr1") - .withVersion(1) - .withInput(input) - .withTaskToDomain(taskToDomain); - - wfclient.startWorkflow(swr); - -``` - -#### REST call -`POST /workflow` - -```json -{ - "name": "myWorkflow", - "version": 1, - "correlationId": "corr1", - "input": { - "wf_input1": "one" - }, - "taskToDomain": { - "*": "mydomain", - "some_task_x":"NO_DOMAIN", - "some_task_y": "someDomain, NO_DOMAIN" - } -} - -``` - diff --git a/docs/docs/configuration/workerdef.md b/docs/docs/configuration/workerdef.md deleted file mode 100644 index 34347a2ad..000000000 --- a/docs/docs/configuration/workerdef.md +++ /dev/null @@ -1,14 +0,0 @@ -# Worker Definition - -A worker is responsible for executing a task. Operator and System tasks are handled by the Conductor server, while user -defined tasks need to have a worker created that awaits the work to be scheduled by the server for it to be executed. -Workers can be implemented in any language, and Conductor provides support for Java, Golang and Python worker frameworks that provide features such as -polling threads, metrics and server communication that make creating workers easy. - -Each worker embodies the Microservice design pattern and follows certain basic principles: - -1. Workers are stateless and do not implement workflow-specific logic. -2. Each worker executes a very specific task and produces well-defined output given specific inputs. -3. Workers are meant to be idempotent (or should handle cases where the task that partially executed gets rescheduled due to timeouts etc.) -4. Workers do not implement the logic to handle retries etc; that is taken care of by the Conductor server. 
- \ No newline at end of file diff --git a/docs/docs/configuration/workflowdef.md b/docs/docs/configuration/workflowdef.md deleted file mode 100644 index 2a4a50c09..000000000 --- a/docs/docs/configuration/workflowdef.md +++ /dev/null @@ -1,229 +0,0 @@ -# Workflow Definition - -## What are Workflows? - -At a high level, a workflow is the Conductor primitive that encompasses the definition and flow of your business logic. -A workflow is a collection (graph) of tasks and sub-workflows. A workflow definition specifies the order of execution of -these [Tasks](taskdef.md). It also specifies how data/state is passed from one task to the other (using the -input/output parameters). These are then combined to give you the final result. This orchestration of Tasks can -happen in a hybrid ecosystem that includes microservices, serverless functions, and monolithic applications. They can -also span across any public cloud and on-premise data center footprints. In addition, the orchestration of tasks can be -across any programming language since Conductor is also language agnostic. - -One key benefit of this approach is that you can build a complex application using simple and granular tasks that do not -need to be aware of or keep track of the state of your application's execution flow. Conductor keeps track of the state, -calls tasks in the right order (sequentially or in parallel, as defined by you), retry calls if needed, handle failure -scenarios gracefully, and outputs the final result. - -Leveraging workflows in Conductor enables developers to truly focus on their core mission - building their application -code in the languages of their choice. Conductor does the heavy lifting associated with ensuring high -reliability, transactional consistency, and long durability of their workflows. 
Simply put, wherever your application's -component lives and whichever languages they were written in, you can build a workflow in Conductor to orchestrate their -execution in a reliable & scalable manner. - -## What does a Workflow look like? - -Let's start with a basic workflow and understand the different aspects of it. In particular, we will talk about -two stages of a workflow, *defining* a workflow and *executing* a workflow. - -### Simple Workflow Example - -Assume your business logic is simply to get some shipping information and then do the shipping. You start by -logically partitioning them into two tasks: - -* **shipping_info** -* **shipping_task** - -First we would build these two task definitions. Let's assume that ```shipping info``` takes an account number, and returns a name and address. - -**Example** -```json -{ - "name": "mail_a_box", - "description": "shipping Workflow", - "version": 1, - "tasks": [ - { - "name": "shipping_info", - "taskReferenceName": "shipping_info_ref", - "inputParameters": { - "account": "${workflow.input.accountNumber}" - }, - "type": "SIMPLE" - }, - { - "name": "shipping_task", - "taskReferenceName": "shipping_task_ref", - "inputParameters": { - "name": "${shipping_info_ref.output.name}", - "streetAddress": "${shipping_info_ref.output.streetAddress}", - "city": "${shipping_info_ref.output.city}", - "state": "${shipping_info_ref.output.state}", - "zipcode": "${shipping_info_ref.output.zipcode}" - }, - "type": "SIMPLE" - } - ], - "outputParameters": { - "trackingNumber": "${shipping_task_ref.output.trackingNumber}" - }, - "failureWorkflow": "shipping_issues", - "restartable": true, - "workflowStatusListenerEnabled": true, - "ownerEmail": "conductor@example.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "variables": {}, - "inputTemplate": {} -} -``` - -The mail_a_box workflow has 2 tasks: - 1. The first task takes the provided account number, and outputs an address. - 2. 
The 2nd task takes the address info and generates a shipping label. - - Upon completion of the 2 tasks, the workflow outputs the trackking number generated in the 2nd task. If the workflow fails, a second workflow named ```shipping_issues``` is run. - -## Fields in a Workflow - -| Field | Description | Notes | -|:------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------| -| name | Name of the workflow || -| description | Description of the workflow | optional | -| version | Numeric field used to identify the version of the schema. Use incrementing numbers | When starting a workflow execution, if not specified, the definition with highest version is used | -| tasks | An array of task definitions. | [Task properties](#tasks-within-workflow) | -| inputParameters | List of input parameters. Used for documenting the required inputs to workflow | optional | -| inputTemplate | Default input values. See [Using inputTemplate](#using-inputtemplate) | optional | -| outputParameters | JSON template used to generate the output of the workflow | If not specified, the output is defined as the output of the _last_ executed task | -| failureWorkflow | String; Workflow to be run on current Workflow failure. Useful for cleanup or post actions on failure. | optional | -| schemaVersion | Current Conductor Schema version. schemaVersion 1 is discontinued. | Must be 2 | -| restartable | Boolean flag to allow Workflow restarts | defaults to true | -| workflowStatusListenerEnabled | If true, every workflow that gets terminated or completed will send a notification. See [workflow notifictions](#workflow-notifications) | optional (false by default) | - -## Tasks within Workflow -```tasks``` property in a workflow execution defines an array of tasks to be executed in that order. 
- -| Field | Description | Notes | -|:------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------| -| name | Name of the task. MUST be registered as a task with Conductor before starting the workflow || -| taskReferenceName | Alias used to refer the task within the workflow. MUST be unique within workflow. || -| type | Type of task. SIMPLE for tasks executed by remote workers, or one of the system task types || -| description | Description of the task | optional | -| optional | true or false. When set to true - workflow continues even if the task fails. The status of the task is reflected as `COMPLETED_WITH_ERRORS` | Defaults to `false` | -| inputParameters | JSON template that defines the input given to the task | See [Wiring Inputs and Outputs](#wiring-inputs-and-outputs) for details | -| domain | See [Task Domains](/configuration/taskdomains.html) for more information. | optional | - -In addition to these parameters, System Tasks have their own parameters. Checkout [System Tasks](/configuration/systask.html) for more information. - -## Wiring Inputs and Outputs - -Workflows are supplied inputs by client when a new execution is triggered. -Workflow input is a JSON payload that is available via ```${workflow.input...}``` expressions. - -Each task in the workflow is given input based on the ```inputParameters``` template configured in workflow definition. ```inputParameters``` is a JSON fragment with value containing parameters for mapping values from input or output of a workflow or another task during the execution. 
- -Syntax for mapping the values follows the pattern as: - -__${SOURCE.input/output.JSONPath}__ - -| field | description | -|--------------|--------------------------------------------------------------------------| -| SOURCE | can be either "workflow" or any of the task reference name | -| input/output | refers to either the input or output of the source | -| JSONPath | JSON path expression to extract JSON fragment from source's input/output | - - -!!! note "JSON Path Support" - Conductor supports [JSONPath](http://goessner.net/articles/JsonPath/) specification and uses Java implementation from [here](https://github.com/jayway/JsonPath). - -!!! note "Escaping expressions" - To escape an expression, prefix it with an extra _$_ character (ex.: ```$${workflow.input...}```). - -**Example** - -Consider a task with input configured to use input/output parameters from workflow and a task named __loc_task__. - -```json -{ - "inputParameters": { - "movieId": "${workflow.input.movieId}", - "url": "${workflow.input.fileLocation}", - "lang": "${loc_task.output.languages[0]}", - "http_request": { - "method": "POST", - "url": "http://example.com/${loc_task.output.fileId}/encode", - "body": { - "recipe": "${workflow.input.recipe}", - "params": { - "width": 100, - "height": 100 - } - }, - "headers": { - "Accept": "application/json", - "Content-Type": "application/json" - } - } - } -} -``` - -Consider the following as the _workflow input_ - -```json -{ - "movieId": "movie_123", - "fileLocation":"s3://moviebucket/file123", - "recipe":"png" -} -``` -And the output of the _loc_task_ as the following; - -```json -{ - "fileId": "file_xxx_yyy_zzz", - "languages": ["en","ja","es"] -} -``` - -When scheduling the task, Conductor will merge the values from workflow input and loc_task's output and create the input to the task as follows: - -```json -{ - "movieId": "movie_123", - "url": "s3://moviebucket/file123", - "lang": "en", - "http_request": { - "method": "POST", - "url": 
"http://example.com/file_xxx_yyy_zzz/encode", - "body": { - "recipe": "png", - "params": { - "width": 100, - "height": 100 - } - }, - "headers": { - "Accept": "application/json", - "Content-Type": "application/json" - } - } -} -``` - -### Using inputTemplate - -* `inputTemplate` allows to define default values, which can be overridden by values provided in Workflow. -* Eg: In your Workflow Definition, you can define your inputTemplate as: - -```json -"inputTemplate": { - "url": "https://some_url:7004" -} -``` - -And `url` would be `https://some_url:7004` if no `url` was provided as input to your workflow. - -## Workflow notifications - -Conductor can be configured to publish notifications to external systems upon completion/termination of workflows. See [extending conductor](/extend.html) for details. diff --git a/docs/docs/css/custom.css b/docs/docs/css/custom.css deleted file mode 100644 index 3e4728cd5..000000000 --- a/docs/docs/css/custom.css +++ /dev/null @@ -1,313 +0,0 @@ -:root { - /*--main-text-color: #212121;*/ - --brand-blue: #1976d2; - --brand-dark-blue: #242A36; - --caption-color: #4f4f4f; - --brand-lt-blue: #f0f5fb; - --brand-gray: rgb(118, 118, 118); - --brand-lt-gray: rgb(203,204,207); - --brand-red: #e50914; -} -body { - color: var(--brand-dark-blue); - font-family: "Roboto", sans-serif; - font-weight: 400; -} - -body::before { - background: none; - display: none; -} -body > .container { - padding-top: 30px; -} - -.bg-primary { - background: #fff !important; -} - -/* Navbar */ -.navbar { - box-shadow: 0 4px 8px 0 rgb(0 0 0 / 10%), 0 0 2px 0 rgb(0 0 0 / 10%); - padding-left: 30px; - padding-right: 30px; - height: 80px; -} -.navbar-brand { - background-image: url(/img/logo.svg); - background-size: cover; - color: transparent !important; - padding: 0; - text-shadow: none; - margin-top: -6px; - height: 37px; - width: 175px; -} -.navbar-nav { - margin-left: 50px; -} -.navbar-nav > .navitem, .navbar-nav > .dropdown { - margin-left: 30px; -} -.navbar-nav > 
li .nav-link{ - font-size: 15px; -} - -.navbar-nav .nav-link { - color: #242A36 !important; - font-family: "Inter"; - font-weight: 700; -} - -.navbar-nav.ml-auto > li:first-child { - display: none; -} -.navbar-nav.ml-auto .nav-link{ - font-size: 0px; -} -.navbar-nav.ml-auto .nav-link .fa{ - font-size: 30px; -} -.navbar-nav .dropdown-item { - color: var(--brand-dark-blue); - font-family: "Inter"; - font-weight: 500; - font-size: 14px; - background-color: transparent; -} -.navbar-nav .dropdown-menu > li:hover { - background-color: var(--brand-blue); -} -.navbar-nav .dropdown-menu > li:hover > .dropdown-item { - color: #fff; -} -.navbar-nav .dropdown-submenu:hover > .dropdown-item { - background-color: var(--brand-blue); -} - - -.navbar-nav .dropdown-menu li { - margin: 0px; - padding-top: 5px; - padding-bottom: 5px; -} -.navbar-nav .dropdown-item.active { - background-color: transparent; -} - -.brand-darkblue { - background: #242A36 !important; -} - -.brand-gray { - background: rgb(245,245,245); -} -.brand-blue { - background: #1976D2; -} -.brand-white { - background: #fff; -} -.logo { - height: 444px; -} - -/* Fonts */ -h1, h2, h3, h4, h5, h6 { - color: var(--brand-dark-blue); - margin-bottom: 20px; -} -h1:first-child { - margin-top: 0; -} - -h1 { - font-family: "Inter", sans-serif; - font-size: 32px; - font-weight: 700; - margin-top: 50px; -} - -h2 { - font-family: "Inter", sans-serif; - font-size: 24px; - font-weight: 700; - margin-top: 40px; -} - -h3 { - font-family: "Roboto", sans-serif; - font-size: 20px; - font-weight: 500; - margin-top: 30px; -} - -h4 { - font-family: "Roboto", sans-serif; - font-size: 18px; - font-weight: 400; - margin-top: 20px; -} - -.main li { - margin-bottom: 15px; -} - - -.btn { - font-family: "Roboto", sans-serif; - font-size: 14px; -} -.btn-primary { - background: #1976D2; - border: none; -} - -.hero { - padding-top: 100px; - padding-bottom: 100px; -} - -.hero .heading { - font-size: 56px; - font-weight: 900; - line-height: 68px; -} - 
-.hero .btn { - font-size: 16px; - padding: 10px 20px; -} - -.hero .illustration { - margin-left: 35px; -} - - -.bullets .heading, .module .heading { - font-family: "Inter", sans-serif; - font-size: 26px; - font-weight: 700; -} -.bullets .row { - margin-bottom: 60px; -} -.bullets .caption { - padding-top: 10px; - padding-right: 30px; -} -.icon { - height: 25px; - margin-right: 5px; - vertical-align: -3px; -} - -.caption { - font-weight: 400; - font-size: 17px; - line-height: 24px; - color: var(--caption-color); -} - -.module { - margin-top: 80px; - margin-bottom: 80px; - padding-top: 50px; - padding-bottom: 50px; -} - -.module .caption { - padding-top: 10px; - padding-right: 80px; -} -.module .screenshot { - width: 600px; - height: 337px; - box-shadow:inset 0 1px 0 rgba(255,255,255,.6), 0 22px 70px 4px rgba(0,0,0,0.56), 0 0 0 1px rgba(0, 0, 0, 0.0); - border-radius: 5px; - background-size: cover; -} - -/* Footer */ -footer { - margin: 0px; - padding: 0px !important; - text-align: left; - font-weight: 400; -} -.footer { - background-color: var(--brand-dark-blue); - padding: 50px 0px; - color: #fff; - font-size: 14px; - margin-top: 50px; -} -.footer a { - color: var(--brand-lt-gray); -} -.footer .subhead { - font-weight: 700; - color: #fff; - font-size: 15px; - margin-bottom: 10px; -} -.footer .red { - color: var(--brand-red); -} -.footer .fr { - text-align: right; -} - -/* TOC menu */ -.toc ul { - list-style: none; - padding: 0px; -} -.toc > ul > li li { - padding-left: 15px; - font-weight: 400; - font-size: 14px; -} -.toc > ul > li { - font-size: 15px; - font-weight: 500; -} -.toc .toc-link { - margin-bottom: 5px; - display: block; - color: var(--brand-dark-blue); -} -.toc .toc-link.active { - font-weight: 700; -} - -/* Homepage Overrides */ -.homepage > .container { - max-width: none; -} -.homepage .toc { - display: none; -} - -/* Comparison block */ -.compare { - background-color: var(--brand-lt-blue); - padding-top: 80px; - padding-bottom: 80px; - margin: 0px 
-15px; -} -.compare .heading { - margin-bottom: 30px; - margin-top: 0px; -} -.compare .bubble { - background: #fff; - border-radius: 10px; - padding: 30px; - height: 100%; -} - -.compare .caption { - font-size: 15px; - line-height: 22px; -} diff --git a/docs/docs/extend.md b/docs/docs/extend.md deleted file mode 100644 index 82895f667..000000000 --- a/docs/docs/extend.md +++ /dev/null @@ -1,61 +0,0 @@ -# Extending Conductor - -## Backend -Conductor provides a pluggable backend. The current implementation uses Dynomite. - -There are 4 interfaces that need to be implemented for each backend: - -```java -//Store for workflow and task definitions -com.netflix.conductor.dao.MetadataDAO -``` - -```java -//Store for workflow executions -com.netflix.conductor.dao.ExecutionDAO -``` - -```java -//Index for workflow executions -com.netflix.conductor.dao.IndexDAO -``` - -```java -//Queue provider for tasks -com.netflix.conductor.dao.QueueDAO -``` - -It is possible to mix and match different implementations for each of these. -For example, SQS for queueing and a relational store for others. - - -## System Tasks -To create system tasks follow the steps below: - -* Extend ```com.netflix.conductor.core.execution.tasks.WorkflowSystemTask``` -* Instantiate the new class as part of the startup (eager singleton) -* Implement the ```TaskMapper``` [interface](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapper.java) -* Add this implementation to the map identified by [TaskMappers](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java#L70) - -## External Payload Storage -To configure conductor to externalize the storage of large payloads: - -* Implement the `ExternalPayloadStorage` [interface](https://github.com/Netflix/conductor/blob/master/common/src/main/java/com/netflix/conductor/common/utils/ExternalPayloadStorage.java). 
-* Add the storage option to the enum [here](https://github.com/Netflix/conductor/blob/master/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java#L39). -* Set this JVM system property ```workflow.external.payload.storage``` to the value of the enum element added above. -* Add a binding similar to [this](https://github.com/Netflix/conductor/blob/master/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java#L120-L127). - -## Workflow Status Listener -To provide a notification mechanism upon completion/termination of workflows: - -* Implement the ```WorkflowStatusListener``` [interface](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/execution/WorkflowStatusListener.java) -* This can be configured to plugin custom notification/eventing upon workflows reaching a terminal state. - -## Locking Service - -By default, Conductor Server module loads Zookeeper lock module. If you'd like to provide your own locking implementation module, -for eg., with Dynomite and Redlock: - -* Implement ```Lock``` interface. -* Add a binding similar to [this](https://github.com/Netflix/conductor/blob/master/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java#L115-L129) -* Enable locking service: ```conductor.app.workflowExecutionLockEnabled: true``` diff --git a/docs/docs/externalpayloadstorage.md b/docs/docs/externalpayloadstorage.md deleted file mode 100644 index 097f0b8fc..000000000 --- a/docs/docs/externalpayloadstorage.md +++ /dev/null @@ -1,115 +0,0 @@ -# External Payload Storage - -!!!warning - The external payload storage is currently only implemented to be used to by the Java client. Client libraries in other languages need to be modified to enable this. - Contributions are welcomed. - -## Context -Conductor can be configured to enforce barriers on the size of workflow and task payloads for both input and output. 
-These barriers can be used as safeguards to prevent the usage of conductor as a data persistence system and to reduce the pressure on its datastore. - -## Barriers -Conductor typically applies two kinds of barriers: - -* Soft Barrier -* Hard Barrier - - -#### Soft Barrier - -The soft barrier is used to alleviate pressure on the conductor datastore. In some special workflow use-cases, the size of the payload is warranted enough to be stored as part of the workflow execution. -In such cases, conductor externalizes the storage of such payloads to S3 and uploads/downloads to/from S3 as needed during the execution. This process is completely transparent to the user/worker process. - - -#### Hard Barrier -The hard barriers are enforced to safeguard the conductor backend from the pressure of having to persist and deal with voluminous data which is not essential for workflow execution. -In such cases, conductor will reject such payloads and will terminate/fail the workflow execution with the reasonForIncompletion set to an appropriate error message detailing the payload size. 
- -## Usage - -### Barriers setup - -Set the following properties to the desired values in the JVM system properties: - -| Property | Description | default value | -| -- | -- | -- | -| conductor.workflow.input.payload.threshold.kb | Soft barrier for workflow input payload in KB | 5120 | -| conductor.max.workflow.input.payload.threshold.kb | Hard barrier for workflow input payload in KB | 10240 | -| conductor.workflow.output.payload.threshold.kb | Soft barrier for workflow output payload in KB | 5120 | -| conductor.max.workflow.output.payload.threshold.kb | Hard barrier for workflow output payload in KB | 10240 | -| conductor.task.input.payload.threshold.kb | Soft barrier for task input payload in KB | 3072 | -| conductor.max.task.input.payload.threshold.kb | Hard barrier for task input payload in KB | 10240 | -| conductor.task.output.payload.threshold.kb | Soft barrier for task output payload in KB | 3072 | -| conductor.max.task.output.payload.threshold.kb | Hard barrier for task output payload in KB | 10240 | - -### Amazon S3 - -Conductor provides an implementation of [Amazon S3](https://aws.amazon.com/s3/) used to externalize large payload storage. -Set the following property in the JVM system properties: -``` -workflow.external.payload.storage=S3 -``` - -!!!note - This [implementation](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/utils/S3PayloadStorage.java#L44-L45) assumes that S3 access is configured on the instance. 
- -Set the following properties to the desired values in the JVM system properties: - -| Property | Description | default value | -| --- | --- | --- | -| workflow.external.payload.storage.s3.bucket | S3 bucket where the payloads will be stored | | -| workflow.external.payload.storage.s3.signedurlexpirationseconds | The expiration time in seconds of the signed url for the payload | 5 | - -The payloads will be stored in the bucket configured above in a `UUID.json` file at locations determined by the type of the payload. See [here](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/utils/S3PayloadStorage.java#L149-L167) for information about how the object key is determined. - -### Azure Blob Storage - -ProductLive provides an implementation of [Azure Blob Storage](https://azure.microsoft.com/services/storage/blobs/) used to externalize large payload storage. - -To build conductor with azure blob feature read the [README.md](https://github.com/Netflix/conductor/blob/master/azureblob-storage/README.md) in `azureblob-storage` module - -!!!note - This implementation assumes that you have an [Azure Blob Storage account's connection string or SAS Token](https://github.com/Azure/azure-sdk-for-java/blob/master/sdk/storage/azure-storage-blob/README.md). - If you want signed url to expired you must specify a Connection String. - -Set the following properties to the desired values in the JVM system properties: - -| Property | Description | default value | -| --- | --- | --- | -| workflow.external.payload.storage.azure_blob.connection_string | Azure Blob Storage connection string. Required to sign Url. | | -| workflow.external.payload.storage.azure_blob.endpoint | Azure Blob Storage endpoint. Optional if connection_string is set. | | -| workflow.external.payload.storage.azure_blob.sas_token | Azure Blob Storage SAS Token. Must have permissions `Read` and `Write` on Resource `Object` on Service `Blob`. 
Optional if connection_string is set. | | -| workflow.external.payload.storage.azure_blob.container_name | Azure Blob Storage container where the payloads will be stored | `conductor-payloads` | -| workflow.external.payload.storage.azure_blob.signedurlexpirationseconds | The expiration time in seconds of the signed url for the payload | 5 | -| workflow.external.payload.storage.azure_blob.workflow_input_path | Path prefix where workflows input will be stored with an random UUID filename | workflow/input/ | -| workflow.external.payload.storage.azure_blob.workflow_output_path | Path prefix where workflows output will be stored with an random UUID filename | workflow/output/ | -| workflow.external.payload.storage.azure_blob.task_input_path | Path prefix where tasks input will be stored with an random UUID filename | task/input/ | -| workflow.external.payload.storage.azure_blob.task_output_path | Path prefix where tasks output will be stored with an random UUID filename | task/output/ | - -The payloads will be stored as done in [Amazon S3](https://github.com/Netflix/conductor/blob/master/core/src/main/java/com/netflix/conductor/core/utils/S3PayloadStorage.java#L149-L167). - -### PostgreSQL Storage - -Frinx provides an implementation of [PostgreSQL Storage](https://www.postgresql.org/) used to externalize large payload storage. - -!!!note -This implementation assumes that you have an [PostgreSQL database server with all required credentials](https://jdbc.postgresql.org/documentation/94/connect.html). 
- -Set the following properties to your application.properties: - -| Property | Description | default value | -|-------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------| -| conductor.external-payload-storage.postgres.conductor-url | URL, that can be used to pull the json configurations, that will be downloaded from PostgreSQL to the conductor server. For example: for local development it is `http://localhost:8080` | `""` | -| conductor.external-payload-storage.postgres.url | PostgreSQL database connection URL. Required to connect to database. | | -| conductor.external-payload-storage.postgres.username | Username for connecting to PostgreSQL database. Required to connect to database. | | -| conductor.external-payload-storage.postgres.password | Password for connecting to PostgreSQL database. Required to connect to database. | | -| conductor.external-payload-storage.postgres.table-name | The PostgreSQL schema and table name where the payloads will be stored | `external.external_payload` | -| conductor.external-payload-storage.postgres.max-data-rows | Maximum count of data rows in PostgreSQL database. After overcoming this limit, the oldest data will be deleted. | Long.MAX_VALUE (9223372036854775807L) | -| conductor.external-payload-storage.postgres.max-data-days | Maximum count of days of data age in PostgreSQL database. After overcoming limit, the oldest data will be deleted. | 0 | -| conductor.external-payload-storage.postgres.max-data-months | Maximum count of months of data age in PostgreSQL database. After overcoming limit, the oldest data will be deleted. | 0 | -| conductor.external-payload-storage.postgres.max-data-years | Maximum count of years of data age in PostgreSQL database. 
After overcoming limit, the oldest data will be deleted. | 1 | - -The maximum date age for fields in the database will be: `years + months + days` -The payloads will be stored in PostgreSQL database with key (externalPayloadPath) `UUID.json` and you can generate -URI for this data using `external-postgres-payload-resource` rest controller. -To make this URI work correctly, you must correctly set the conductor-url property. \ No newline at end of file diff --git a/docs/docs/faq.md b/docs/docs/faq.md deleted file mode 100644 index 858d0ecfe..000000000 --- a/docs/docs/faq.md +++ /dev/null @@ -1,84 +0,0 @@ -# Frequently asked Questions - -### How do you schedule a task to be put in the queue after some time (e.g. 1 hour, 1 day etc.) - -After polling for the task update the status of the task to `IN_PROGRESS` and set the `callbackAfterSeconds` value to the desired time. The task will remain in the queue until the specified second before worker polling for it will receive it again. - -If there is a timeout set for the task, and the `callbackAfterSeconds` exceeds the timeout value, it will result in task being TIMED_OUT. - - -### How long can a workflow be in running state? Can I have a workflow that keeps running for days or months? - -Yes. As long as the timeouts on the tasks are set to handle long running workflows, it will stay in running state. - - -### My workflow fails to start with missing task error - -Ensure all the tasks are registered via `/metadata/taskdefs` APIs. Add any missing task definition (as reported in the error) and try again. - - -### Where does my worker run? How does conductor run my tasks? - -Conductor does not run the workers. When a task is scheduled, it is put into the queue maintained by Conductor. Workers are required to poll for tasks using `/tasks/poll` API at periodic interval, execute the business logic for the task and report back the results using `POST /tasks` API call. 
-Conductor, however will run [system tasks](/configuration/systask.html) on the Conductor server. - - -### How can I schedule workflows to run at a specific time? - -Netflix Conductor itself does not provide any scheduling mechanism. But there is a community project [_Schedule Conductor Workflows_](https://github.com/jas34/scheduledwf) which provides workflow scheduling capability as a pluggable module as well as workflow server. -Other way is you can use any of the available scheduling systems to make REST calls to Conductor to start a workflow. Alternatively, publish a message to a supported eventing system like SQS to trigger a workflow. -More details about [eventing](/configuration/eventhandlers.html). - - -### How do I setup Dynomite cluster? - -Visit Dynomite's [Github page](https://github.com/Netflix/dynomite) to find details on setup and support mechanism. - - -### Can I use conductor with Ruby / Go / Python? - -Yes. Workers can be written any language as long as they can poll and update the task results via HTTP endpoints. - -Conductor provides frameworks for Java and Python to simplify the task of polling and updating the status back to Conductor server. - -**Note:** Python and Go clients have been contributed by the community. - - -### How can I get help with Dynomite? - -Visit Dynomite's [Github page](https://github.com/Netflix/dynomite) to find details on setup and support mechanism. - - -### My workflow is running and the task is SCHEDULED but it is not being processed. - -Make sure that the worker is actively polling for this task. Navigate to the `Task Queues` tab on the Conductor UI and select your task name in the search box. Ensure that `Last Poll Time` for this task is current. - -In Conductor 3.x, ```conductor.redis.availabilityZone``` defaults to ```us-east-1c```. Ensure that this matches where your workers are, and that it also matches```conductor.redis.hosts```. - -### How do I configure a notification when my workflow completes or fails? 
- -When a workflow fails, you can configure a "failure workflow" to run using the```failureWorkflow``` parameter. By default, three parameters are passed: - -* reason -* workflowId: use this to pull the details of the failed workflow. -* failureStatus - -You can also use the Workflow Status Listener: - -* Set the workflowStatusListenerEnabled field in your workflow definition to true which enables [notifications](/configuration/workflowdef.html#workflow-notifications). -* Add a custom implementation of the Workflow Status Listener. Refer [this](/extend.html#workflow-status-listener). -* This notification can be implemented in such a way as to either send a notification to an external system or to send an event on the conductor queue to complete/fail another task in another workflow as described [here](/configuration/eventhandlers.html). - -Refer to this [documentation](/configuration/workflowdef.html#workflow-notifications) to extend conductor to send out events/notifications upon workflow completion/failure. - - - - -### I want my worker to stop polling and executing tasks when the process is being terminated. (Java client) - -In a `PreDestroy` block within your application, call the `shutdown()` method on the `TaskRunnerConfigurer` instance that you have created to facilitate a graceful shutdown of your worker in case the process is being terminated. - - -### Can I exit early from a task without executing the configured automatic retries in the task definition? - -Set the status to `FAILED_WITH_TERMINAL_ERROR` in the TaskResult object within your worker. This would mark the task as FAILED and fail the workflow without retrying the task as a fail-fast mechanism. 
diff --git a/docs/docs/gettingstarted/basicconcepts.md b/docs/docs/gettingstarted/basicconcepts.md deleted file mode 100644 index 6592c0423..000000000 --- a/docs/docs/gettingstarted/basicconcepts.md +++ /dev/null @@ -1,37 +0,0 @@ -# Basic Concepts - -## Definitions (aka Metadata or Blueprints) -Conductor definitions are like class definitions in OOP paradigm, or templates. You define this once, and use for each workflow execution. Definitions to Executions have 1:N relationship. - -## Tasks -Tasks are the building blocks of Workflow. There must be at least one task in a Workflow. -Tasks can be categorized into two types: - - * [System tasks](/configuration/systask.html) - executed by Conductor server. - * [Worker tasks](/configuration/workerdef.html) - executed by your own workers. - -## Workflow -A Workflow is the container of your process flow. It could include several different types of Tasks, Sub-Workflows, inputs and outputs connected to each other, to effectively achieve the desired result. The tasks are either control tasks (fork, conditional etc) or application tasks (e.g. encode a file) that are executed on a remote machine. - -[Detailed description](/configuration/workflowdef.html) - -## Task Definition -Task definitions help define Task level parameters like inputs and outputs, timeouts, retries etc. - -* All tasks need to be registered before they can be used by active workflows. -* A task can be re-used within multiple workflows. - -[Detailed description](/configuration/taskdef.html) - -## System Tasks -System tasks are executed within the JVM of the Conductor server and managed by Conductor for its execution and scalability. - -See [Systems tasks](/configuration/systask.html) for list of available Task types, and instructions for using them. - -!!! Note - Conductor provides an API to create user defined tasks that are executed in the same JVM as the engine. 
See [WorkflowSystemTask](https://github.com/Netflix/conductor/blob/main/core/src/main/java/com/netflix/conductor/core/execution/tasks/WorkflowSystemTask.java) interface for details. - -## Worker Tasks -Worker tasks are implemented by your application(s) and run in a separate environment from Conductor. The worker tasks can be implemented in any language. These tasks talk to Conductor server via REST/gRPC to poll for tasks and update its status after execution. - -Worker tasks are identified by task type __SIMPLE__ in the blueprint. diff --git a/docs/docs/gettingstarted/client.md b/docs/docs/gettingstarted/client.md deleted file mode 100644 index fd7f537bd..000000000 --- a/docs/docs/gettingstarted/client.md +++ /dev/null @@ -1,74 +0,0 @@ -# Using the Client -Conductor tasks that are executed by remote workers communicate over HTTP endpoints/gRPC to poll for the task and update the status of the execution. - -## Client APIs -Conductor provides the following java clients to interact with the various APIs - -| Client | Usage | -|-----------------|---------------------------------------------------------------------------| -| Metadata Client | Register / Update workflow and task definitions | -| Workflow Client | Start a new workflow / Get execution status of a workflow | -| Task Client | Poll for task / Update task result after execution / Get status of a task | - -## Java - -#### Worker -Conductor provides an automated framework to poll for tasks, manage the execution thread and update the status of the execution back to the server. - -Implement the [Worker](https://github.com/Netflix/conductor/blob/main/client/src/main/java/com/netflix/conductor/client/worker/Worker.java) interface to execute the task. - -#### TaskRunnerConfigurer -The TaskRunnerConfigurer can be used to register the worker(s) and initialize the polling loop. -Manages the task workers thread pool and server communication (poll and task update). 
- -Use the [Builder](https://github.com/Netflix/conductor/blob/master/client/src/main/java/com/netflix/conductor/client/automator/TaskRunnerConfigurer.java#L62) to create an instance of the TaskRunnerConfigurer. The builder accepts the following parameters: - -Initialize the Builder with the following: - - TaskClient - | TaskClient used to communicate to the Conductor server | -| Workers | Workers that will be used for polling work and task execution. | - -| Parameter | Description | Default | -|--------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------| -| withEurekaClient | EurekaClient is used to identify if the server is in discovery or not. When the server goes out of discovery, the polling is stopped unless `pollOutOfDiscovery` is set to true. If passed null, discovery check is not done. | provided by platform | -| withThreadCount | Number of threads assigned to the workers. Should be at-least the size of taskWorkers to avoid starvation in a busy system. | Number of registered workers | -| withSleepWhenRetry | Time in milliseconds, for which the thread should sleep when task update call fails, before retrying the operation. | 500 | -| withUpdateRetryCount | Number of attempts to be made when updating task status when update status call fails. | 3 | -| withWorkerNamePrefix | String prefix that will be used for all the workers. | workflow-worker- | -| withShutdownGracePeriodSeconds | Waiting seconds before forcing shutdown of your worker | 10 | - -Once an instance is created, call `init()` method to initialize the TaskPollExecutor and begin the polling and execution of tasks. - -!!! 
tip "Note" - To ensure that the TaskRunnerConfigurer stops polling for tasks when the instance becomes unhealthy, call the provided `shutdown()` hook in a `PreDestroy` block. - -**Properties** -The worker behavior can be further controlled by using these properties: - -| Property | Type | Description | Default | -|--------------------|---------|--------------------------------------------------------------------------------------------------------------------------------------------|---------| -| paused | boolean | If set to true, the worker stops polling. | false | -| pollInterval | int | Interval in milliseconds at which the server should be polled for tasks. | 1000 | -| pollOutOfDiscovery | boolean | If set to true, the instance will poll for tasks regardless of the discovery
    status. This is useful while running on a dev machine. | false | - -Further, these properties can be set either by Worker implementation or by setting the following system properties in the JVM: - -| Name | Description | -|---------------------------------------------|------------------------------------------------------------------| -| `conductor.worker.` | Applies to ALL the workers in the JVM. | -| `conductor.worker..` | Applies to the specified worker. Overrides the global property. | - -**Examples** - -* [Sample Worker Implementation](https://github.com/Netflix/conductor/blob/main/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java) -* [Example](https://github.com/Netflix/conductor/blob/main/client/src/test/java/com/netflix/conductor/client/sample/Main.java) - - -## Python -[https://github.com/Netflix/conductor/tree/main/polyglot-clients/python](https://github.com/Netflix/conductor/tree/main/polyglot-clients/python) - -Follow the example as documented in the readme or take a look at [kitchensink_workers.py](https://github.com/Netflix/conductor/blob/main/polyglot-clients/python/kitchensink_workers.py) - -!!!warning - Python client is a community contribution. We encourage you to test it out and let us know the feedback. Pull Requests with fixes or enhancements are welcomed! diff --git a/docs/docs/gettingstarted/docker.md b/docs/docs/gettingstarted/docker.md deleted file mode 100644 index ad202dc00..000000000 --- a/docs/docs/gettingstarted/docker.md +++ /dev/null @@ -1,171 +0,0 @@ - -# Running Conductor using Docker - -In this article we will explore how you can set up Netflix Conductor on your local machine using Docker compose. -The docker compose will bring up the following: - -1. Conductor API Server -2. Conductor UI -3. Elasticsearch for searching workflows - -## Prerequisites -1. Docker: [https://docs.docker.com/get-docker/](https://docs.docker.com/get-docker/) -2. 
Recommended host with CPU and RAM to be able to run multiple docker containers (at-least 16GB RAM) - -## Steps - -### 1. Clone the Conductor Code - -```shell -$ git clone https://github.com/Netflix/conductor.git -``` - -### 2. Build the Docker Compose - -```shell -$ cd conductor -conductor $ cd docker -docker $ docker-compose build -``` - -### 3. Run Docker Compose - -```shell -docker $ docker-compose up -``` - -Once up and running, you will see the following in your Docker dashboard: - -1. Elasticsearch -2. Conductor UI -3. Conductor Server - -You can access the UI & Server on your browser to verify that they are running correctly: - -#### Conductor Server URL -[http://localhost:8080](http://localhost:8080) - - - -#### Conductor UI URL -[http://localhost:5000/](http://localhost:5000) - - - - -### 4. Exiting Compose -`Ctrl+c` will exit docker compose. - -To ensure images are stopped execute: `docker-compose down`. - -## Alternative Persistence Engines -By default `docker-compose.yaml` uses `config-local.properties`. This configures the `memory` database, where data is lost when the server terminates. This configuration is useful for testing or demo only. - -A selection of `docker-compose-*.yaml` and `config-*.properties` files are provided demonstrating the use of alternative persistence engines. - -| File | Containers | -|--------------------------------|-----------------------------------------------------------------------------------------| -| docker-compose.yaml |

    1. In Memory Conductor Server
    2. Elasticsearch
    3. UI
    | -| docker-compose-dynomite.yaml |
    1. Conductor Server
    2. Elasticsearch
    3. UI
    4. Dynomite Redis for persistence
    | -| docker-compose-postgres.yaml |
    1. Conductor Server
    2. Elasticsearch
    3. UI
    4. Postgres persistence
    | -| docker-compose-prometheus.yaml | Brings up Prometheus server | - -For example this will start the server instance backed by a PostgreSQL DB. -``` -docker-compose -f docker-compose.yaml -f docker-compose-postgres.yaml up -``` - -## Standalone Server Image -To build and run the server image, without using `docker-compose`, from the `docker` directory execute: -``` -docker build -t conductor:server -f server/Dockerfile ../ -docker run -p 8080:8080 -d --name conductor_server conductor:server -``` -This builds the image `conductor:server` and runs it in a container named `conductor_server`. The API should now be accessible at `localhost:8080`. - -To 'login' to the running container, use the command: -``` -docker exec -it conductor_server /bin/sh -``` - -## Standalone UI Image -From the `docker` directory, -``` -docker build -t conductor:ui -f ui/Dockerfile ../ -docker run -p 5000:5000 -d --name conductor_ui conductor:ui -``` -This builds the image `conductor:ui` and runs it in a container named `conductor_ui`. The UI should now be accessible at `localhost:5000`. - -### Note -* In order for the UI to do anything useful the Conductor Server must already be running on port 8080, either in a Docker container (see above), or running directly in the local JRE. -* Additionally, significant parts of the UI will not be functional without Elastisearch being available. Using the `docker-compose` approach alleviates these considerations. - -## Monitoring with Prometheus - -Start Prometheus with: -`docker-compose -f docker-compose-prometheus.yaml up -d` - -Go to [http://127.0.0.1:9090](http://127.0.0.1:9090). - -## Combined Server & UI Docker Image -This image at `/docker/serverAndUI` is provided to illustrate starting both the server & UI within the same container. The UI is hosted using nginx. 
- -### Building the combined image -`docker build -t conductor:serverAndUI .` - -### Running the combined image - - With interal DB: `docker run -p 8080:8080 -p 80:5000 -d -t conductor:serverAndUI` - - With external DB: `docker run -p 8080:8080 -p 80:5000 -d -t -e "CONFIG_PROP=config.properties" conductor:serverAndUI` - - - -## Potential problem when using Docker Images - -#### Not enough memory - - 1. You will need at least 16 GB of memory to run everything. You can modify the docker compose to skip using - Elasticsearch if you have no option to run this with your memory options. - 2. To disable Elasticsearch using Docker Compose - follow the steps listed here: **TODO LINK** - -#### Elasticsearch fails to come up in arm64 based CPU machines - - 1. As of writing this article, Conductor relies on 6.8.x version of Elasticsearch. This version doesn't have an - arm64 based Docker image. You will need to use Elasticsearch 7.x which requires a bit of customization to get up - and running - -#### Elasticsearch remains in Yellow health - - 1. When you run Elasticsearch, sometimes the health remains in Yellow state. Conductor server by default requires - Green state to run when indexing is enabled. To work around this, you can use the following property: - `conductor.elasticsearch.clusteHealthColor=yellow` Reference: [Issue 2262](https://github.com/Netflix/conductor/issues/2262) - - - -#### Elasticsearch timeout -Standalone(single node) elasticsearch has a yellow status which will cause timeout for conductor server (Required: Green). -Spin up a cluster (more than one) to prevent timeout or use config option `conductor.elasticsearch.clusteHealthColor=yellow`. - -See issue: https://github.com/Netflix/conductor/issues/2262 - -#### Changes in config-*.properties do not take effect -Config is copy into image during docker build. You have to rebuild the image or better, link a volume to it to reflect new changes. 
- -#### To troubleshoot a failed startup -Check the log of the server, which is located at `/app/logs` (default directory in dockerfile) - -#### Unable to access to conductor:server API on port 8080 -It may takes some time for conductor server to start. Please check server log for potential error. - -#### Elasticsearch -Elasticsearch is optional, please be aware that disable it will make most of the conductor UI not functional. - -##### How to enable Elasticsearch -* Set `workflow.indexing.enabled=true` in your_config.properties -* Add config related to elasticsearch - E.g.: `conductor.elasticsearch.url=http://es:9200` - -##### How to disable Elasticsearch -* Set `workflow.indexing.enabled=false` in your_config.properties -* Comment out all the config related to elasticsearch -E.g.: `conductor.elasticsearch.url=http://es:9200` - diff --git a/docs/docs/gettingstarted/intro.md b/docs/docs/gettingstarted/intro.md deleted file mode 100644 index 789a7db4b..000000000 --- a/docs/docs/gettingstarted/intro.md +++ /dev/null @@ -1,28 +0,0 @@ -# Why Conductor? -## Conductor was built to help Netflix orchestrate microservices based process flows with the following features: - -* A distributed server ecosystem, which stores workflow state information efficiently. -* Allow creation of process / business flows in which each individual task can be implemented by the same / different microservices. -* A DAG (Directed Acyclic Graph) based workflow definition. -* Workflow definitions are decoupled from the service implementations. -* Provide visibility and traceability into these process flows. -* Simple interface to connect workers, which execute the tasks in workflows. -* Workers are language agnostic, allowing each microservice to be written in the language most suited for the service. -* Full operational control over workflows with the ability to pause, resume, restart, retry and terminate. -* Allow greater reuse of existing microservices providing an easier path for onboarding. 
-* User interface to visualize, replay and search the process flows. -* Ability to scale to millions of concurrently running process flows. -* Backed by a queuing service abstracted from the clients. -* Be able to operate on HTTP or other transports e.g. gRPC. -* Event handlers to control workflows via external actions. -* Client implementations in Java, Python and other languages. -* Various configurable properties with sensible defaults to fine tune workflow and task executions like rate limiting, concurrent execution limits etc. - -## Why not peer to peer choreography? - -With peer to peer task choreography, we found it was harder to scale with growing business needs and complexities. -Pub/sub model worked for simplest of the flows, but quickly highlighted some of the issues associated with the approach: - -* Process flows are “embedded” within the code of multiple application. -* Often, there is tight coupling and assumptions around input/output, SLAs etc, making it harder to adapt to changing needs. -* Almost no way to systematically answer “How much are we done with process X”? diff --git a/docs/docs/gettingstarted/source.md b/docs/docs/gettingstarted/source.md deleted file mode 100644 index 360caae59..000000000 --- a/docs/docs/gettingstarted/source.md +++ /dev/null @@ -1,95 +0,0 @@ -# Building Conductor From Source -## Build and Run - -In this article we will explore how you can set up Netflix Conductor on your local machine for trying out some of its -features. - -### Prerequisites -1. JDK 11 or greater -2. (Optional) Docker if you want to run tests. You can install docker from [here](https://www.docker.com/get-started/). -3. Node for building and running UI. Instructions at [https://nodejs.org](https://nodejs.org). -4. Yarn for building and running UI. Instructions at [https://classic.yarnpkg.com/en/docs/install](https://classic.yarnpkg.com/en/docs/install). - -### Steps to build Conductor Server - -#### 1. 
Checkout the code -Clone conductor code from the repo: https://github.com/Netflix/conductor - -```shell -$ git clone https://github.com/Netflix/conductor.git -``` -#### 2. Build and run Server - -> **NOTE for Mac users**: If you are using a new Mac with an Apple Silicon Chip, you must make a small change to ```conductor/grpc/build.gradle``` - adding "osx-x86_64" to two lines: -``` -protobuf { - protoc { - artifact = "com.google.protobuf:protoc:${revProtoBuf}:osx-x86_64" - } - plugins { - grpc { - artifact = "io.grpc:protoc-gen-grpc-java:${revGrpc}:osx-x86_64" - } - } -... -} -``` - -You may also need to install rosetta: - -```shell -softwareupdate --install-rosetta -``` - -```shell -$ cd conductor -conductor $ cd server -server $ ../gradlew bootRun -``` - -Navigate to the swagger API docs: -[http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config](http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config) - - - -## Download and Run -As an alternative to building from source, you can download and run the pre-compiled JAR. - -```shell -export CONDUCTOR_VER=3.3.4 -export REPO_URL=https://repo1.maven.org/maven2/com/netflix/conductor/conductor-server -curl $REPO_URL/$CONDUCTOR_VER/conductor-server-$CONDUCTOR_VER-boot.jar \ ---output conductor-server-$CONDUCTOR_VER-boot.jar; java -jar conductor-server-$CONDUCTOR_VER-boot.jar -``` -Navigate to the swagger URL: [http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config](http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config) - - - -## Build and Run UI - -### Conductor UI from Source - -The UI is a standard `create-react-app` React Single Page Application (SPA). To get started, with Node 14 and `yarn` installed, first run `yarn install` from within the `/ui` directory to retrieve package dependencies. 
- - -```shell -$ cd conductor/ui -ui $ yarn install -``` - -There is no need to "build" the project unless you require compiled assets to host on a production web server. If the latter is true, the project can be built with the command `yarn build`. - -To run the UI on the bundled development server, run `yarn run start`. Navigate your browser to `http://localhost:5000`. The server must already be running on port 8080. - -```shell -ui $ yarn run start -``` - -Launch UI [http://localhost:5000](http://localhost:5000) - - - -## Summary -1. By default in-memory persistance is used, so any workflows created or excuted will be wiped out once the server is terminated. -2. Without indexing configured, the search functionality in UI will not work and will result an empty set. -3. See how to install Conductor using [Docker](docker.md) with persistence and indexing. \ No newline at end of file diff --git a/docs/docs/gettingstarted/startworkflow.md b/docs/docs/gettingstarted/startworkflow.md deleted file mode 100644 index 2b285c0a7..000000000 --- a/docs/docs/gettingstarted/startworkflow.md +++ /dev/null @@ -1,88 +0,0 @@ -# Starting a Workflow -## Start Workflow Endpoint -When starting a Workflow execution with a registered definition, `/workflow` accepts following parameters: - -| Field | Description | Notes | -|:--------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------| -| name | Name of the Workflow. 
MUST be registered with Conductor before starting workflow | | -| version | Workflow version | defaults to latest available version | -| input | JSON object with key value params, that can be used by downstream tasks | See [Wiring Inputs and Outputs](/configuration/workflowdef.html#wiring-inputs-and-outputs) for details | -| correlationId | Unique Id that correlates multiple Workflow executions | optional | -| taskToDomain | See [Task Domains](/configuration/taskdomains.html) for more information. | optional | -| workflowDef | An adhoc [Workflow Definition](/configuration/workflowdef.html) to run, without registering. See [Dynamic Workflows](#dynamic-workflows). | optional | -| externalInputPayloadStoragePath | This is taken care of by Java client. See [External Payload Storage](/externalpayloadstorage.html) for more info. | optional | -| priority | Priority level for the tasks within this workflow execution. Possible values are between 0 - 99. | optional | - -**Example:** - -Send a `POST` request to `/workflow` with payload like: -```json -{ - "name": "encode_and_deploy", - "version": 1, - "correlationId": "my_unique_correlation_id", - "input": { - "param1": "value1", - "param2": "value2" - } -} -``` - -## Dynamic Workflows - -If the need arises to run a one-time workflow, and it doesn't make sense to register Task and Workflow definitions in Conductor Server, as it could change dynamically for each execution, dynamic workflow executions can be used. - -This enables you to provide a workflow definition embedded with the required task definitions to the Start Workflow Request in the `workflowDef` parameter, avoiding the need to register the blueprints before execution. 
- -**Example:** - -Send a `POST` request to `/workflow` with payload like: -```json -{ - "name": "my_adhoc_unregistered_workflow", - "workflowDef": { - "ownerApp": "my_owner_app", - "ownerEmail": "my_owner_email@test.com", - "createdBy": "my_username", - "name": "my_adhoc_unregistered_workflow", - "description": "Test Workflow setup", - "version": 1, - "tasks": [ - { - "name": "fetch_data", - "type": "HTTP", - "taskReferenceName": "fetch_data", - "inputParameters": { - "http_request": { - "connectionTimeOut": "3600", - "readTimeOut": "3600", - "uri": "${workflow.input.uri}", - "method": "GET", - "accept": "application/json", - "content-Type": "application/json", - "headers": { - } - } - }, - "taskDefinition": { - "name": "fetch_data", - "retryCount": 0, - "timeoutSeconds": 3600, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 0, - "responseTimeoutSeconds": 3000 - } - } - ], - "outputParameters": { - } - }, - "input": { - "uri": "http://www.google.com" - } -} -``` - -!!! Note - If the `taskDefinition` is defined with Metadata API, it doesn't have to be added in above dynamic workflow definition. diff --git a/docs/docs/gettingstarted/steps.md b/docs/docs/gettingstarted/steps.md deleted file mode 100644 index 695aa6277..000000000 --- a/docs/docs/gettingstarted/steps.md +++ /dev/null @@ -1,36 +0,0 @@ - -# High Level Steps -Steps required for a new workflow to be registered and get executed - -1. Define task definitions used by the workflow. -2. Create the workflow definition -3. Create task worker(s) that polls for scheduled tasks at regular interval - -### Trigger Workflow Execution - -``` -POST /workflow/{name} -{ - ... 
//json payload as workflow input -} -``` - -### Polling for a task - -``` -GET /tasks/poll/batch/{taskType} -``` - -### Update task status - -``` -POST /tasks -{ - "outputData": { - "encodeResult":"success", - "location": "http://cdn.example.com/file/location.png" - //any task specific output - }, - "status": "COMPLETED" -} -``` diff --git a/docs/docs/googleba55068fa3e0e553.html b/docs/docs/googleba55068fa3e0e553.html deleted file mode 100644 index 0344c087e..000000000 --- a/docs/docs/googleba55068fa3e0e553.html +++ /dev/null @@ -1 +0,0 @@ -google-site-verification: googleba55068fa3e0e553.html \ No newline at end of file diff --git a/docs/docs/how-tos/Monitoring/Conductor-LogLevel.md b/docs/docs/how-tos/Monitoring/Conductor-LogLevel.md deleted file mode 100644 index c38480a59..000000000 --- a/docs/docs/how-tos/Monitoring/Conductor-LogLevel.md +++ /dev/null @@ -1,18 +0,0 @@ -# Conductor Log Level - -Conductor is based on Spring Boot, so the log levels are set via [Spring Boot properties](https://docs.spring.io/spring-boot/docs/2.1.13.RELEASE/reference/html/boot-features-logging.html): - -From the Spring Boot Docs: - - -> All the supported logging systems can have the logger levels set in the Spring Environment (for example, in application.properties) by using ```logging.level.=``` where level is one of TRACE, DEBUG, INFO, WARN, ERROR, FATAL, or OFF. The ```root``` logger can be configured by using logging.level.root. - -> The following example shows potential logging settings in ```application.properties```: - -``` -logging.level.root=warn -logging.level.org.springframework.web=debug -logging.level.org.hibernate=error -``` - -It’s also possible to set logging levels using environment variables. For example, ```LOGGING_LEVEL_ORG_SPRINGFRAMEWORK_WEB=DEBUG``` will set ```org.springframework.web``` to `DEBUG`. 
diff --git a/docs/docs/how-tos/Tasks/creating-tasks.md b/docs/docs/how-tos/Tasks/creating-tasks.md deleted file mode 100644 index 54ae74e91..000000000 --- a/docs/docs/how-tos/Tasks/creating-tasks.md +++ /dev/null @@ -1,35 +0,0 @@ -# Creating Task Definitions - -Tasks can be created using the tasks metadata API - -`POST /api/metadata/taskdefs` - -This API takes an array of new task definitions. - -### Example using curl - -```shell -curl 'http://localhost:8080/api/metadata/taskdefs' \ - -H 'accept: */*' \ - -H 'content-type: application/json' \ - --data-raw '[{"createdBy":"user","name":"sample_task_name_1","description":"This is a sample task for demo","responseTimeoutSeconds":10,"timeoutSeconds":30,"inputKeys":[],"outputKeys":[],"timeoutPolicy":"TIME_OUT_WF","retryCount":3,"retryLogic":"FIXED","retryDelaySeconds":5,"inputTemplate":{},"rateLimitPerFrequency":0,"rateLimitFrequencyInSeconds":1}]' -``` - -### Example using node fetch - -```javascript -fetch("http://localhost:8080/api/metadata/taskdefs", { - "headers": { - "accept": "*/*", - "content-type": "application/json", - }, - "body": "[{\"createdBy\":\"user\",\"name\":\"sample_task_name_1\",\"description\":\"This is a sample task for demo\",\"responseTimeoutSeconds\":10,\"timeoutSeconds\":30,\"inputKeys\":[],\"outputKeys\":[],\"timeoutPolicy\":\"TIME_OUT_WF\",\"retryCount\":3,\"retryLogic\":\"FIXED\",\"retryDelaySeconds\":5,\"inputTemplate\":{},\"rateLimitPerFrequency\":0,\"rateLimitFrequencyInSeconds\":1}]", - "method": "POST" -}); -``` -## Best Practices - -1. You can update a set of tasks together in this API -2. Task configurations are important attributes that controls the behavior of this task in a Workflow. Refer to [Task Configurations](/configuration/taskdef.html) for all the options and details' -3. 
You can also use the Conductor Swagger UI to update the tasks - diff --git a/docs/docs/how-tos/Tasks/dynamic-vs-switch-tasks.md b/docs/docs/how-tos/Tasks/dynamic-vs-switch-tasks.md deleted file mode 100644 index e81327d49..000000000 --- a/docs/docs/how-tos/Tasks/dynamic-vs-switch-tasks.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Dynamic vs Switch Tasks - -Learn more about - -1. [Dynamic Tasks](/reference-docs/dynamic-task.html) -2. [Switch Tasks](/reference-docs/switch-task.html) - -Dynamic Tasks are useful in situations when need to run a task of which the task type is determined at runtime instead -of during the configuration. It is similar to the [SWITCH](/reference-docs/switch-task.html) use case but with `DYNAMIC` -we won't need to preconfigure all case options in the workflow definition itself. Instead, we can mark the task -as `DYNAMIC` and determine which underlying task does it run during the workflow execution itself. - -1. Use DYNAMIC task as a replacement for SWITCH if you have too many case options -2. DYNAMIC task is an option when you want to programmatically determine the next task to run instead of using expressions -3. DYNAMIC task simplifies the workflow execution UI view which will now only show the selected task -4. SWITCH task visualization is helpful as a documentation - showing you all options that the workflow could have - taken -5. SWITCH task comes with a default task option which can be useful in some use cases diff --git a/docs/docs/how-tos/Tasks/extending-system-tasks.md b/docs/docs/how-tos/Tasks/extending-system-tasks.md deleted file mode 100644 index 5661d040c..000000000 --- a/docs/docs/how-tos/Tasks/extending-system-tasks.md +++ /dev/null @@ -1,98 +0,0 @@ -# Extending System Tasks - -[System tasks](/configuration/systask.html) allow Conductor to run simple tasks on the server - removing the need to build (and deploy) workers for basic tasks. 
This allows for automating more mundane tasks without building specific microservices for them. - -However, sometimes it might be necessary to add additional parameters to a System Task to gain the behavior that is desired. - -## Example HTTP Task - -```json -{ - "name": "get_weather_90210", - "version": 1, - "tasks": [ - { - "name": "get_weather_90210", - "taskReferenceName": "get_weather_90210", - "inputParameters": { - "http_request": { - "uri": "https://weatherdbi.herokuapp.com/data/weather/90210", - "method": "GET", - "connectionTimeOut": 1300, - "readTimeOut": 1300 - } - }, - "type": "HTTP", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": { - "data": "${get_weather_ref.output.response.body.currentConditions.comment}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "ownerEmail": "conductor@example.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "variables": {}, - "inputTemplate": {} -} - -``` - -This very simple workflow has a single HTTP Task inside. No parameters need to be passed, and when run, the HTTP task will return the weather in Beverly Hills, CA (Zip code = 90210). - -> This API has a very slow response time. In the HTTP task, the connection is set to time out after 1300ms, which is *too short* for this API, resulting in a timeout. This API *will* work if we allowed for a longer timeout, but in order to demonstrate adding retries to the HTTP Task, we will artificially force the API call to fail. - -When this workflow is run - it fails, as expected. - -Now, sometimes an API call might fail due to an issue on the remote server, and retrying the call will result in a response. 
With many Conductor tasks, ```retryCount```, ```retryDelaySeconds``` and ```retryLogic``` fields can be applied to retry the worker (with the desired parameters). - -By default, the [HTTP Task](/reference-docs/http-task.html) does not have ```retryCount```, ```retryDelaySeconds``` or ```retryLogic``` built in. Attempting to add these parameters to a HTTP Task results in an error. - -## The Solution - -We can create a task with the same name with the desired parameters. Defining the following task (note that the ```name``` is identical to the one in the workflow): - -```json -{ - - "createdBy": "", - "name": "get_weather_90210", - "description": "editing HTTP task", - "retryCount": 3, - "timeoutSeconds": 5, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 5, - "responseTimeoutSeconds": 5, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 -} - -``` - -We've added the three parameters: ```retryCount: 3, retryDelaySeconds: 5, retryLogic: FIXED``` - -The ```get_weather_90210``` task will now run 4 times (it will fail once, and then retry 3 times), with a ```FIXED``` 5 second delay between attempts. - -Re-running the task (and looking at the timeline view) shows that this is what occurs. There are 4 attempts, with a 5 second delay between them. - -If we change the ```retryLogic``` to EXPONENTIAL_BACKOFF, the delay between attempts grows exponentially: - -1. 5*2^0 = 5 seconds -2. 5*2^1 = 10 seconds -3. 5*2^2 = 20 seconds diff --git a/docs/docs/how-tos/Tasks/monitoring-task-queues.md b/docs/docs/how-tos/Tasks/monitoring-task-queues.md deleted file mode 100644 index 584b89102..000000000 --- a/docs/docs/how-tos/Tasks/monitoring-task-queues.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Monitoring Task Queues - -Conductor offers an API and UI interface to monitor the task queues. 
This is useful to see details of the number of -workers polling and monitoring the queue backlog. - -### Using the UI - -```http request -/taskQueue -``` - -Access this screen via - Home > Task Queues - -On this screen you can select and view the details of the task queue. The following information is shown: - -1. Queue Size - The number of tasks waiting to be executed -2. Workers - The count and list of works and their instance reference who are polling for work for this task - -### Using APIs - -To see the size of the task queue via API: - -```shell -curl 'http://localhost:8080/api/tasks/queue/sizes?taskType=' \ - -H 'accept: */*' -``` - -To see the worker poll information of the task queue via API: - -```shell -curl 'http://localhost:8080/api/tasks/queue/polldata?taskType=' \ - -H 'accept: */*' -``` - -> Replace `` with your task name diff --git a/docs/docs/how-tos/Tasks/reusing-tasks.md b/docs/docs/how-tos/Tasks/reusing-tasks.md deleted file mode 100644 index 7923a2e43..000000000 --- a/docs/docs/how-tos/Tasks/reusing-tasks.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Reusing Tasks - -A powerful feature of Conductor is that it supports and enables re-usability out of the box. Task workers typically -perform a unit of work and is usually a part of a larger workflow. Such workers are often re-usable in multiple -workflows. Once a task is defined, you can use it across as any workflow. - -When re-using tasks, it's important to think of situations that a multi-tenant system faces. All the work assigned to -this worker by default goes to the same task scheduling queue. This could result in your worker not being polled quickly -if there is a noisy neighbour in the ecosystem. One way you can tackle this situation is by re-using the worker code, -but having different task names registered for different use cases. And for each task name, you can run an appropriate -number of workers based on expected load. 
- - diff --git a/docs/docs/how-tos/Tasks/task-configurations.md b/docs/docs/how-tos/Tasks/task-configurations.md deleted file mode 100644 index 1fef6c709..000000000 --- a/docs/docs/how-tos/Tasks/task-configurations.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Task Configurations - -Refer to [Task Definitions](/configuration/taskdef.html) for details on how to configure task definitions - -### Example - -Here is a task template payload with commonly used fields: - -```json -{ - "createdBy": "user", - "name": "sample_task_name_1", - "description": "This is a sample task for demo", - "responseTimeoutSeconds": 10, - "timeoutSeconds": 30, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 5, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 -} -``` - -### Best Practices - -1. Refer to [Task Timeouts](/how-tos/Tasks/task-timeouts.html) for additional information on how the various timeout settings work -2. Refer to [Monitoring Task Queues](/how-tos/Tasks/monitoring-task-queues.html) on how to monitor task queues diff --git a/docs/docs/how-tos/Tasks/task-inputs.md b/docs/docs/how-tos/Tasks/task-inputs.md deleted file mode 100644 index 41f6df61a..000000000 --- a/docs/docs/how-tos/Tasks/task-inputs.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Task Inputs - -Task inputs can be provided in multiple ways. This is configured in the workflow definition when a task is participating -in the workflow. - -### Inputs referred from Workflow inputs - -When we start a workflow, we can provide inputs to the workflow in a json format. 
For example: - -```json -{ - "worfklowInputNumberExample": 1, - "worfklowInputTextExample": "SAMPLE", - "worfklowInputJsonExample": { - "nestedKey": "nestedValue" - } -} -``` - -These values can be referred as inputs into your task using the following expression: - -```json -{ - "taskInput1Key": "${workflow.input.worfklowInputNumberExample}", - "taskInput2Key": "${workflow.input.worfklowInputJsonExample.nestedKey}" -} -``` - -In this example, the tasks will receive the following inputs after they are evaluated: -```json -{ - "taskInput1Key": 1, - "taskInput2Key": "nestedValue" -} -``` - -### Inputs referred from other Task outputs - -Similar to how we can refer to workflow inputs, we can also refer to an output field that was generated by a task that -executed before. - -Let's assume a task with the task reference name `previousTaskReference` executed and produced the following output: - -```json -{ - "taskOutputKey1": "outputValue", - "taskOutputKey2": { - "nestedKey1": "outputValue-1" - } -} -``` - -We can refer to these as the new task's input by using the following expression: - -```json -{ - "taskInput1Key": "${previousTaskReference.output.taskOutputKey1}", - "taskInput2Key": "${previousTaskReference.output.taskOutputKey2.nestedKey1}" -} -``` - -The expression format is based on [Json Path](https://goessner.net/articles/JsonPath/) and you can construct complex -input params based on the syntax. - -### Hard coded inputs - -Task inputs can also be hard coded in the workflow definitions. This is useful when you have a re-usable task which has -configurable options that can be applied in different workflow contexts. 
- -```json -{ - "taskInput1": "OPTION_A", - "taskInput2": 100 -} -``` diff --git a/docs/docs/how-tos/Tasks/task-timeouts.md b/docs/docs/how-tos/Tasks/task-timeouts.md deleted file mode 100644 index 74aa51ab6..000000000 --- a/docs/docs/how-tos/Tasks/task-timeouts.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Task Timeouts - -Tasks can be configured to handle various scenarios of timeouts. Here are some scenarios and the relevance configuration -fields. - -| Scenario | Configuration | -|-----------------------------------------------------------------------------------------------------------|--------------------------| -| A task worker picked up the task, but fails to respond back with an update | `responseTimeoutSeconds` | -| A task worker picked up the task and updates progress, but fails to complete within an expected timeframe | `timeoutSeconds` | -| A task is stuck in a retry loop with repeated failures beyond an expected timeframe | `timeoutSeconds` | -| Task doesn't get picked by any workers for a specific amount of time | `pollTimeoutSeconds` | -| Task isn't completed within a specified amount of time despite being picked up by task workers | `timeoutSeconds` | - -> `timeoutSeconds` should always be greater than `responseTimeoutSeconds` - -### Timeout Seconds - -```json -"timeoutSeconds" : 30 -``` - -When configured with a value > `0`, the system will wait for this task to complete successfully up until this number of -seconds from when the task is first polled. We can use this to fail a workflow when a task breaches the overall SLA for -completion. - -### Response Timeout Seconds - -```json -"responseTimeoutSeconds" : 10 -``` - -When configured with a value > `0`, the system will wait for this number of seconds from when the task is polled before -the worker updates back with a status. The worker can keep the task in `IN_PROGRESS` state if it requires more time to -complete. 
- -### Poll Timeout Seconds - -```json -"pollTimeoutSeconds" : 10 -``` - -When configured with a value > `0`, the system will wait for this number of seconds for the task to be picked up by a -task worker. Useful when you want to detect a backlogged task queue with not enough workers. diff --git a/docs/docs/how-tos/Tasks/updating-tasks.md b/docs/docs/how-tos/Tasks/updating-tasks.md deleted file mode 100644 index 4978e80b3..000000000 --- a/docs/docs/how-tos/Tasks/updating-tasks.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Updating Task Definitions - -Updates to the task definitions can be made using the following API - -```http - -PUT /api/metadata/taskdefs -``` - -This API takes a single task definition and updates itself. - - -### Example using curl - -```shell -curl 'http://localhost:8080/api/metadata/taskdefs' \ - -X 'PUT' \ - -H 'accept: */*' \ - -H 'content-type: application/json' \ - --data-raw '{"createdBy":"user","name":"sample_task_name_1","description":"This is a sample task for demo","responseTimeoutSeconds":10,"timeoutSeconds":30,"inputKeys":[],"outputKeys":[],"timeoutPolicy":"TIME_OUT_WF","retryCount":3,"retryLogic":"FIXED","retryDelaySeconds":5,"inputTemplate":{},"rateLimitPerFrequency":0,"rateLimitFrequencyInSeconds":1}' -``` - -### Example using node fetch - -```javascript -fetch("http://localhost:8080/api/metadata/taskdefs", { - "headers": { - "accept": "*/*", - "content-type": "application/json", - }, - "body": "{\"createdBy\":\"user\",\"name\":\"sample_task_name_1\",\"description\":\"This is a sample task for demo\",\"responseTimeoutSeconds\":10,\"timeoutSeconds\":30,\"inputKeys\":[],\"outputKeys\":[],\"timeoutPolicy\":\"TIME_OUT_WF\",\"retryCount\":3,\"retryLogic\":\"FIXED\",\"retryDelaySeconds\":5,\"inputTemplate\":{},\"rateLimitPerFrequency\":0,\"rateLimitFrequencyInSeconds\":1}", - "method": "PUT" -}); -``` -## Best Practices - -1. You can also use the Conductor Swagger UI to update the tasks -2. 
Task configurations are important attributes that controls the behavior of this task in a Workflow. Refer to [Task Configurations](/how-tos/Tasks/task-configurations.html) for all the options and details' diff --git a/docs/docs/how-tos/Workers/build-a-golang-task-worker.md b/docs/docs/how-tos/Workers/build-a-golang-task-worker.md deleted file mode 100644 index 306ffc12b..000000000 --- a/docs/docs/how-tos/Workers/build-a-golang-task-worker.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Build a Go Task Worker - -## Install -```shell -go get github.com/netflix/conductor/client/go -``` -This will create a Go project under $GOPATH/src and download any dependencies. - -## Implementing a Task a Worker -`task`package provies the types used to implement the worker. Here is a reference worker implementation: - -```go -package task - -import ( - "fmt" -) - -// Implementation for "task_1" -func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { - log.Println("Executing Task_1_Execution_Function for", t.TaskType) - - //Do some logic - taskResult = task.NewTaskResult(t) - - output := map[string]interface{}{"task":"task_1", "key2":"value2", "key3":3, "key4":false} - taskResult.OutputData = output - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} -``` - -## Worker Polling -Here is an example that shows how to start polling for tasks after defining the tasks. - -```go -package main - -import ( - "github.com/netflix/conductor/client/go" - "github.com/netflix/conductor/client/go/task/sample" -) - -func main() { - c := conductor.NewConductorWorker("http://localhost:8080", 1, 10000) - - c.Start("task_1", "", sample.Task_1_Execution_Function, false) - c.Start("task_2", "mydomain", sample.Task_2_Execution_Function, true) -} -``` -### `NewConductorWoker` parameters -1. baseUrl: Server address. -2. threadCount: No. of threads. Number of threads should be at-least same as the number of workers -3. 
pollingInterval: Time in millisecond between subsequent polls diff --git a/docs/docs/how-tos/Workers/build-a-java-task-worker.md b/docs/docs/how-tos/Workers/build-a-java-task-worker.md deleted file mode 100644 index 65f5e8f2f..000000000 --- a/docs/docs/how-tos/Workers/build-a-java-task-worker.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Build a Java Task Worker -This guide provides introduction to building Task Workers in Java. - - -## Dependencies -Conductor provides java client libraries, which we will use to build a simple task worker. -### Maven Dependency -```xml - - com.netflix.conductor - conductor-client - 3.3.4 - -``` - -### Gradle -```groovy -implementation group: 'com.netflix.conductor', name: 'conductor-client', version: '3.3.4' -``` - -## Implementing a Task a Worker -To create a worker, implement the `Worker` interface. -```java -public class SampleWorker implements Worker { - - private final String taskDefName; - - public SampleWorker(String taskDefName) { - this.taskDefName = taskDefName; - } - - @Override - public String getTaskDefName() { - return taskDefName; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - result.setStatus(Status.COMPLETED); - - //Register the output of the task - result.getOutputData().put("outputKey1", "value"); - result.getOutputData().put("oddEven", 1); - result.getOutputData().put("mod", 4); - - return result; - } -} -``` -### Implementing worker's logic -Worker's core implementation logic goes in the `execute` method. Upon completion, set the `TaskResult` with status as one of the following: -1. **COMPLETED**: If the task has completed successfully. -2. **FAILED**: If there are failures - business or system failures. Based on the task's configuration, when a task fails, it maybe retried. - -`getTaskDefName()` method returns the name of the task for which this worker provides the execution logic. 
- -See [SampleWorker.java](https://github.com/Netflix/conductor/blob/main/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java) for the complete example. - -## Configuring polling using TaskRunnerConfigurer -The TaskRunnerConfigurer can be used to register the worker(s) and initialize the polling loop. -Manages the task workers thread pool and server communication (poll and task update). - -Use the [Builder](https://github.com/Netflix/conductor/blob/main/client/src/main/java/com/netflix/conductor/client/automator/TaskRunnerConfigurer.java#L64) to create an instance of the TaskRunnerConfigurer. The builder accepts the following parameters: - -```java - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI("http://localhost:8080/api/"); //Point this to the server API - - int threadCount = 2; //number of threads used to execute workers. To avoid starvation, should be same or more than number of workers - - Worker worker1 = new SampleWorker("task_1"); - Worker worker2 = new SampleWorker("task_5"); - - // Create TaskRunnerConfigurer - TaskRunnerConfigurer configurer = new TaskRunnerConfigurer.Builder(taskClient, Arrays.asList(worker1, worker2)) - .withThreadCount(threadCount) - .build(); - - // Start the polling and execution of tasks - configurer.init(); -``` -See [Sample](https://github.com/Netflix/conductor/blob/main/client/src/test/java/com/netflix/conductor/client/sample/Main.java) for full example. - -### Configuration Details -Initialize the Builder with the following: - TaskClient | TaskClient used to communicate to the Conductor server | -| Workers | Workers that will be used for polling work and task execution. | - - -| Parameter | Description | Default | -| --- | --- | --- | -| withEurekaClient | EurekaClient is used to identify if the server is in discovery or not. When the server goes out of discovery, the polling is stopped unless `pollOutOfDiscovery` is set to true. If passed null, discovery check is not done. 
| provided by platform | -| withThreadCount | Number of threads assigned to the workers. Should be at-least the size of taskWorkers to avoid starvation in a busy system. | Number of registered workers | -| withSleepWhenRetry | Time in milliseconds, for which the thread should sleep when task update call fails, before retrying the operation. | 500 | -| withUpdateRetryCount | Number of attempts to be made when updating task status when update status call fails. | 3 | -| withWorkerNamePrefix | String prefix that will be used for all the workers. | workflow-worker- | - -Once an instance is created, call `init()` method to initialize the TaskPollExecutor and begin the polling and execution of tasks. - -!!! tip "Note" - To ensure that the TaskRunnerConfigurer stops polling for tasks when the instance becomes unhealthy, call the provided `shutdown()` hook in a `PreDestroy` block. - -**Properties** -The worker behavior can be further controlled by using these properties: - -| Property | Type | Description | Default | -| --- | --- | --- | --- | -| paused | boolean | If set to true, the worker stops polling.| false | -| pollInterval | int | Interval in milliseconds at which the server should be polled for tasks. | 1000 | -| pollOutOfDiscovery | boolean | If set to true, the instance will poll for tasks regardless of the discovery
    status. This is useful while running on a dev machine. | false | - -Further, these properties can be set either by Worker implementation or by setting the following system properties in the JVM: - -| Name | Description | -| --- | --- | -| `conductor.worker.` | Applies to ALL the workers in the JVM. | -| `conductor.worker..` | Applies to the specified worker. Overrides the global property. | diff --git a/docs/docs/how-tos/Workers/build-a-python-task-worker.md b/docs/docs/how-tos/Workers/build-a-python-task-worker.md deleted file mode 100644 index d45657fd1..000000000 --- a/docs/docs/how-tos/Workers/build-a-python-task-worker.md +++ /dev/null @@ -1,86 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Build a Python Task Worker -## Install the python client -```shell - virtualenv conductorclient - source conductorclient/bin/activate - cd ../conductor/client/python - python setup.py install -``` - -## Implement a Task Worker -[ConductorWorker](https://github.com/Netflix/conductor/blob/main/polyglot-clients/python/conductor/ConductorWorker.py#L36) -class is used to implement task workers. -The following script shows how to bring up two task workers named `book_flight` and `book_car`: - -```python -from __future__ import print_function -from conductor.ConductorWorker import ConductorWorker - -def book_flight_task(task): - return {'status': 'COMPLETED', 'output': {'booking_ref': 2341111, 'airline': 'delta'}, 'logs': ['trying delta', 'skipping aa']} - -def book_car_task(task): - return {'status': 'COMPLETED', 'output': {'booking_ref': "84545fdfd", 'agency': 'hertz'}, 'logs': ['trying hertz']} - -def main(): - print('Starting Travel Booking workflows') - cc = ConductorWorker('http://localhost:8080/api', 1, 0.1) - cc.start('book_flight', book_flight_task, False) - cc.start('book_car', book_car_task, True) - -if __name__ == '__main__': - main() -``` -### `ConductorWorker` parameters -```python -server_url: str - The url to the server hosting the conductor api. 
- Ex: 'http://localhost:8080/api' - -thread_count: int - The number of threads that will be polling for and - executing tasks in case of using the start method. - -polling_interval: float - The number of seconds that each worker thread will wait - between polls to the conductor server. - -worker_id: str, optional - The worker_id of the worker that is going to execute the - task. For further details, refer to the documentation - By default, it is set to hostname of the machine -``` -### `start` method parameters -```pythhon -taskType: str - The name of the task that the worker is looking to execute - -exec_function: function - The function that the worker will execute. The function - must return a dict with the `status`, `output` and `logs` - keys present. If this is not present, an Exception will be - raised - -wait: bool - Whether the worker will block execution of further code. - Since the workers are being run in daemon threads, when the - program completes execution, all the threads are destroyed. - Setting wait to True prevents the program from ending. - If multiple workers are being called from the same program, - all but the last start call but have wait set to False. - The last start call must always set wait to True. If a - single worker is being called, set wait to True. - -domain: str, optional - The domain of the task under which the worker will run. For - further details refer to the conductor server documentation - By default, it is set to None -``` - -See -[https://github.com/Netflix/conductor/tree/main/polyglot-clients/python](https://github.com/Netflix/conductor/tree/main/polyglot-clients/python) -for the source code. 
diff --git a/docs/docs/how-tos/Workflows/debugging-workflows.md b/docs/docs/how-tos/Workflows/debugging-workflows.md deleted file mode 100644 index 189dbcd91..000000000 --- a/docs/docs/how-tos/Workflows/debugging-workflows.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Debugging Workflows - -Conductor UI is a tool that we can leverage for debugging issues. Refer to the following articles to search and view -your workflow execution. - -1. Searching Workflows (coming soon!) -2. View Workflow Executions (coming soon!) - - -## Debugging Executions - -Open the **Tasks > Diagram** tab to see the diagram of the overall workflow execution - -If there is a failure, you will them on the view marked as red. In most cases it should be clear what went wrong from -the view itself. To see details of the failure, you can click on the failed task. - -The following fields are useful in debugging - -| Field Name | Description | -|-------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| -| Task Detail > Summary > Reason for Incompletion | If an exception was thrown by the worker, it will be captured and displayed here | -| Task Detail > Summary > Worker | The worker instance id where this failure last occurred. Useful to dig for detailed logs if not already captured by Conductor | -| Task Detail > Input | Verify if the task inputs were computed and provided correctly to the task | -| Task Detail > Output | If output of a previous task is used as an input to your next task, refer here for what was produced | -| Task Detail > Logs | If your task is supplying logs, we can look at that here | -| Task Detail > Retried Task - Select an instance | If your task was retried, we can see all the attempts and correponding details here | - -Note: We can also access the task list from **Tasks > Task List** tab. 
- -Here is a screen grab of the fields referred above. - -![Debugging Wowkflow Execution](/img/tutorial/workflow_debugging.png) - -## Recovering From Failures - -Once we have resolved the underlying issue of workflow execution failure, we might want to replay or retry failed -workflows. The UI has functions that would allow us to do this: - -The **Actions** button provides the following options: - -|Action Name|Description| -|---|---| -| Restart with Current Definitions | Restart this workflow from the beginning using the same version of the workflow definition that originally ran this workflow execution. This is useful if the workflow definition has changed and we want to retain this instance to the original version| -| Restart with Latest Definitions | Restart this workflow from the beginning using the latest definition of the workflow. If we made changes to definition, we can use this option to re-run this flow with the latest version| -| Retry - From failed task | Retry this workflow from the failed task| - -
    - -> **Note:** Conductor configurations allow your tasks to be retried automatically for transient failures. -> Refer to the task configuration options on how to leverage this. diff --git a/docs/docs/how-tos/Workflows/handling-errors.md b/docs/docs/how-tos/Workflows/handling-errors.md deleted file mode 100644 index b58fe1e91..000000000 --- a/docs/docs/how-tos/Workflows/handling-errors.md +++ /dev/null @@ -1,59 +0,0 @@ -# Handling Errors - -When a workflow fails, there are 2 ways to handle the exception. - -## Set ```failureWorkflow``` in Workflow Definition - -In your main workflow definition, you can configure a workflow to run upon failure, by adding the following parameter to the workflow: - -```json -"failureWorkflow": "", - "method": "POST", - "body": { - "text": "workflow: ${workflow.input.workflowId} failed. ${workflow.input.reason}" - }, - "connectionTimeOut": 5000, - "readTimeOut": 5000 - } - }, - "type": "HTTP", - "retryCount": 3 - } - ], - "restartable": true, - "workflowStatusListenerEnabled": false, - "ownerEmail": "conductor@example.com", - "timeoutPolicy": "ALERT_ONLY", -} -``` - -## Set ```workfowStatusListenerEnabled``` - -When this is enabled, notifications are now possible, and by building a custom implementation of the Workflow Status Listener, a notification can be sent to an external service. [More details.](https://github.com/Netflix/conductor/issues/1017#issuecomment-468869173) \ No newline at end of file diff --git a/docs/docs/how-tos/Workflows/searching-workflows.md b/docs/docs/how-tos/Workflows/searching-workflows.md deleted file mode 100644 index be1f82c56..000000000 --- a/docs/docs/how-tos/Workflows/searching-workflows.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Searching Workflows - -In this article we will learn how to search through workflow executions via the UI. - -### Prerequisites - -1. Conductor app and UI installed and running in an environment. 
If required we can look at the following options to get - an environment up and running. - - 1. [Build and Run Conductor Locally](/gettingstarted/local.html) - 2. [Running via Docker Compose](/gettingstarted/docker.html) - -## UI Workflows View - -Open the home page of the UI installation. It will take you to the `Workflow Executions` view. This is where we can look -at available workflow executions. - -### Basic Search - -The following fields are available for searching for workflows. - -| Search Field Name | Description | -|-------------------|---------------------------------------------------------------------------------------------------------| -| Workflow Name | Use this field to filter workflows by the configured name | -| Workflow ID | Use this field to filter to a specific workflow by its id | -| Status | Use this field to filter by status - available options are presented as a multi-select option | -| Start Time - From | Use this field to filter workflows that started on or after the time specified | -| Start Time - To | Use this field to filter workflows that started on or before the time specified | -| Lookback (days) | Use this field to filter workflows that ran in the last given number of days | -| Free Text Query | If you have indexing enabled, you can query by values that was part of your workflow inputs and outputs | - -The table listing has options to -1. Select columns for display -2. Sort by column value - -At the bottom of the table, there are options to -1. Select number of rows per page -2. Navigating through pages - -### Find by Tasks - -In addition to the options listed in **Basic Search** view, we have the following options in the **Find by Tasks** view. 
- -| Search Field Name | Description | -|--------------------|--------------------------------------------------------------------------------------------------------------| -| Include Task ID | Use this field to filter workflows that contains a task with this id | -| Include Task Name | Use this field to filter workflows that contains a task with name | -| Free Text in Tasks | If you have indexing enabled, you can query by values that was part of your workflow task inputs and outputs | - diff --git a/docs/docs/how-tos/Workflows/starting-workflows.md b/docs/docs/how-tos/Workflows/starting-workflows.md deleted file mode 100644 index e35cc6004..000000000 --- a/docs/docs/how-tos/Workflows/starting-workflows.md +++ /dev/null @@ -1,42 +0,0 @@ -# Starting Workflows - -Workflow executions can be started by using the following API: - -```http request -POST /api/workflow/{name} -``` - -`{name}` is the placeholder for workflow name. The POST API body is your workflow input parameters which can be empty if -there are none. - -### Using Client SDKs - -Conductor offers client SDKs for popular languages which has library methods that can be used to make this API call. -Refer to the SDK documentation to configure a client in your selected language to invoke workflow executions. 
- -### Example using curl - -```shell -curl 'https://localhost:8080/api/workflow/sample_workflow' \ - -H 'accept: text/plain' \ - -H 'content-type: application/json' \ - --data-raw '{"service":"fedex"}' -``` - -In this example we are specifying one input param called `service` with a value of `fedex` and the name of the workflow -is `sample_workflow` - -### Example using node fetch - -```javascript -fetch("https://localhost:8080/api/workflow/sample_workflow", { - "headers": { - "accept": "text/plain", - "content-type": "application/json", - }, - "body": "{\"service\":\"fedex\"}", - "method": "POST", -}); -``` - - diff --git a/docs/docs/how-tos/Workflows/updating-workflows.md b/docs/docs/how-tos/Workflows/updating-workflows.md deleted file mode 100644 index beb67a404..000000000 --- a/docs/docs/how-tos/Workflows/updating-workflows.md +++ /dev/null @@ -1,35 +0,0 @@ -# Updating Workflows - -Workflows can be created or updated using the workflow metadata API - -```html -PUT /api/metadata/workflow -``` - -### Example using curl - -```shell -curl 'http://localhost:8080/api/metadata/workflow' \ - -X 'PUT' \ - -H 'accept: */*' \ - -H 'content-type: application/json' \ - --data-raw '[{"name":"sample_workflow","version":1,"tasks":[{"name":"ship_via_fedex","taskReferenceName":"ship_via_fedex","type":"SIMPLE"}],"schemaVersion":2}]' -``` - -### Example using node fetch - -```javascript -fetch("http://localhost:8080/api/metadata/workflow", { - "headers": { - "accept": "*/*", - "content-type": "application/json" - }, - "body": "[{\"name\":\"sample_workflow\",\"version\":1,\"tasks\":[{\"name\":\"ship_via_fedex\",\"taskReferenceName\":\"ship_via_fedex\",\"type\":\"SIMPLE\"}],\"schemaVersion\":2}]", - "method": "PUT" -}); -``` -## Best Practices - -1. If you are updating the workflow with new tasks, remember to register the task definitions first -2. 
You can also use the Conductor Swagger UI to update the workflows - diff --git a/docs/docs/how-tos/Workflows/versioning-workflows.md b/docs/docs/how-tos/Workflows/versioning-workflows.md deleted file mode 100644 index e44485176..000000000 --- a/docs/docs/how-tos/Workflows/versioning-workflows.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Versioning Workflows - -Every workflow has a version number (this number **must** be an integer.) - -Versioning allows you to run different versions of the same workflow simultaneously. - - -## Summary - -> Use Case: A new version of your core workflow will add a capability that is required for *veryImportantCustomer*. However, *otherVeryImportantCustomer* will not be ready to implement this code for another 6 months. - - -## Version 1 - -```json -{ - "name": "Core_workflow", - "description": "Very_important_business", - "version": 1, - "tasks": [ - { - - } - ], - "outputParameters": { - } -} -``` - -## Version 2 - -```json -{ - "name": "Core_workflow", - "description": "Very_important_business", - "version": 2, - "tasks": [ - { - - } - ], - "outputParameters": { - } -} -``` - -### Version 2 launch -Initially, both customers are on version 1 of the workflow. - -* **veryImportantCustomer* may begin transitioning traffic onto version 2. Any tasks that remain unfinished on version 1 *stay* on version 1. -* *otherVeryImportantCustomer* remains on version 1. - - -### 6 months later - -* All *veryImportantCustomer* workflows are on version 2. -* *otherVeryImportantCustomer* may begin transitioning traffic onto version 2. Any tasks that remain unfinished on version 1 *stay* on version 1. 
\ No newline at end of file diff --git a/docs/docs/how-tos/Workflows/view-workflow-executions.md b/docs/docs/how-tos/Workflows/view-workflow-executions.md deleted file mode 100644 index 445fc9b29..000000000 --- a/docs/docs/how-tos/Workflows/view-workflow-executions.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -sidebar_position: 1 ---- - -# View Workflow Executions - -In this article we will learn how to view workflow executions via the UI. - -### Prerequisites - -1. Conductor app and UI installed and running in an environment. If required we can look at the following options to get - an environment up and running. - - 1. [Build and Run Conductor Locally](/gettingstarted/local.html) - 2. [Running via Docker Compose](/gettingstarted/docker.html) - -### Viewing a Workflow Execution - -Refer to [Searching Workflows](/how-tos/Workflows/searching-workflows.html) to filter and find an execution you want to -view. Click on the workflow id hyperlink to open the Workflow Execution Details page. - -The following tabs are available to view the details of the Workflow Execution - -| Tab Name | Description | -|-----------------------|-------------------------------------------------------------------------------------------------------------------| -| Tasks | Shows a view with the sub tabs **Diagram**, **Task List** and **Timeline** | -| Tasks > Diagram | Visual view of the workflow and its tasks. | -| Tasks > Task List | Tabular view of the task executions under this workflow. If there were failures, we will be able to see that here | -| Tasks > Timeline | Shows the time each of the tasks took for execution in a timeline view | -| Summary | Summary view of the workflow execution | -| Workflow Input/Output | Shows the input and output payloads of the workflow. Enable copy mode to copy all or parts of the payload | -| JSON | Full JSON payload of the workflow including all tasks, inputs and outputs. Useful for detailed debugging. 
| - -### Viewing a Workflow Task Detail - -From both the **Tasks > Diagram** and **Tasks > Task List** views, we can click to see a task execution detail. This -opens a flyout panel from the side and contains the following tabs. - -| Tab Name | Description | -|------------|------------------------------------------------------------------------------------------------------------------------------------------| -| Summary | Summary info of the task execution | -| Input | Task input payload - refer to this tab to see computed inputs passed into the task. Enable copy mode to copy all or parts of the payload | -| Output | Shows the output payload produced by the executed task. Enable copy mode to copy all or parts of the payload | -| Log | Any log messages logged by the task worked will show up here | -| JSON | Complete JSON payload for debugging issues | -| Definition | Task definition used when executing this task | - -### Execution Path - -An exciting feature of conductor is the ability to see the exact execution path of a workflow. The executed paths are -shown in green and is easy to follow like the example below. The alternative paths are greyed out for reference - -![Conductor UI - Workflow Run](/img/tutorial/workflow_execution_view.png) - -Errors will be visible on the UI in ref such as the example below - -![Conductor UI - Failed Task](/img/tutorial/workflow_task_fail.png) diff --git a/docs/docs/how-tos/python-sdk.md b/docs/docs/how-tos/python-sdk.md deleted file mode 100644 index c245dee3e..000000000 --- a/docs/docs/how-tos/python-sdk.md +++ /dev/null @@ -1,195 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Python SDK - -Software Development Kit for Netflix Conductor, written on and providing support for Python. - -The code for the Python SDk is available on [Github](https://github.com/conductor-sdk/conductor-python). Please feel free to file PRs, issues, etc. there. - -## Quick Guide - -1. 
Create a virtual environment - - $ virtualenv conductor - $ source conductor/bin/activate - $ python3 -m pip list - Package Version - ---------- ------- - pip 22.0.3 - setuptools 60.6.0 - wheel 0.37.1 - -2. Install latest version of `conductor-python` from pypi - - $ python3 -m pip install conductor-python - Collecting conductor-python - Collecting certifi>=14.05.14 - Collecting urllib3>=1.15.1 - Requirement already satisfied: setuptools>=21.0.0 in ./conductor/lib/python3.8/site-packages (from conductor-python) (60.6.0) - Collecting six>=1.10 - Installing collected packages: certifi, urllib3, six, conductor-python - Successfully installed certifi-2021.10.8 conductor-python-1.0.7 six-1.16.0 urllib3-1.26.8 - -3. Create a worker capable of executing a `Task`. Example: - - from conductor.client.worker.worker_interface import WorkerInterface - - class SimplePythonWorker(WorkerInterface): - def execute(self, task): - task_result = self.get_task_result_from_task(task) - task_result.add_output_data('key', 'value') - task_result.status = 'COMPLETED' - return task_result - - - * The `add_output_data` is the most relevant part, since you can store information in a dictionary, which will be sent within `TaskResult` as your execution response to Conductor - -4. Create a main method to start polling tasks to execute with your worker. 
Example: - - from conductor.client.automator.task_handler import TaskHandler - from conductor.client.configuration.configuration import Configuration - from conductor.client.worker.sample.faulty_execution_worker import FaultyExecutionWorker - from conductor.client.worker.sample.simple_python_worker import SimplePythonWorker - - - def main(): - configuration = Configuration(debug=True) - task_definition_name = 'python_example_task' - workers = [ - SimplePythonWorker(task_definition_name), - FaultyExecutionWorker(task_definition_name) - ] - with TaskHandler(workers, configuration) as task_handler: - task_handler.start() - - - if __name__ == '__main__': - main() - - * This example contains two workers, each with a different execution method, capable of running the same `task_definition_name` - -5. Now that you have implemented the example, you can start the Conductor server locally: - 1. Clone [Netflix Conductor repository](https://github.com/Netflix/conductor): - - $ git clone https://github.com/Netflix/conductor.git - $ cd conductor/ - - 2. Start the Conductor server: - - /conductor$ ./gradlew bootRun - - 3. Start Conductor UI: - - /conductor$ cd ui/ - /conductor/ui$ yarn install - /conductor/ui$ yarn run start - - You should be able to access: - * Conductor API: - * http://localhost:8080/swagger-ui/index.html - * Conductor UI: - * http://localhost:5000 - -6. Create a `Task` within `Conductor`. Example: - - $ curl -X 'POST' \ - 'http://localhost:8080/api/metadata/taskdefs' \ - -H 'accept: */*' \ - -H 'Content-Type: application/json' \ - -d '[ - { - "name": "python_task_example", - "description": "Python task example", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF", - "responseTimeoutSeconds": 180, - "ownerEmail": "example@example.com" - } - ]' - -7. Create a `Workflow` within `Conductor`. 
Example: - - $ curl -X 'POST' \ - 'http://localhost:8080/api/metadata/workflow' \ - -H 'accept: */*' \ - -H 'Content-Type: application/json' \ - -d '{ - "createTime": 1634021619147, - "updateTime": 1630694890267, - "name": "workflow_with_python_task_example", - "description": "Workflow with Python Task example", - "version": 1, - "tasks": [ - { - "name": "python_task_example", - "taskReferenceName": "python_task_example_ref_1", - "inputParameters": {}, - "type": "SIMPLE" - } - ], - "inputParameters": [], - "outputParameters": { - "workerOutput": "${python_task_example_ref_1.output}" - }, - "schemaVersion": 2, - "restartable": true, - "ownerEmail": "example@example.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 - }' - -8. Start a new workflow: - - $ curl -X 'POST' \ - 'http://localhost:8080/api/workflow/workflow_with_python_task_example' \ - -H 'accept: text/plain' \ - -H 'Content-Type: application/json' \ - -d '{}' - - You should receive a *Workflow ID* at the *Response body* - * *Workflow ID* example: `8ff0bc06-4413-4c94-b27a-b3210412a914` - - Now you must be able to see its execution through the UI. - * Example: `http://localhost:5000/execution/8ff0bc06-4413-4c94-b27a-b3210412a914` - -9. Run your Python file with the `main` method - -### Unit Tests - -#### Simple validation - -```shell -/conductor-python/src$ python3 -m unittest -v -test_execute_task (tst.automator.test_task_runner.TestTaskRunner) ... ok -test_execute_task_with_faulty_execution_worker (tst.automator.test_task_runner.TestTaskRunner) ... ok -test_execute_task_with_invalid_task (tst.automator.test_task_runner.TestTaskRunner) ... 
ok - ----------------------------------------------------------------------- -Ran 3 tests in 0.001s - -OK -``` - -#### Run with code coverage - -```shell -/conductor-python/src$ python3 -m coverage run --source=conductor/ -m unittest -``` - -Report: - -```shell -/conductor-python/src$ python3 -m coverage report -``` - -Visual coverage results: - -```shell -/conductor-python/src$ python3 -m coverage html -``` - diff --git a/docs/docs/img/ResponseTimeoutSeconds.png b/docs/docs/img/ResponseTimeoutSeconds.png deleted file mode 100644 index 9900ac6e4..000000000 Binary files a/docs/docs/img/ResponseTimeoutSeconds.png and /dev/null differ diff --git a/docs/docs/img/Switch_Fedex.png b/docs/docs/img/Switch_Fedex.png deleted file mode 100644 index 63cfc50c3..000000000 Binary files a/docs/docs/img/Switch_Fedex.png and /dev/null differ diff --git a/docs/docs/img/TaskFailure.png b/docs/docs/img/TaskFailure.png deleted file mode 100644 index d3eb47487..000000000 Binary files a/docs/docs/img/TaskFailure.png and /dev/null differ diff --git a/docs/docs/img/Terminate_Task.png b/docs/docs/img/Terminate_Task.png deleted file mode 100644 index b045dd9ab..000000000 Binary files a/docs/docs/img/Terminate_Task.png and /dev/null differ diff --git a/docs/docs/img/TimeoutSeconds.png b/docs/docs/img/TimeoutSeconds.png deleted file mode 100644 index 378981526..000000000 Binary files a/docs/docs/img/TimeoutSeconds.png and /dev/null differ diff --git a/docs/docs/img/conductor-architecture.png b/docs/docs/img/conductor-architecture.png deleted file mode 100644 index cf591142a..000000000 Binary files a/docs/docs/img/conductor-architecture.png and /dev/null differ diff --git a/docs/docs/img/dag_workflow.png b/docs/docs/img/dag_workflow.png deleted file mode 100644 index 5e231e62f..000000000 Binary files a/docs/docs/img/dag_workflow.png and /dev/null differ diff --git a/docs/docs/img/dag_workflow2.png b/docs/docs/img/dag_workflow2.png deleted file mode 100644 index fd547b209..000000000 Binary files 
a/docs/docs/img/dag_workflow2.png and /dev/null differ diff --git a/docs/docs/img/directed_graph.png b/docs/docs/img/directed_graph.png deleted file mode 100644 index 103189a67..000000000 Binary files a/docs/docs/img/directed_graph.png and /dev/null differ diff --git a/docs/docs/img/dynamic-task-diagram.png b/docs/docs/img/dynamic-task-diagram.png deleted file mode 100644 index 3d928c003..000000000 Binary files a/docs/docs/img/dynamic-task-diagram.png and /dev/null differ diff --git a/docs/docs/img/favicon.svg b/docs/docs/img/favicon.svg deleted file mode 100644 index 1cd90c0ca..000000000 --- a/docs/docs/img/favicon.svg +++ /dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - diff --git a/docs/docs/img/fork-task-diagram.png b/docs/docs/img/fork-task-diagram.png deleted file mode 100644 index 324bb1195..000000000 Binary files a/docs/docs/img/fork-task-diagram.png and /dev/null differ diff --git a/docs/docs/img/icons/brackets.svg b/docs/docs/img/icons/brackets.svg deleted file mode 100644 index 606a48db3..000000000 --- a/docs/docs/img/icons/brackets.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/docs/docs/img/icons/modular.svg b/docs/docs/img/icons/modular.svg deleted file mode 100644 index e8e393496..000000000 --- a/docs/docs/img/icons/modular.svg +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - - - diff --git a/docs/docs/img/icons/network.svg b/docs/docs/img/icons/network.svg deleted file mode 100644 index 7360cb36e..000000000 --- a/docs/docs/img/icons/network.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/docs/docs/img/icons/osi.svg b/docs/docs/img/icons/osi.svg deleted file mode 100644 index 3b14c8b03..000000000 --- a/docs/docs/img/icons/osi.svg +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - diff --git a/docs/docs/img/icons/server.svg b/docs/docs/img/icons/server.svg deleted file mode 100644 index b480e7599..000000000 --- a/docs/docs/img/icons/server.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git 
a/docs/docs/img/icons/shield.svg b/docs/docs/img/icons/shield.svg deleted file mode 100644 index 4cb8af5e4..000000000 --- a/docs/docs/img/icons/shield.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/docs/docs/img/icons/wrench.svg b/docs/docs/img/icons/wrench.svg deleted file mode 100644 index 42d654300..000000000 --- a/docs/docs/img/icons/wrench.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/docs/docs/img/kitchensink.png b/docs/docs/img/kitchensink.png deleted file mode 100644 index f14981af0..000000000 Binary files a/docs/docs/img/kitchensink.png and /dev/null differ diff --git a/docs/docs/img/logo.png b/docs/docs/img/logo.png deleted file mode 100644 index 132d52cde..000000000 Binary files a/docs/docs/img/logo.png and /dev/null differ diff --git a/docs/docs/img/logo.svg b/docs/docs/img/logo.svg deleted file mode 100644 index 57feb5b0f..000000000 --- a/docs/docs/img/logo.svg +++ /dev/null @@ -1,93 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/docs/img/logo_dark_background.png b/docs/docs/img/logo_dark_background.png deleted file mode 100644 index 013020f12..000000000 Binary files a/docs/docs/img/logo_dark_background.png and /dev/null differ diff --git a/docs/docs/img/netflix-oss.png b/docs/docs/img/netflix-oss.png deleted file mode 100644 index d0e8275cd..000000000 Binary files a/docs/docs/img/netflix-oss.png and /dev/null differ diff --git a/docs/docs/img/netflix.png b/docs/docs/img/netflix.png deleted file mode 100755 index 151775b3b..000000000 Binary files a/docs/docs/img/netflix.png and /dev/null differ diff --git a/docs/docs/img/overview.png b/docs/docs/img/overview.png deleted file mode 100644 index 62a454a03..000000000 Binary files a/docs/docs/img/overview.png and /dev/null differ diff --git a/docs/docs/img/pirate_graph.gif b/docs/docs/img/pirate_graph.gif deleted file mode 100644 index 41cbec8cb..000000000 Binary files a/docs/docs/img/pirate_graph.gif and /dev/null differ diff 
--git a/docs/docs/img/regular_graph.png b/docs/docs/img/regular_graph.png deleted file mode 100644 index 4d48b504a..000000000 Binary files a/docs/docs/img/regular_graph.png and /dev/null differ diff --git a/docs/docs/img/subworkflow_diagram.png b/docs/docs/img/subworkflow_diagram.png deleted file mode 100644 index 0e51a1094..000000000 Binary files a/docs/docs/img/subworkflow_diagram.png and /dev/null differ diff --git a/docs/docs/img/task_states.png b/docs/docs/img/task_states.png deleted file mode 100644 index 22ebfbcad..000000000 Binary files a/docs/docs/img/task_states.png and /dev/null differ diff --git a/docs/docs/img/task_states.svg b/docs/docs/img/task_states.svg deleted file mode 100644 index 3fd55079b..000000000 --- a/docs/docs/img/task_states.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/docs/docs/img/timeline.png b/docs/docs/img/timeline.png deleted file mode 100644 index ed092b5a0..000000000 Binary files a/docs/docs/img/timeline.png and /dev/null differ diff --git a/docs/docs/img/tutorial/ShippingWorkflow.png b/docs/docs/img/tutorial/ShippingWorkflow.png deleted file mode 100644 index 82aa43d97..000000000 Binary files a/docs/docs/img/tutorial/ShippingWorkflow.png and /dev/null differ diff --git a/docs/docs/img/tutorial/ShippingWorkflowRunning.png b/docs/docs/img/tutorial/ShippingWorkflowRunning.png deleted file mode 100644 index 8bdd63501..000000000 Binary files a/docs/docs/img/tutorial/ShippingWorkflowRunning.png and /dev/null differ diff --git a/docs/docs/img/tutorial/ShippingWorkflowUPS.png b/docs/docs/img/tutorial/ShippingWorkflowUPS.png deleted file mode 100644 index 68b5ea93f..000000000 Binary files a/docs/docs/img/tutorial/ShippingWorkflowUPS.png and /dev/null differ diff --git a/docs/docs/img/tutorial/SubWorkflow.png b/docs/docs/img/tutorial/SubWorkflow.png deleted file mode 100644 index f91865abd..000000000 Binary files a/docs/docs/img/tutorial/SubWorkflow.png and /dev/null differ diff --git 
a/docs/docs/img/tutorial/Sub_Workflow_Run.png b/docs/docs/img/tutorial/Sub_Workflow_Run.png deleted file mode 100644 index b1635f2b4..000000000 Binary files a/docs/docs/img/tutorial/Sub_Workflow_Run.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Switch_Default.png b/docs/docs/img/tutorial/Switch_Default.png deleted file mode 100644 index b501c469b..000000000 Binary files a/docs/docs/img/tutorial/Switch_Default.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Switch_Fedex.png b/docs/docs/img/tutorial/Switch_Fedex.png deleted file mode 100644 index 63cfc50c3..000000000 Binary files a/docs/docs/img/tutorial/Switch_Fedex.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Switch_UPS.png b/docs/docs/img/tutorial/Switch_UPS.png deleted file mode 100644 index ca9bc1d8f..000000000 Binary files a/docs/docs/img/tutorial/Switch_UPS.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Switch_Workflow.png b/docs/docs/img/tutorial/Switch_Workflow.png deleted file mode 100644 index dbac1b051..000000000 Binary files a/docs/docs/img/tutorial/Switch_Workflow.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Terminate_Task.png b/docs/docs/img/tutorial/Terminate_Task.png deleted file mode 100644 index b045dd9ab..000000000 Binary files a/docs/docs/img/tutorial/Terminate_Task.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Terminate_Task_Run.png b/docs/docs/img/tutorial/Terminate_Task_Run.png deleted file mode 100644 index 353c0db57..000000000 Binary files a/docs/docs/img/tutorial/Terminate_Task_Run.png and /dev/null differ diff --git a/docs/docs/img/tutorial/Terminate_Task_Successful.png b/docs/docs/img/tutorial/Terminate_Task_Successful.png deleted file mode 100644 index 411f531e3..000000000 Binary files a/docs/docs/img/tutorial/Terminate_Task_Successful.png and /dev/null differ diff --git a/docs/docs/img/tutorial/conductorConsole.png b/docs/docs/img/tutorial/conductorConsole.png deleted file mode 100644 index 
d5ae6477a..000000000 Binary files a/docs/docs/img/tutorial/conductorConsole.png and /dev/null differ diff --git a/docs/docs/img/tutorial/conductorHome.png b/docs/docs/img/tutorial/conductorHome.png deleted file mode 100644 index 314b216ba..000000000 Binary files a/docs/docs/img/tutorial/conductorHome.png and /dev/null differ diff --git a/docs/docs/img/tutorial/conductorUI.png b/docs/docs/img/tutorial/conductorUI.png deleted file mode 100644 index 5f5ec8b56..000000000 Binary files a/docs/docs/img/tutorial/conductorUI.png and /dev/null differ diff --git a/docs/docs/img/tutorial/conductorUIHome.png b/docs/docs/img/tutorial/conductorUIHome.png deleted file mode 100644 index c21d9ab8e..000000000 Binary files a/docs/docs/img/tutorial/conductorUIHome.png and /dev/null differ diff --git a/docs/docs/img/tutorial/dockerHome.png b/docs/docs/img/tutorial/dockerHome.png deleted file mode 100644 index 59ac72ec3..000000000 Binary files a/docs/docs/img/tutorial/dockerHome.png and /dev/null differ diff --git a/docs/docs/img/tutorial/docsVersionDropdown.png b/docs/docs/img/tutorial/docsVersionDropdown.png deleted file mode 100644 index ff1cbe688..000000000 Binary files a/docs/docs/img/tutorial/docsVersionDropdown.png and /dev/null differ diff --git a/docs/docs/img/tutorial/elasticSearchHome.png b/docs/docs/img/tutorial/elasticSearchHome.png deleted file mode 100644 index f417b0c1d..000000000 Binary files a/docs/docs/img/tutorial/elasticSearchHome.png and /dev/null differ diff --git a/docs/docs/img/tutorial/firstWorkerWorkflow.png b/docs/docs/img/tutorial/firstWorkerWorkflow.png deleted file mode 100644 index 1f983b96e..000000000 Binary files a/docs/docs/img/tutorial/firstWorkerWorkflow.png and /dev/null differ diff --git a/docs/docs/img/tutorial/localeDropdown.png b/docs/docs/img/tutorial/localeDropdown.png deleted file mode 100644 index d7163f967..000000000 Binary files a/docs/docs/img/tutorial/localeDropdown.png and /dev/null differ diff --git 
a/docs/docs/img/tutorial/metadataWorkflowPost.png b/docs/docs/img/tutorial/metadataWorkflowPost.png deleted file mode 100644 index af7aa2c33..000000000 Binary files a/docs/docs/img/tutorial/metadataWorkflowPost.png and /dev/null differ diff --git a/docs/docs/img/tutorial/metadataWorkflowRun.png b/docs/docs/img/tutorial/metadataWorkflowRun.png deleted file mode 100644 index 3ed61f5be..000000000 Binary files a/docs/docs/img/tutorial/metadataWorkflowRun.png and /dev/null differ diff --git a/docs/docs/img/tutorial/successfulWorkerExecution.png b/docs/docs/img/tutorial/successfulWorkerExecution.png deleted file mode 100644 index 8cd2774e7..000000000 Binary files a/docs/docs/img/tutorial/successfulWorkerExecution.png and /dev/null differ diff --git a/docs/docs/img/tutorial/swagger.png b/docs/docs/img/tutorial/swagger.png deleted file mode 100644 index 13a23bc4d..000000000 Binary files a/docs/docs/img/tutorial/swagger.png and /dev/null differ diff --git a/docs/docs/img/tutorial/uiWorkflowDefinition.png b/docs/docs/img/tutorial/uiWorkflowDefinition.png deleted file mode 100644 index 608461474..000000000 Binary files a/docs/docs/img/tutorial/uiWorkflowDefinition.png and /dev/null differ diff --git a/docs/docs/img/tutorial/uiWorkflowDefinitionVisual.png b/docs/docs/img/tutorial/uiWorkflowDefinitionVisual.png deleted file mode 100644 index 1f0aa1f23..000000000 Binary files a/docs/docs/img/tutorial/uiWorkflowDefinitionVisual.png and /dev/null differ diff --git a/docs/docs/img/tutorial/workflowLoaded.png b/docs/docs/img/tutorial/workflowLoaded.png deleted file mode 100644 index 729ad7ac5..000000000 Binary files a/docs/docs/img/tutorial/workflowLoaded.png and /dev/null differ diff --git a/docs/docs/img/tutorial/workflowRunIdCopy.png b/docs/docs/img/tutorial/workflowRunIdCopy.png deleted file mode 100644 index c8de99e53..000000000 Binary files a/docs/docs/img/tutorial/workflowRunIdCopy.png and /dev/null differ diff --git a/docs/docs/img/tutorial/workflow_debugging.png 
b/docs/docs/img/tutorial/workflow_debugging.png deleted file mode 100644 index ed64f648f..000000000 Binary files a/docs/docs/img/tutorial/workflow_debugging.png and /dev/null differ diff --git a/docs/docs/img/tutorial/workflow_execution_view.png b/docs/docs/img/tutorial/workflow_execution_view.png deleted file mode 100644 index 871b8c0ce..000000000 Binary files a/docs/docs/img/tutorial/workflow_execution_view.png and /dev/null differ diff --git a/docs/docs/img/tutorial/workflow_task_fail.png b/docs/docs/img/tutorial/workflow_task_fail.png deleted file mode 100644 index e7c8211db..000000000 Binary files a/docs/docs/img/tutorial/workflow_task_fail.png and /dev/null differ diff --git a/docs/docs/img/workflow.svg b/docs/docs/img/workflow.svg deleted file mode 100644 index e00e8928d..000000000 --- a/docs/docs/img/workflow.svg +++ /dev/null @@ -1,615 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/docs/img/workflow_fork.png b/docs/docs/img/workflow_fork.png deleted file mode 100644 index 293b34ece..000000000 Binary files a/docs/docs/img/workflow_fork.png and /dev/null differ diff --git a/docs/docs/index.md b/docs/docs/index.md deleted file mode 100644 index d7cf5081d..000000000 --- a/docs/docs/index.md +++ /dev/null @@ -1,153 +0,0 @@ - - - -
    -
    -
    -
    - Open Source -
    -
    - Apache-2.0 license for commercial and non-commerical use. Freedom to deploy, modify and contribute back. -
    -
    -
    -
    - Modular -
    -
    - A fully abstracted backend enables you choose your own database persistance layer and queueing service. -
    -
    -
    -
    - Proven -
    -
    - Enterprise ready, Java Spring based platform that has been battle tested in production systems at Netflix and elsewhere. -
    -
    -
    - - -
    -
    -
    - Control -
    -
    - Powerful flow control constructs including Decisions, Dynamic Fork-Joins and Subworkflows. Variables and templates are supported. -
    -
    -
    -
    - Polyglot -
    -
    - Client libraries in multiple languages allows workers to be implemented in Java, Node JS, Python and C#. -
    -
    -
    -
    - Scalable -
    -
    - Distributed architecture for both orchestrator and workers scalable from a single workflow to millions of concurrent processes. -
    -
    -
    -
    - -
    -
    -
    -
    - Developer Experience -
    -
    -
      -
    • Discover and visualize the process flows from the bundled UI
    • -
    • Integrated interface to create, refine and validate workflows
    • -
    • JSON based workflow definition DSL
    • -
    • Full featured API for custom automation
    • - -
    -
    -
    -
    -
    -
    -
    - -
    -
    -
    -
    - Observability -
    -
    -
      -
    • Understand, debug and iterate on task and workflow executions.
    • -
    • Fine grain operational control over workflows with the ability to pause, resume, restart, retry and terminate
    • -
    -
    -
    -
    -
    -
    -
    -
    - - -
    -
    -
    -
    -

    Why Conductor?

    -
    -
    -
    -
    -
    -

    - Service Orchestration -

    -
    -

    Workflow definitions are decoupled from task implementations. This allows the creation of process flows in which each individual task can be implemented - by an encapsulated microservice.

    -

    Desiging a workflow orchestrator that is resilient and horizontally scalable is not a simple problem. At Netflix we have developed a solution in Conductor.

    -
    -
    -
    -
    -
    -

    - Service Choreography -

    -
    - Process flows are implicitly defined across multiple service implementations, often with - tight peer-to-peer coupling between services. Multiple event buses and complex - pub/sub models limit observability around process progress and capacity. -
    -
    -
    -
    -
    -
    diff --git a/docs/docs/labs/beginner.md b/docs/docs/labs/beginner.md deleted file mode 100644 index 3953cd525..000000000 --- a/docs/docs/labs/beginner.md +++ /dev/null @@ -1,439 +0,0 @@ -# Beginner Lab -## Hands on mode -Please feel free to follow along using any of these resources: - -- Using cURL -- Postman or similar REST client - -## Creating a Workflow - -Let's create a simple workflow that adds Netflix Idents to videos. We'll be mocking the adding Idents part and focusing on actually executing this process flow. - -!!!info "What are Netflix Idents?" - Netflix Idents are those 4 second videos with Netflix logo, which appears at the beginning and end of shows. You might have also noticed they're different for Animation and several other genres. - -!!!warning "Disclaimer" - Obviously, this is not how Netflix adds Idents. Those Workflows are indeed very complex. But, it should give you an idea about how Conductor can be used to implement similar features. - -The workflow in this lab will look like this: - -![img](img/bgnr_complete_workflow.png) - -This workflow contains the following: - -* Worker Task `verify_if_idents_are_added` to verify if Idents are already added. - -* [Switch Task](/reference-docs/switch-task.html) that takes output from the previous task, and decides whether to schedule the `add_idents` task. - -* `add_idents` task which is another worker Task. - -### Creating Task definitions - - -Let's create the [task definition](/configuration/taskdef.html) for `verify_if_idents_are_added` in JSON. This task will be a *SIMPLE* task which is supposed to be executed by an Idents microservice. We'll be mocking the Idents microservice part. - - - -**Note** that at this point, we don't have to specify whether it is a System task or Worker task. We are only specifying the required configurations for the task, like number of times it should be retried, timeouts etc. We shall start by using `name` parameter for task name. 
-```json -{ - "name": "verify_if_idents_are_added" -} -``` - -We'd like this task to be retried 3 times on failure. - -```json -{ - "name": "verify_if_idents_are_added", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10 -} -``` - -And to timeout after 300 seconds. -i.e. if the task doesn't finish execution within this time limit after transitioning to `IN_PROGRESS` state, the Conductor server cancels this task and schedules a new execution of this task in the queue. - -```json -{ - "name": "verify_if_idents_are_added", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF" -} -``` - -And a [responseTimeout](/architecture/tasklifecycle.html#response-timeout-seconds) of 180 seconds. - -```json -{ - "name": "verify_if_idents_are_added", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF", - "responseTimeoutSeconds": 180 -} -``` - - -We can define several other fields defined [here](/configuration/taskdef.html), but this is a good place to start with. - - -Similarly, create another task definition: `add_idents`. - -```json -{ - "name": "add_idents", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF", - "responseTimeoutSeconds": 180 -} -``` - -Send a `POST` request to `/metadata/taskdefs` endpoint to register these tasks. You can use Swagger, Postman, CURL or similar tools. - -!!!info "Why is the Switch Task not registered?" - System Tasks that are part of control flow do not need to be registered. However, some system tasks where the retries, rate limiting and other mechanisms are required, like `HTTP` Task, are to be registered though. - -!!! Important - Task and Workflow Definition names are unique. The names we use below might have already been registered. 
For this lab, add a prefix with your username, `{my_username}_verify_if_idents_are_added` for example. This is definitely not recommended for Production usage though. - - -**Example** -``` -curl -X POST \ - http://localhost:8080/api/metadata/taskdefs \ - -H 'Content-Type: application/json' \ - -d '[ - { - "name": "verify_if_idents_are_added", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF", - "responseTimeoutSeconds": 180, - "ownerEmail": "type your email here" - }, - { - "name": "add_idents", - "retryCount": 3, - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "timeoutSeconds": 300, - "timeoutPolicy": "TIME_OUT_WF", - "responseTimeoutSeconds": 180, - "ownerEmail": "type your email here" - } -]' -``` - -### Creating Workflow Definition - -Creating Workflow definition is almost similar. We shall use the Task definitions created above. Note that same Task definitions can be used in multiple workflows, or for multiple times in same Workflow (that's where `taskReferenceName` is useful). - -A workflow without any tasks looks like this: -```json -{ - "name": "add_netflix_identation", - "description": "Adds Netflix Identation to video files.", - "version": 1, - "schemaVersion": 2, - "tasks": [] -} -``` - -Add the first task that this workflow has to execute. All the tasks must be added to the `tasks` array. - -```json -{ - "name": "add_netflix_identation", - "description": "Adds Netflix Identation to video files.", - "version": 1, - "schemaVersion": 2, - "tasks": [ - { - "name": "verify_if_idents_are_added", - "taskReferenceName": "ident_verification", - "inputParameters": { - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - } - ] -} -``` - -**Wiring Input/Outputs** - -Notice how we were using `${workflow.input.contentId}` to pass inputs to this task. Conductor can wire inputs between workflow and tasks, and between tasks. 
-i.e The task `verify_if_idents_are_added` is wired to accept inputs from the workflow input using JSONPath expression `${workflow.input.param}`. - - -Learn more about wiring inputs and outputs [here](/configuration/workflowdef.html#wiring-inputs-and-outputs). - -Let's define `decisionCases` now. - - ->Note: in earlier versions of this tutorial, the "decision" task was used. This has been deprecated. - -Checkout the Switch task structure [here](/reference-docs/switch-task.html). - -A Switch task is specified by the `evaulatorType`, `expression` (the expression that defines the Switch) and `decisionCases` which lists all the branches of Switch task. - -In this case, we'll use `"evaluatorType": "value-param"`, meaning that we'll just use the value inputted to make the decision. Alternatively, there is a `"evaluatorType": "JavaScript"` that can be used for more complicated evaluations. - -Adding the switch task (without any decision cases): -```json -{ - "name": "add_netflix_identation", - "description": "Adds Netflix Identation to video files.", - "version": 2, - "schemaVersion": 2, - "tasks": [ - { - "name": "verify_if_idents_are_added", - "taskReferenceName": "ident_verification", - "inputParameters": { - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - }, - { - "name": "switch_task", - "taskReferenceName": "is_idents_added", - "inputParameters": { - "case_value_param": "${ident_verification.output.is_idents_added}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case_value_param", - "decisionCases": { - - } - } - ] -} -``` - -Each switch task can have multiple tasks, so it has to be defined as an array. 
-```json -{ - "name": "add_netflix_identation", - "description": "Adds Netflix Identation to video files.", - "version": 2, - "schemaVersion": 2, - "tasks": [ - { - "name": "verify_if_idents_are_added", - "taskReferenceName": "ident_verification", - "inputParameters": { - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - }, - { - "name": "switch_task", - "taskReferenceName": "is_idents_added", - "inputParameters": { - "case_value_param": "${ident_verification.output.is_idents_added}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case_value_param", - "decisionCases": { - "false": [ - { - "name": "add_idents", - "taskReferenceName": "add_idents_by_type", - "inputParameters": { - "identType": "${workflow.input.identType}", - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - } - ] - } - } - ] -} -``` - -Just like the task definitions, register this workflow definition by sending a POST request to `/workflow` endpoint. - -**Example** -``` -curl -X POST \ - http://localhost:8080/api/metadata/workflow \ - -H 'Content-Type: application/json' \ - -d '{ - "name": "add_netflix_identation", - "description": "Adds Netflix Identation to video files.", - "version": 2, - "schemaVersion": 2, - "tasks": [ - { - "name": "verify_if_idents_are_added", - "taskReferenceName": "ident_verification", - "inputParameters": { - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - }, - { - "name": "switch_task", - "taskReferenceName": "is_idents_added", - "inputParameters": { - "case_value_param": "${ident_verification.output.is_idents_added}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case_value_param", - "decisionCases": { - "false": [ - { - "name": "add_idents", - "taskReferenceName": "add_idents_by_type", - "inputParameters": { - "identType": "${workflow.input.identType}", - "contentId": "${workflow.input.contentId}" - }, - "type": "SIMPLE" - } - ] - } - } - ] -}' -``` - -### 
Starting the Workflow - -Send a `POST` request to `/workflow` with: -```json -{ - "name": "add_netflix_identation", - "version": 2, - "correlationId": "my_netflix_identation_workflows", - "input": { - "identType": "animation", - "contentId": "my_unique_content_id" - } -} -``` - -Example: -``` -curl -X POST \ - http://localhost:8080/api/workflow/add_netflix_identation \ - -H 'Content-Type: application/json' \ - -d '{ - "identType": "animation", - "contentId": "my_unique_content_id" -}' -``` - -Successful POST request should return a workflow Id, which you can use to find the execution in the UI. - -### Conductor User Interface - -Open the UI and navigate to the RUNNING tab, the Workflow should be in the state as below: - -![img](img/bgnr_state_scheduled.png) - -Feel free to explore the various functionalities that the UI exposes. To elaborate on a few: - -* Workflow Task modals (Opens on clicking any of the tasks in the workflow), which includes task I/O, logs and task JSON. -* Task Details tab, which shows the sequence of task execution, status, start/end time, and link to worker details which executed the task. -* Input/Output tab shows workflow input and output. - - -### Poll for Worker task - -Now that `verify_if_idents_are_added` task is in `SCHEDULED` state, it is the worker's turn to fetch the task, execute it and update Conductor with final status of the task. - -Ideally, the workers implementing the [Client](/gettingstarted/client.html#worker) interface would do this process, executing the tasks on real microservices. But, let's mock this part. - -Send a `GET` request to `/poll` endpoint with your task type. - -For example: - -``` -curl -X GET \ - http://localhost:8080/api/tasks/poll/verify_if_idents_are_added -``` - - -### Return response, add logs - -We can respond to Conductor with any of the following states: - -* Task has COMPLETED. -* Task has FAILED. -* Call back after seconds [Process the task at a later time]. 
- -Considering our Ident Service has verified that the Ident's are not yet added to given Content Id, let's return the task status by sending the below `POST` request to `/tasks` endpoint, with payload: - -```json -{ - "workflowInstanceId": "{workflowId}", - "taskId": "{taskId}", - "reasonForIncompletion": "", - "callbackAfterSeconds": 0, - "workerId": "localhost", - "status": "COMPLETED", - "outputData": { - "is_idents_added": false - } -} -``` - -Example: - -``` -curl -X POST \ - http://localhost:8080/api/tasks \ - -H 'Content-Type: application/json' \ - -d '{ - "workflowInstanceId": "cb7c5041-aa85-4940-acb4-3bdcfa9f5c5c", - "taskId": "741f362b-ee9a-47b6-81b5-9bbbd5c4c992", - "reasonForIncompletion": "", - "callbackAfterSeconds": 0, - "workerId": "string", - "status": "COMPLETED", - "outputData": { - "is_idents_added": false - }, - "logs": [ - { - "log": "Ident verification successful for title: {some_title_name}, with Id: {some_id}", - "createdTime": 1550178825 - } - ] - }' -``` - -!!! Info "Check logs in UI" - You can find the logs we just sent by clicking the `verify_if_idents_are_added`, upon which a modal should open with `Logs` tab. - -### Why is System task executed, but Worker task is Scheduled. - -You will notice that Workflow is in the state as below after sending the POST request: - -![img](img/bgnr_systask_state.png) - -Conductor has executed `is_idents_added` all through it's lifecycle, without us polling, or returning the status of Task. If it is still unclear, `is_idents_added` is a System task, and System tasks are executed by Conductor Server. - -But, `add_idents` is a SIMPLE task. So, the complete lifecyle of this task (Poll, Update) should be handled by a worker to continue with W\workflow execution. When Conductor has finished executing all the tasks in given flow, the workflow will reach Terminal state (COMPLETED, FAILED, TIMED_OUT etc.) 
- -## Next steps - -You can play around this workflow by failing one of the Tasks, restarting or retrying the Workflow, or by tuning the number of retries, timeoutSeconds etc. diff --git a/docs/docs/labs/eventhandlers.md b/docs/docs/labs/eventhandlers.md deleted file mode 100644 index 304795af4..000000000 --- a/docs/docs/labs/eventhandlers.md +++ /dev/null @@ -1,179 +0,0 @@ -# Events and Event Handlers -## About - -In this Lab, we shall: - -* Publish an Event to Conductor using `Event` task. -* Subscribe to Events, and perform actions: - * Start a Workflow - * Complete Task - -Conductor Supports Eventing with two Interfaces: - -* [Event Task](/configuration/systask.html#event) -* [Event Handlers](/configuration/eventhandlers.html#event-handler) - -We shall create a simple cyclic workflow similar to this: - -![img](img/EventHandlerCycle.png) - -## Create Workflow Definitions - -Let's create two workflows: - -* `test_workflow_for_eventHandler` which will have an `Event` task to start another workflow, and a `WAIT` System task that will be completed by an event. -* `test_workflow_startedBy_eventHandler` which will have an `Event` task to generate an event to complete `WAIT` task in the above workflow. 
- -Send `POST` requests to `/metadata/workflow` endpoint with below payloads: - -```json -{ - "name": "test_workflow_for_eventHandler", - "description": "A test workflow to start another workflow with EventHandler", - "version": 1, - "tasks": [ - { - "name": "test_start_workflow_event", - "taskReferenceName": "start_workflow_with_event", - "type": "EVENT", - "sink": "conductor" - }, - { - "name": "test_task_tobe_completed_by_eventHandler", - "taskReferenceName": "test_task_tobe_completed_by_eventHandler", - "type": "WAIT" - } - ] -} -``` - -```json -{ - "name": "test_workflow_startedBy_eventHandler", - "description": "A test workflow which is started by EventHandler, and then goes on to complete task in another workflow.", - "version": 1, - "tasks": [ - { - "name": "test_complete_task_event", - "taskReferenceName": "complete_task_with_event", - "inputParameters": { - "sourceWorkflowId": "${workflow.input.sourceWorkflowId}" - }, - "type": "EVENT", - "sink": "conductor" - } - ] -} -``` - -### Event Tasks in Workflow - -`EVENT` task is a System task, and we shall define it just like other Tasks in Workflow, with `sink` parameter. Also, `EVENT` task doesn't have to be registered before using in Workflow. This is also true for the `WAIT` task. -Hence, we will not be registering any tasks for these workflows. - -## Events are sent, but they're not handled (yet) - -Once you try to start `test_workflow_for_eventHandler` workflow, you would notice that the event is sent successfully, but the second worflow `test_workflow_startedBy_eventHandler` is not started. We have sent the Events, but we also need to define `Event Handlers` for Conductor to take any `actions` based on the Event. Let's create `Event Handlers`. - -## Create Event Handlers - -Event Handler definitions are pretty much like Task or Workflow definitions. We start by name: - -```json -{ - "name": "test_start_workflow" -} -``` - -Event Handler should know the Queue it has to listen to. 
This should be defined in `event` parameter. - -When using Conductor queues, define `event` with format: - -```conductor:{workflow_name}:{taskReferenceName}``` - -And when using SQS, define with format: - -```sqs:{my_sqs_queue_name}``` - -```json -{ - "name": "test_start_workflow", - "event": "conductor:test_workflow_for_eventHandler:start_workflow_with_event" -} -``` - -Event Handler can perform a list of actions defined in `actions` array parameter, for this particular `event` queue. - -```json -{ - "name": "test_start_workflow", - "event": "conductor:test_workflow_for_eventHandler:start_workflow_with_event", - "actions": [ - "" - ], - "active": true -} -``` - -Let's define `start_workflow` action. We shall pass the name of workflow we would like to start. The `start_workflow` parameter can use any of the values from the general [Start Workflow Request](/gettingstarted/startworkflow.html). Here we are passing in the workflowId, so that the Complete Task Event Handler can use it. - -```json -{ - "action": "start_workflow", - "start_workflow": { - "name": "test_workflow_startedBy_eventHandler", - "input": { - "sourceWorkflowId": "${workflowInstanceId}" - } - } -} -``` - -Send a `POST` request to `/event` endpoint: - -```json -{ - "name": "test_start_workflow", - "event": "conductor:test_workflow_for_eventHandler:start_workflow_with_event", - "actions": [ - { - "action": "start_workflow", - "start_workflow": { - "name": "test_workflow_startedBy_eventHandler", - "input": { - "sourceWorkflowId": "${workflowInstanceId}" - } - } - } - ], - "active": true -} -``` - -Similarly, create another Event Handler to complete task. 
- -```json -{ - "name": "test_complete_task_event", - "event": "conductor:test_workflow_startedBy_eventHandler:complete_task_with_event", - "actions": [ - { - "action": "complete_task", - "complete_task": { - "workflowId": "${sourceWorkflowId}", - "taskRefName": "test_task_tobe_completed_by_eventHandler" - } - } - ], - "active": true -} -``` - -## Final flow of Workflow - -After wiring all of the above, starting the `test_workflow_for_eventHandler` should: - -1. Start `test_workflow_startedBy_eventHandler` workflow. -2. Sets `test_task_tobe_completed_by_eventHandler` WAIT task `IN_PROGRESS`. -3. `test_workflow_startedBy_eventHandler` event task would publish an Event to complete the WAIT task above. -4. Both the workflows would move to `COMPLETED` state. diff --git a/docs/docs/labs/img/EventHandlerCycle.png b/docs/docs/labs/img/EventHandlerCycle.png deleted file mode 100644 index 49f77aa47..000000000 Binary files a/docs/docs/labs/img/EventHandlerCycle.png and /dev/null differ diff --git a/docs/docs/labs/img/bgnr_complete_workflow.png b/docs/docs/labs/img/bgnr_complete_workflow.png deleted file mode 100644 index 0cf16491b..000000000 Binary files a/docs/docs/labs/img/bgnr_complete_workflow.png and /dev/null differ diff --git a/docs/docs/labs/img/bgnr_state_scheduled.png b/docs/docs/labs/img/bgnr_state_scheduled.png deleted file mode 100644 index 4559b7716..000000000 Binary files a/docs/docs/labs/img/bgnr_state_scheduled.png and /dev/null differ diff --git a/docs/docs/labs/img/bgnr_systask_state.png b/docs/docs/labs/img/bgnr_systask_state.png deleted file mode 100644 index 977cc56de..000000000 Binary files a/docs/docs/labs/img/bgnr_systask_state.png and /dev/null differ diff --git a/docs/docs/labs/kitchensink.md b/docs/docs/labs/kitchensink.md deleted file mode 100644 index d3f7f403f..000000000 --- a/docs/docs/labs/kitchensink.md +++ /dev/null @@ -1,259 +0,0 @@ -# Kitchen Sink -An example kitchensink workflow that demonstrates the usage of all the schema constructs. 
- -### Definition - -```json -{ - "name": "kitchensink", - "description": "kitchensink workflow", - "version": 1, - "tasks": [ - { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "event_task", - "taskReferenceName": "event_0", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "EVENT", - "sink": "conductor" - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${task_2.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "task_4", - "taskReferenceName": "task_4", - "inputParameters": { - "mod": "${task_2.output.mod}", - "oddEven": "${task_2.output.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${task_4.output.dynamicTasks}", - "input": "${task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input" - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN" - } - ], - "1": [ - { - "name": "fork_join", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_10", - "taskReferenceName": "task_10", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - [ - { - 
"name": "task_11", - "taskReferenceName": "task_11", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "join2", - "type": "JOIN", - "joinOn": [ - "wf3", - "wf4" - ] - } - ] - } - }, - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_1", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "inputParameters": { - "statuses": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "type": "SIMPLE" - } - ], - "outputParameters": { - "statues": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "ownerEmail": "example@email.com", - "schemaVersion": 2 -} -``` -### Visual Flow -![img](/img/kitchensink.png) - -### Running Kitchensink Workflow -1. Start the server as documented [here](/gettingstarted/docker.html). Use ```-DloadSample=true``` java system property when launching the server. This will create a kitchensink workflow, related task definitions and kick off an instance of kitchensink workflow. -2. Once the workflow has started, the first task remains in the ```SCHEDULED``` state. This is because no workers are currently polling for the task. -3. We will use the REST endpoints directly to poll for tasks and updating the status. 
- -#### Start workflow execution -Start the execution of the kitchensink workflow by posting the following: - -```shell -curl -X POST --header 'Content-Type: application/json' --header 'Accept: text/plain' 'http://localhost:8080/api/workflow/kitchensink' -d ' -{ - "task2Name": "task_5" -} -' -``` -The response is a text string identifying the workflow instance id. - -#### Poll for the first task: - -```shell -curl http://localhost:8080/api/tasks/poll/task_1 -``` - -The response should look something like: - -```json -{ - "taskType": "task_1", - "status": "IN_PROGRESS", - "inputData": { - "mod": null, - "oddEven": null - }, - "referenceTaskName": "task_1", - "retryCount": 0, - "seq": 1, - "pollCount": 1, - "taskDefName": "task_1", - "scheduledTime": 1486580932471, - "startTime": 1486580933869, - "endTime": 0, - "updateTime": 1486580933902, - "startDelayInSeconds": 0, - "retried": false, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "b0d1a935-3d74-46fd-92b2-0ca1e388659f", - "taskId": "b9eea7dd-3fbd-46b9-a9ff-b00279459476", - "callbackAfterSeconds": 0, - "polledTime": 1486580933902, - "queueWaitTime": 1398 -} -``` -#### Update the task status -* Note the values for ```taskId``` and ```workflowInstanceId``` fields from the poll response -* Update the status of the task as ```COMPLETED``` as below: - -```json -curl -H 'Content-Type:application/json' -H 'Accept:application/json' -X POST http://localhost:8080/api/tasks/ -d ' -{ - "taskId": "b9eea7dd-3fbd-46b9-a9ff-b00279459476", - "workflowInstanceId": "b0d1a935-3d74-46fd-92b2-0ca1e388659f", - "status": "COMPLETED", - "outputData": { - "mod": 5, - "taskToExecute": "task_1", - "oddEven": 0, - "dynamicTasks": [ - { - "name": "task_1", - "taskReferenceName": "task_1_1", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_4", - "taskReferenceName": "wf_dyn", - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1" - } - } - ], - "inputs": { - "task_1_1": {}, - 
"wf_dyn": {} - } - } -}' -``` - -This will mark the task_1 as completed and schedule ```task_5``` as the next task. -Repeat the same process for the subsequently scheduled tasks until the completion. diff --git a/docs/docs/labs/running-first-workflow.md b/docs/docs/labs/running-first-workflow.md deleted file mode 100644 index 1abdc3787..000000000 --- a/docs/docs/labs/running-first-workflow.md +++ /dev/null @@ -1,159 +0,0 @@ -# A First Workflow - -In this article we will explore how we can run a really simple workflow that runs without deploying any new microservice. - -Conductor can orchestrate HTTP services out of the box without implementing any code. We will use that to create and run the first workflow. - -See [System Task](/configuration/systask.html) for the list of such built-in tasks. -Using system tasks is a great way to run a lot of our code in production. - -To bring up a local instance of Conductor follow one of the recommended steps: - -1. [Running Locally - From Code](/gettingstarted/local.html) -2. [Running Locally - Docker Compose](/gettingstarted/docker.html) - ---- - -## Configuring our First Workflow - -This is a sample workflow that we can leverage for our test. 
- -```json -{ - "name": "first_sample_workflow", - "description": "First Sample Workflow", - "version": 1, - "tasks": [ - { - "name": "get_population_data", - "taskReferenceName": "get_population_data", - "inputParameters": { - "http_request": { - "uri": "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - "method": "GET" - } - }, - "type": "HTTP" - } - ], - "inputParameters": [], - "outputParameters": { - "data": "${get_population_data.output.response.body.data}", - "source": "${get_population_data.output.response.body.source}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "ownerEmail": "example@email.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 -} -``` - -This is an example workflow that queries a publicly available JSON API to retrieve some data. This workflow doesn’t -require any worker implementation as the tasks in this workflow are managed by the system itself. This is an awesome -feature of Conductor. For a lot of typical work, we won’t have to write any code at all. - -Let's talk about this workflow a little more so that we can gain some context. - -```json -"name" : "first_sample_workflow" -``` - -This line here is how we name our workflow. In this case our workflow name is `first_sample_workflow` - -This workflow contains just one worker. The workers are defined under the key `tasks`. Here is the worker definition -with the most important values: - -```json -{ - "name": "get_population_data", - "taskReferenceName": "get_population_data", - "inputParameters": { - "http_request": { - "uri": "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - "method": "GET" - } - }, - "type": "HTTP" -} -``` - -Here is a list of fields and what it does: - -1. `"name"` : Name of our worker -2. `"taskReferenceName"` : This is a reference to this worker in this specific workflow implementation. 
We can have multiple - workers of the same name in our workflow, but we will need a unique task reference name for each of them. Task - reference name should be unique across our entire workflow. -3. `"inputParameters"` : These are the inputs into our worker. We can hard code inputs as we have done here. We can - also provide dynamic inputs such as from the workflow input or based on the output of another worker. We can find - examples of this in our documentation. -4. `"type"` : This is what defines what the type of worker is. In our example - this is `HTTP`. There are more task - types which we can find in the Conductor documentation. -5. `"http_request"` : This is an input that is required for tasks of type `HTTP`. In our example we have provided a well - known internet JSON API url and the type of HTTP method to invoke - `GET` - -We haven't talked about the other fields that we can use in our definitions as these are either just -metadata or more advanced concepts which we can learn more in the detailed documentation. - -Ok, now that we have walked through our workflow details, let's run this and see how it works. - -To configure the workflow, head over to the swagger API of conductor server and access the metadata workflow create API: - -[http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config#/metadata-resource/create](http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config#/metadata-resource/create) - -If the link doesn’t open the right Swagger section, we can navigate to Metadata-Resource -→ `POST /api/metadata/workflow` - -![Swagger UI - Metadata - Workflow](/img/tutorial/metadataWorkflowPost.png) - -Paste the workflow payload into the Swagger API and hit Execute. 
- -Now if we head over to the UI, we can see this workflow definition created: - -![Conductor UI - Workflow Definition](/img/tutorial/uiWorkflowDefinition.png) - -If we click through we can see a visual representation of the workflow: - -![Conductor UI - Workflow Definition - Visual Flow](/img/tutorial/uiWorkflowDefinitionVisual.png) - -## 2. Running our First Workflow - -Let’s run this workflow. To do that we can use the swagger API under the workflow-resources - -[http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config#/workflow-resource/startWorkflow_1](http://localhost:8080/swagger-ui/index.html?configUrl=/api-docs/swagger-config#/workflow-resource/startWorkflow_1) - -![Swagger UI - Metadata - Workflow - Run](/img/tutorial/metadataWorkflowRun.png) - -Hit **Execute**! - -Conductor will return a workflow id. We will need to use this id to load this up on the UI. If our UI installation has -search enabled we wouldn't need to copy this. If we don't have search enabled (using Elasticsearch) copy it from the -Swagger UI. - -![Swagger UI - Metadata - Workflow - Run](/img/tutorial/workflowRunIdCopy.png) - -Ok, we should see this running and get completed soon. Let’s go to the UI to see what happened. - -To load the workflow directly, use this URL format: - -``` -http://localhost:5000/execution/ -``` - -Replace `` with our workflow id from the previous step. We should see a screen like below. Click on the -different tabs to see all inputs and outputs and task list etc. Explore away! - -![Conductor UI - Workflow Run](/img/tutorial/workflowLoaded.png) - -## Summary - -In this blog post — we learned how to run a sample workflow in our Conductor installation. Concepts we touched on: - -1. Workflow creation -2. System tasks such as HTTP -3. Running a workflow via API - -Thank you for reading, and we hope you found this helpful. Please feel free to reach out to us for any questions and we -are happy to help in any way we can. 
- diff --git a/docs/docs/metrics/client.md b/docs/docs/metrics/client.md deleted file mode 100644 index 1e4bb731b..000000000 --- a/docs/docs/metrics/client.md +++ /dev/null @@ -1,23 +0,0 @@ -# Client Metrics - -When using the Java client, the following metrics are published: - -| Name | Purpose | Tags | -| ------------- |:-------------| -----| -| task_execution_queue_full | Counter to record execution queue has saturated | taskType| -| task_poll_error | Client error when polling for a task queue | taskType, includeRetries, status | -| task_paused | Counter for number of times the task has been polled, when the worker has been paused | taskType | -| task_execute_error | Execution error | taskType| -| task_ack_failed | Task ack failed | taskType | -| task_ack_error | Task ack has encountered an exception | taskType | -| task_update_error | Task status cannot be updated back to server | taskType | -| task_poll_counter | Incremented each time polling is done | taskType | -| task_poll_time | Time to poll for a batch of tasks | taskType | -| task_execute_time | Time to execute a task | taskType | -| task_result_size | Records output payload size of a task | taskType | -| workflow_input_size | Records input payload size of a workflow | workflowType, workflowVersion | -| external_payload_used | Incremented each time external payload storage is used | name, operation, payloadType | - -Metrics on client side supplements the one collected from server in identifying the network as well as client side issues. - -[1]: https://github.com/Netflix/spectator diff --git a/docs/docs/metrics/server.md b/docs/docs/metrics/server.md deleted file mode 100644 index e22d08e2c..000000000 --- a/docs/docs/metrics/server.md +++ /dev/null @@ -1,240 +0,0 @@ -# Server Metrics - -Conductor uses [spectator](https://github.com/Netflix/spectator) to collect the metrics. 
-
-- To enable the conductor server to publish metrics, add this [dependency](http://netflix.github.io/spectator/en/latest/registry/metrics3/) to your build.gradle.
-- Conductor Server enables you to load additional modules dynamically, this feature can be controlled using this [configuration](https://github.com/Netflix/conductor/blob/master/server/README.md#additional-modules-optional).
-- Create your own AbstractModule that overrides the configure function and registers the Spectator metrics registry.
-- Initialize the Registry and add it to the global registry via ```((CompositeRegistry)Spectator.globalRegistry()).add(...)```.
-
-The following metrics are published by the server. You can use these metrics to configure alerts for your workflows and tasks.
-
-| Name | Purpose | Tags |
-| ------------- |:-------------| -----|
-| workflow_server_error | Rate at which server side error is happening | methodName|
-| workflow_failure | Counter for failing workflows|workflowName, status|
-| workflow_start_error | Counter for failing to start a workflow|workflowName|
-| workflow_running | Counter for no.
of running workflows | workflowName, version| -| workflow_execution | Timer for Workflow completion | workflowName, ownerApp | -| task_queue_wait | Time spent by a task in queue | taskType| -| task_execution | Time taken to execute a task | taskType, includeRetries, status | -| task_poll | Time taken to poll for a task | taskType| -| task_poll_count | Counter for number of times the task is being polled | taskType, domain | -| task_queue_depth | Pending tasks queue depth | taskType, ownerApp | -| task_rate_limited | Current number of tasks being rate limited | taskType | -| task_concurrent_execution_limited | Current number of tasks being limited by concurrent execution limit | taskType | -| task_timeout | Counter for timed out tasks | taskType | -| task_response_timeout | Counter for tasks timedout due to responseTimeout | taskType | -| task_update_conflict | Counter for task update conflicts. Eg: when the workflow is in terminal state | workflowName, taskType, taskStatus, workflowStatus | -| event_queue_messages_processed | Counter for number of messages fetched from an event queue | queueType, queueName | -| observable_queue_error | Counter for number of errors encountered when fetching messages from an event queue | queueType | -| event_queue_messages_handled | Counter for number of messages executed from an event queue | queueType, queueName | -| external_payload_storage_usage | Counter for number of times external payload storage was used | name, operation, payloadType | - -[1]: https://github.com/Netflix/spectator - -## Collecting metrics with Log4j - -One way of collecting metrics is to push them into the logging framework (log4j). -Log4j supports various appenders that can print metrics into a console/file or even send them to remote metrics collectors over e.g. syslog channel. - -Conductor provides optional modules that connect metrics registry with the logging framework. 
-To enable these modules, configure following additional modules property in config.properties: - - conductor.metrics-logger.enabled = true - conductor.metrics-logger.reportPeriodSeconds = 15 - -This will push all available metrics into log4j every 15 seconds. - -By default, the metrics will be handled as a regular log message (just printed to console with default log4j.properties). -In order to change that, you can use following log4j configuration that prints metrics into a dedicated file: - - log4j.rootLogger=INFO,console,file - - log4j.appender.console=org.apache.log4j.ConsoleAppender - log4j.appender.console.layout=org.apache.log4j.PatternLayout - log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - log4j.appender.file=org.apache.log4j.RollingFileAppender - log4j.appender.file.File=/app/logs/conductor.log - log4j.appender.file.MaxFileSize=10MB - log4j.appender.file.MaxBackupIndex=10 - log4j.appender.file.layout=org.apache.log4j.PatternLayout - log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - # Dedicated file appender for metrics - log4j.appender.fileMetrics=org.apache.log4j.RollingFileAppender - log4j.appender.fileMetrics.File=/app/logs/metrics.log - log4j.appender.fileMetrics.MaxFileSize=10MB - log4j.appender.fileMetrics.MaxBackupIndex=10 - log4j.appender.fileMetrics.layout=org.apache.log4j.PatternLayout - log4j.appender.fileMetrics.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - log4j.logger.ConductorMetrics=INFO,console,fileMetrics - log4j.additivity.ConductorMetrics=false - -This configuration is bundled with conductor-server in file: log4j-file-appender.properties and can be utilized by setting env var: - - LOG4J_PROP=log4j-file-appender.properties - -This variable is used by _startup.sh_ script. - -### Integration with logstash using a log file - -The metrics collected by log4j can be further processed and pushed into a central collector such as ElasticSearch. 
-One way of achieving this is to use: log4j file appender -> logstash -> ElasticSearch. - -Considering the above setup, you can deploy logstash to consume the contents of /app/logs/metrics.log file, process it and send further to elasticsearch. - -Following configuration needs to be used in logstash to achieve it: - -pipeline.yml: - - - pipeline.id: conductor_metrics - path.config: "/usr/share/logstash/pipeline/logstash_metrics.conf" - pipeline.workers: 2 - -logstash_metrics.conf - - input { - - file { - path => ["/conductor-server-logs/metrics.log"] - codec => multiline { - pattern => "^%{TIMESTAMP_ISO8601} " - negate => true - what => previous - } - } - } - - filter { - kv { - field_split => ", " - include_keys => [ "name", "type", "count", "value" ] - } - mutate { - convert => { - "count" => "integer" - "value" => "float" - } - } - } - - output { - elasticsearch { - hosts => ["elasticsearch:9200"] - } - } - -Note: In addition to forwarding the metrics into ElasticSearch, logstash will extract following fields from each metric: name, type, count, value and set proper types - -### Integration with fluentd using a syslog channel - -Another example of metrics collection uses: log4j syslog appender -> fluentd -> prometheus. 
- -In this case, a specific log4j properties file needs to be used so that metrics are pushed into a syslog channel: - -``` - log4j.rootLogger=INFO,console,file - - log4j.appender.console=org.apache.log4j.ConsoleAppender - log4j.appender.console.layout=org.apache.log4j.PatternLayout - log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - log4j.appender.file=org.apache.log4j.RollingFileAppender - log4j.appender.file.File=/app/logs/conductor.log - log4j.appender.file.MaxFileSize=10MB - log4j.appender.file.MaxBackupIndex=10 - log4j.appender.file.layout=org.apache.log4j.PatternLayout - log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - # Syslog based appender streaming metrics into fluentd - log4j.appender.server=org.apache.log4j.net.SyslogAppender - log4j.appender.server.syslogHost=fluentd:5170 - log4j.appender.server.facility=LOCAL1 - log4j.appender.server.layout=org.apache.log4j.PatternLayout - log4j.appender.server.layout.ConversionPattern=%d{ISO8601} %5p [%t] (%C) - %m%n - - log4j.logger.ConductorMetrics=INFO,console,server - log4j.additivity.ConductorMetrics=false -``` - -And on the fluentd side you need following configuration: - -``` - - @type prometheus - - - - @type syslog - port 5170 - bind 0.0.0.0 - tag conductor - - ; only allow TIMER metrics of workflow execution and extract tenant ID - @type regexp - expression /^.*type=TIMER, name=workflow_execution.class-WorkflowMonitor.+workflowName-(?.*)_(?.+), count=(?\d+), min=(?[\d.]+), max=(?[\d.]+), mean=(?[\d.]+).*$/ - types count:integer,min:float,max:float,mean:float - - - - - @type prometheus - - name conductor_workflow_count - type gauge - desc The total number of executed workflows - key count - - workflow ${workflow} - tenant ${tenant} - user ${email} - - - - name conductor_workflow_max_duration - type gauge - desc Max duration in millis for a workflow - key max - - workflow ${workflow} - tenant ${tenant} - user ${email} - - - - name 
conductor_workflow_mean_duration - type gauge - desc Mean duration in millis for a workflow - key mean - - workflow ${workflow} - tenant ${tenant} - user ${email} - - - - - - @type stdout - -``` - -With above configuration, fluentd will: -- Listen to raw metrics on 0.0.0.0:5170 -- Collect only workflow_execution TIMER metrics -- Process the raw metrics and expose 3 prometheus specific metrics -- Expose prometheus metrics on http://fluentd:24231/metrics - -## Collecting metrics with Prometheus -Another way to collect metrics is using Prometheus client to push them to Prometheus server. - -Conductor provides optional modules that connect metrics registry with Prometheus. -To enable these modules, configure following additional module property in config.properties: - - conductor.metrics-prometheus.enabled = true - -This will simply push these metrics via Prometheus collector. -However, you need to configure your own Prometheus collector and expose the metrics via an endpoint. diff --git a/docs/docs/reference-docs/annotation-processor.md b/docs/docs/reference-docs/annotation-processor.md deleted file mode 100644 index 7ebfe84cf..000000000 --- a/docs/docs/reference-docs/annotation-processor.md +++ /dev/null @@ -1,33 +0,0 @@ -# Annotation Processor - -- Original Author: Vicent Martí - https://github.com/vmg -- Original Repo: https://github.com/vmg/protogen - -This module is strictly for code generation tasks during builds based on annotations. 
-Currently supports `protogen` - -### Usage - -See example below - -### Example - -This is an actual example of this module which is implemented in common/build.gradle - -```groovy -task protogen(dependsOn: jar, type: JavaExec) { - classpath configurations.annotationsProcessorCodegen - main = 'com.netflix.conductor.annotationsprocessor.protogen.ProtoGenTask' - args( - "conductor.proto", - "com.netflix.conductor.proto", - "github.com/netflix/conductor/client/gogrpc/conductor/model", - "${rootDir}/grpc/src/main/proto", - "${rootDir}/grpc/src/main/java/com/netflix/conductor/grpc", - "com.netflix.conductor.grpc", - jar.archivePath, - "com.netflix.conductor.common", - ) -} -``` - diff --git a/docs/docs/reference-docs/archival-of-workflows.md b/docs/docs/reference-docs/archival-of-workflows.md deleted file mode 100644 index 064d48e81..000000000 --- a/docs/docs/reference-docs/archival-of-workflows.md +++ /dev/null @@ -1,13 +0,0 @@ -# Archival Of Workflows - -Conductor has support for archiving workflow upon termination or completion. Enabling this will delete the workflow from the configured database, but leave the associated data in Elasticsearch so it is still searchable. - -To enable, set the `conductor.workflow-status-listener.type` property to `archive`. - -A number of additional properties are available to control archival. - -| Property | Default Value | Description | -| -- | -- | -- | -| conductor.workflow-status-listener.archival.ttlDuration | 0s | The time to live in seconds for workflow archiving module. 
Currently, only RedisExecutionDAO supports this | -| conductor.workflow-status-listener.archival.delayQueueWorkerThreadCount | 5 | The number of threads to process the delay queue in workflow archival | -| conductor.workflow-status-listener.archival.delaySeconds | 60 | The time to delay the archival of workflow | diff --git a/docs/docs/reference-docs/azureblob-storage.md b/docs/docs/reference-docs/azureblob-storage.md deleted file mode 100644 index 47a370a0a..000000000 --- a/docs/docs/reference-docs/azureblob-storage.md +++ /dev/null @@ -1,44 +0,0 @@ -# Azure Blob Storage - -The [AzureBlob storage](https://github.com/Netflix/conductor/tree/main/azureblob-storage) module uses azure blob to store and retrieve workflows/tasks input/output payload that -went over the thresholds defined in properties named `conductor.[workflow|task].[input|output].payload.threshold.kb`. - -**Warning** Azure Java SDK use libs already present inside `conductor` like `jackson` and `netty`. -You may encounter deprecated issues, or conflicts and need to adapt the code if the module is not maintained along with `conductor`. -It has only been tested with **v12.2.0**. - -## Configuration - -### Usage - -Cf. 
Documentation [External Payload Storage](https://netflix.github.io/conductor/externalpayloadstorage/#azure-blob-storage) - -### Example - -```properties -conductor.additional.modules=com.netflix.conductor.azureblob.AzureBlobModule -es.set.netty.runtime.available.processors=false - -workflow.external.payload.storage=AZURE_BLOB -workflow.external.payload.storage.azure_blob.connection_string=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;EndpointSuffix=localhost -workflow.external.payload.storage.azure_blob.signedurlexpirationseconds=360 -``` - -## Testing - -You can use [Azurite](https://github.com/Azure/Azurite) to simulate an Azure Storage. - -### Troubleshoots - -* When using **es5 persistance** you will receive an `java.lang.IllegalStateException` because the Netty lib will call `setAvailableProcessors` two times. To resolve this issue you need to set the following system property - -``` -es.set.netty.runtime.available.processors=false -``` - -If you want to change the default HTTP client of azure sdk, you can use `okhttp` instead of `netty`. -For that you need to add the following [dependency](https://github.com/Azure/azure-sdk-for-java/tree/master/sdk/storage/azure-storage-blob#default-http-client). - -``` -com.azure:azure-core-http-okhttp:${compatible version} -``` diff --git a/docs/docs/reference-docs/directed-acyclic-graph.md b/docs/docs/reference-docs/directed-acyclic-graph.md deleted file mode 100644 index 5c707eccf..000000000 --- a/docs/docs/reference-docs/directed-acyclic-graph.md +++ /dev/null @@ -1,54 +0,0 @@ -# Directed Acyclic Graph (DAG) -## What is a Directed Acyclic Graph (DAG)? -Conductor workflows are directed acyclic graphs (DAGs). But, what exactly is a DAG? 
- -To understand a DAG, we'll walk through each term (but not in order): - -### Graph - -A graph is "a collection of vertices (or point) and edges (or lines) that indicate connections between the vertices." - -By this definition, this is a graph - just not exactly correct in the context of DAGs: - -

    pirate vs global warming graph

    - -But in the context of workflows, we're thinking of a graph more like this: - -

    a regular graph (source: wikipedia)

    - -Imagine each vertex as a microservice, and the lines are how the microservices are connected together. However, this graph is not a directed graph - as there is no direction given to each connection. - -### Directed - -A directed graph means that there is a direction to each connection. For example, this graph is directed: - -

    directed graph

    - -Each arrow has a direction, Point "N" can proceed directly to "B", but "B" cannot proceed to "N" in the opposite direction. - -### Acyclic - -Acyclic means without circular or cyclic paths. In the directed example above, A -> B -> D -> A is a cyclic loop. - -So a Directed Acyclic Graph is a set of vertices where the connections are directed without any looping. DAG charts can only "move forward" and cannot redo a step (or series of steps.) - -Since a Conductor workflow is a series of vertices that can connect in only a specific direction and cannot loop, a Conductor workflow is thus a directed acyclic graph: - -

    Conductor Dag

    - -### Can a workflow have loops and still be a DAG? - -Yes. For example, Conductor workflows have Do-While loops: - -

    Conductor Dag

    - -This is still a DAG, because the loop is just shorthand for running the tasks inside the loop over and over again. For example, if the 2nd loop in the above image is run 3 times, the workflow path will be: - -1. zero_offset_fix_1 -2. post_to_orbit_ref_1 -3. zero_offset_fix_2 -4. post_to_orbit_ref_2 -5. zero_offset_fix_3 -6. post_to_orbit_ref_3 - -The path is directed forward, and the loop just makes it easier to define the workflow. diff --git a/docs/docs/reference-docs/do-while-task.md b/docs/docs/reference-docs/do-while-task.md deleted file mode 100644 index af4372002..000000000 --- a/docs/docs/reference-docs/do-while-task.md +++ /dev/null @@ -1,196 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Do-While -```json -"type" : "DO_WHILE" -``` -## Introduction -Sequentially execute a list of task as long as a condition is true. -The list of tasks is executed first, before the condition is checked (even for the first iteration). - -When scheduled, each task of this loop will see its `taskReferenceName` concatenated with __i, with i being the iteration number, starting at 1. Warning: taskReferenceName containing arithmetic operators must not be used. - -Each task output is stored as part of the DO_WHILE task, indexed by the iteration value (see example below), allowing the condition to reference the output of a task for a specific iteration (eg. $.LoopTask['iteration]['first_task']) - -The DO_WHILE task is set to `FAILED` as soon as one of the loopOver fails. In such case retry, iteration starts from 1. - -### Limitations -- Domain or isolation group execution is unsupported; - Nested DO_WHILE is unsupported; -- Since loopover tasks will be executed in loop inside scope of parent do while task, crossing branching outside of DO_WHILE task is not respected. -- Nested DO_WHILE tasks are not supported. However, DO_WHILE task supports SUB_WORKFLOW as loopOver task, so we can achieve similar functionality. - -Branching inside loopOver task is supported. 
- - - -## Configuration - -### Input Parameters: - -| name | type | description | -|---------------|------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| loopCondition | String | Condition to be evaluated after every iteration. This is a Javascript expression, evaluated using the Nashorn engine. If an exception occurs during evaluation, the DO_WHILE task is set to FAILED_WITH_TERMINAL_ERROR. | -| loopOver | List[Task] | List of tasks that needs to be executed as long as the condition is true. | - -### Output Parameters - -| name | type | description | -|-----------|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| iteration | Integer | Iteration number: the current one while executing; the final one once the loop is finished | -| `i` | Map[String, Any] | Iteration number as a string, mapped to the task references names and their output. | -| * | Any | Any state can be stored here if the `loopCondition` does so. 
For example `storage` will exist if `loopCondition` is `if ($.LoopTask['iteration'] <= 10) {$.LoopTask.storage = 3; true } else {false}` | - -## Examples - -The following definition: -```json -{ - "name": "Loop Task", - "taskReferenceName": "LoopTask", - "type": "DO_WHILE", - "inputParameters": { - "value": "${workflow.input.value}" - }, - "loopCondition": "if ( ($.LoopTask['iteration'] < $.value ) || ( $.first_task['response']['body'] > 10)) { false; } else { true; }", - "loopOver": [ - { - "name": "first task", - "taskReferenceName": "first_task", - "inputParameters": { - "http_request": { - "uri": "http://localhost:8082", - "method": "POST" - } - }, - "type": "HTTP" - },{ - "name": "second task", - "taskReferenceName": "second_task", - "inputParameters": { - "http_request": { - "uri": "http://localhost:8082", - "method": "POST" - } - }, - "type": "HTTP" - } - ], - "startDelay": 0, - "optional": false -} -``` - -will produce the following execution, assuming 3 executions occurred (alongside `first_task__1`, `first_task__2`, `first_task__3`, -`second_task__1`, `second_task__2` and `second_task__3`): - -```json -{ - "taskType": "DO_WHILE", - "outputData": { - "iteration": 3, - "1": { - "first_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - }, - "second_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - } - }, - "2": { - "first_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - }, - "second_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - } - }, - "3": { - "first_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - }, - "second_task": { - "response": {}, - "headers": { - "Content-Type": "application/json" - } - } - } - } -} -``` - -## Example using iteration key - -Sometimes, you may want to use the iteration value/counter in the tasks used in the loop. 
In this example, an API call is made to GitHub (to the Netflix Conductor repository), but each loop increases the pagination. - -The Loop ```taskReferenceName``` is "get_all_stars_loop_ref". - -In the ```loopCondition``` the term ```$.get_all_stars_loop_ref['iteration']``` is used. - -In tasks embedded in the loop, ```${get_all_stars_loop_ref.output.iteration}``` is used. In this case, it is used to define which page of results the API should return. - -```json -{ - "name": "get_all_stars", - "taskReferenceName": "get_all_stars_loop_ref", - "inputParameters": { - "stargazers": "4000" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.get_all_stars_loop_ref['iteration'] < Math.ceil($.stargazers/100)) { true; } else { false; }", - "loopOver": [ - { - "name": "100_stargazers", - "taskReferenceName": "hundred_stargazers_ref", - "inputParameters": { - "counter": "${get_all_stars_loop_ref.output.iteration}", - "http_request": { - "uri": "https://api.github.com/repos/ntflix/conductor/stargazers?page=${get_all_stars_loop_ref.output.iteration}&per_page=100", - "method": "GET", - "headers": { - "Authorization": "token ${workflow.input.gh_token}", - "Accept": "application/vnd.github.v3.star+json" - } - } - }, - "type": "HTTP", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [], - "retryCount": 3 - } - ] - } - -``` diff --git a/docs/docs/reference-docs/dynamic-fork-task.md b/docs/docs/reference-docs/dynamic-fork-task.md deleted file mode 100644 index e8f8721f6..000000000 --- a/docs/docs/reference-docs/dynamic-fork-task.md +++ /dev/null @@ -1,250 +0,0 @@ -# Dynamic Fork -```json -"type" : "FORK_JOIN_DYNAMIC" -``` - -## Introduction - -A Fork operation in 
Conductor lets you run a specified list of other tasks or sub workflows in parallel after the fork
-task. A fork task is followed by a join operation that waits on the forked tasks or sub workflows to finish. The `JOIN`
-task also collects outputs from each of the forked tasks or sub workflows.
-
-In a regular fork operation (`FORK_JOIN` task), the list of tasks or sub workflows that need to be forked and run in
-parallel is already known at workflow definition creation time. However, there are cases when that list can
-only be determined at run-time and that is when the dynamic fork operation (FORK_JOIN_DYNAMIC task) is needed.
-
-There are three things that are needed to configure a `FORK_JOIN_DYNAMIC` task.
-
-1. A list of tasks or sub-workflows that needs to be forked and run in parallel.
-2. A list of inputs to each of these forked tasks or sub-workflows
-3. A task prior to the `FORK_JOIN_DYNAMIC` task that outputs 1 and 2 above, so they can be wired in as an input to
-   the `FORK_JOIN_DYNAMIC` task
-
-## Use Cases
-
-A `FORK_JOIN_DYNAMIC` is useful when a set of tasks or sub-workflows needs to be executed and the number of tasks or
-sub-workflows is determined at run time. E.g. Let's say we have a task that resizes an image, and we need to create a
-workflow that will resize an image into multiple sizes. In this case, a task can be created prior to
-the `FORK_JOIN_DYNAMIC` task that will prepare the input that needs to be passed into the `FORK_JOIN_DYNAMIC` task. The
-single image resize task does one job. The `FORK_JOIN_DYNAMIC` and the following `JOIN` will manage the multiple
-invocations of the single image resize task. Here, the responsibilities are clearly broken out, where the single image resize
-task does the core job and `FORK_JOIN_DYNAMIC` manages the orchestration and fault tolerance aspects.
- -## Configuration - -Here is an example of a `FORK_JOIN_DYNAMIC` task followed by a `JOIN` task - -```json -{ - "inputParameters": { - "dynamicTasks": "${fooBarTask.output.dynamicTasksJSON}", - "dynamicTasksInput": "${fooBarTask.output.dynamicTasksInputJSON}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "dynamicTasksInput" -}, -{ -"name": "image_multiple_convert_resize_join", -"taskReferenceName": "image_multiple_convert_resize_join_ref", -"type": "JOIN" -} -``` - -Dissecting into this example above, let's look at the three things that are needed to configured for -the `FORK_JOIN_DYNAMIC` task - -`dynamicForkTasksParam` This is a JSON array of task or sub-workflow objects that specifies the list of tasks or -sub-workflows that needs to be forked and run in parallel `dynamicForkTasksInputParamName` This is a JSON map of task or -sub-workflow objects that specifies the list of tasks or sub-workflows that needs to be forked and run in parallel -fooBarTask This is a task that is defined prior to the FORK_JOIN_DYNAMIC in the workflow definition. This task will need -to output (outputParameters) 1 and 2 above so that it can be wired into inputParameters of the FORK_JOIN_DYNAMIC -tasks. (dynamicTasks and dynamicTasksInput) - -## Input Configuration - - -| Attribute | Description | -| ----------- | ----------- | -| name | Task Name. A unique name that is descriptive of the task function | -| taskReferenceName | Task Reference Name. A unique reference to this task. There can be multiple references of a task within the same workflow definition | -| type | Task Type. In this case, `FORK_JOIN_DYNAMIC` | -| inputParameters | The input parameters that will be supplied to this task. | -| dynamicForkTasksParam | This is a JSON array of tasks or sub-workflow objects that needs to be forked and run in parallel (Note: This has a different format for ```SUB_WORKFLOW``` compared to ```SIMPLE``` tasks.) 
| -| dynamicForkTasksInputParamName | A JSON map, where the keys are task or sub-workflow names, and the values are its corresponding inputParameters | - - -## Example - -Let's say we have a task that resizes an image, and we need to create a workflow that will resize an image into multiple sizes. In this case, a task can be created prior to -the `FORK_JOIN_DYNAMIC` task that will prepare the input that needs to be passed into the `FORK_JOIN_DYNAMIC` task. These will be: - -* ```dynamicForkTasksParam``` the JSON array of tasks/subworkflows to be run in parallel. Each JSON object will have: - * A unique ```taskReferenceName```. - * The name of the Task/Subworkflow to be called (note - the location of this key:value is different for a subworkflow). - * The type of the task (This is optional for SIMPLE tasks). -* ```dynamicForkTasksInputParamName``` a JSON map of input parameters for each task. The keys will be the unique ```taskReferenceName``` defined in the first JSON array, and the values will be the specific input parameters for the task/subworkflow. - -The ```image_resize``` task works to resize just one image. The `FORK_JOIN_DYNAMIC` and the following `JOIN` will manage the multiple invocations of the single ```image_resize``` task. The responsibilities are clearly broken out, where the individual ```image_resize``` -tasks do the core job and `FORK_JOIN_DYNAMIC` manages the orchestration and fault tolerance aspects of handling multiple invocations of the task. - -## The workflow - -Here is an example of a `FORK_JOIN_DYNAMIC` task followed by a `JOIN` task. 
The fork is named and given a taskReferenceName, but all of the input parameters are JSON variables that we will discuss next: - -```json -{ - "name": "image_multiple_convert_resize_fork", - "taskReferenceName": "image_multiple_convert_resize_fork_ref", - "inputParameters": { - "dynamicTasks": "${fooBarTask.output.dynamicTasksJSON}", - "dynamicTasksInput": "${fooBarTask.output.dynamicTasksInputJSON}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "dynamicTasksInput" -}, -{ -"name": "image_multiple_convert_resize_join", -"taskReferenceName": "image_multiple_convert_resize_join_ref", -"type": "JOIN" -} -``` - -This appears in the UI as follows: - -![diagram of dynamic fork](/img/dynamic-task-diagram.png) - -Let's assume this data is sent to the workflow: - -``` -{ - "fileLocation": "https://pbs.twimg.com/media/FJY7ud0XEAYVCS8?format=png&name=900x900", - "outputFormats": ["png","jpg"], - - "outputSizes": [ - {"width":300, - "height":300}, - {"width":200, - "height":200} - ], - "maintainAspectRatio": "true" -} -``` - -With 2 file formats and 2 sizes in the input, we'll be creating 4 images total. The first task will generate the tasks and the parameters for these tasks: - -* `dynamicForkTasksParam` This is a JSON array of task or sub-workflow objects that specifies the list of tasks or sub-workflows that needs to be forked and run in parallel. This JSON varies depending on the type of task. - - -### ```dynamicForkTasksParam``` Simple task -In this case, our fork is running a SIMPLE task: ```image_convert_resize```: - -``` -{ "dynamicTasks": [ - { - "name": "image_convert_resize", - "taskReferenceName": "image_convert_resize_png_300x300_0", - ... - }, - { - "name": "image_convert_resize", - "taskReferenceName": "image_convert_resize_png_200x200_1", - ... - }, - { - "name": "image_convert_resize", - "taskReferenceName": "image_convert_resize_jpg_300x300_2", - ... 
- }, - { - "name": "image_convert_resize", - "taskReferenceName": "image_convert_resize_jpg_200x200_3", - ... - } -]} -``` -### ```dynamicForkTasksParam``` SubWorkflow task -In this case, our Dynamic fork is running a SUB_WORKFLOW task: ```image_convert_resize_subworkflow``` - -``` -{ "dynamicTasks": [ - { - "subWorkflowParam" : { - "name": "image_convert_resize_subworkflow", - "version": "1" - }, - "type" : "SUB_WORKFLOW", - "taskReferenceName": "image_convert_resize_subworkflow_png_300x300_0", - ... - }, - { - "subWorkflowParam" : { - "name": "image_convert_resize_subworkflow", - "version": "1" - }, - "type" : "SUB_WORKFLOW", - "taskReferenceName": "image_convert_resize_subworkflow_png_200x200_1", - ... - }, - { - "subWorkflowParam" : { - "name": "image_convert_resize_subworkflow", - "version": "1" - }, - "type" : "SUB_WORKFLOW", - "taskReferenceName": "image_convert_resize_subworkflow_jpg_300x300_2", - ... - }, - { - "subWorkflowParam" : { - "name": "image_convert_resize_subworkflow", - "version": "1" - }, - "type" : "SUB_WORKFLOW", - "taskReferenceName": "image_convert_resize_subworkflow_jpg_200x200_3", - ... - } -]} -``` - - - -* `dynamicForkTasksInputParamName` This is a JSON map of task or -sub-workflow objects and all the input parameters that these tasks will need to run. 
- -``` -"dynamicTasksInput":{ -"image_convert_resize_jpg_300x300_2":{ -"outputWidth":300 -"outputHeight":300 -"fileLocation":"https://pbs.twimg.com/media/FJY7ud0XEAYVCS8?format=png&name=900x900" -"outputFormat":"jpg" -"maintainAspectRatio":true -} -"image_convert_resize_jpg_200x200_3":{ -"outputWidth":200 -"outputHeight":200 -"fileLocation":"https://pbs.twimg.com/media/FJY7ud0XEAYVCS8?format=png&name=900x900" -"outputFormat":"jpg" -"maintainAspectRatio":true -} -"image_convert_resize_png_200x200_1":{ -"outputWidth":200 -"outputHeight":200 -"fileLocation":"https://pbs.twimg.com/media/FJY7ud0XEAYVCS8?format=png&name=900x900" -"outputFormat":"png" -"maintainAspectRatio":true -} -"image_convert_resize_png_300x300_0":{ -"outputWidth":300 -"outputHeight":300 -"fileLocation":"https://pbs.twimg.com/media/FJY7ud0XEAYVCS8?format=png&name=900x900" -"outputFormat":"png" -"maintainAspectRatio":true -} -``` - -### The Join - -The [JOIN](/reference-docs/join-task.html) task will run after all of the dynamic tasks, collecting the output for all of the tasks. \ No newline at end of file diff --git a/docs/docs/reference-docs/dynamic-task.md b/docs/docs/reference-docs/dynamic-task.md deleted file mode 100644 index ffe26b817..000000000 --- a/docs/docs/reference-docs/dynamic-task.md +++ /dev/null @@ -1,160 +0,0 @@ -# Dynamic -```json -"type" : "DYNAMIC" -``` - -### Introduction -Dynamic Task allows to execute one of the registered Tasks dynamically at run-time. -It accepts the task name to execute as `taskToExecute` in `inputParameters`. - -### Use Cases - -Consider a scenario, when we have to make decision of executing a task dynamically i.e. while the workflow is still -running. In such cases, Dynamic Task would be useful. - -### Configuration - -Dynamic task is defined directly inside the workflow with type `DYNAMIC`. 
- -#### Inputs - -Following are the input parameters : - -| name | description | -|----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| dynamicTaskNameParam | Name of the parameter from the task input whose value is used to schedule the task. e.g. if the value of the parameter is ABC, the next task scheduled is of type 'ABC'. | - -#### Output - -TODO: Talk about output of the task, what to expect - - -### Examples - -Suppose in a workflow, we have to take decision to ship the courier with the shipping -service providers on the basis of Post Code. - -Following task `shipping_info` generates an output on the basis of which decision would be -taken to run the next task. - -```json -{ - "name": "shipping_info", - "retryCount": 3, - "timeoutSeconds": 600, - "pollTimeoutSeconds": 1200, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail":"abc@example.com", - "rateLimitPerFrequency": 1 -} -``` - -Following are the two worker tasks, one among them would execute on the basis of output generated -by the `shipping_info` task : - -```json -{ - "name": "ship_via_fedex", - "retryCount": 3, - "timeoutSeconds": 600, - "pollTimeoutSeconds": 1200, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail":"abc@example.com", - "rateLimitPerFrequency": 2 -}, -{ - "name": "ship_via_ups", - "retryCount": 3, - "timeoutSeconds": 600, - "pollTimeoutSeconds": 1200, - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "rateLimitFrequencyInSeconds": 60, - 
"ownerEmail":"abc@example.com", - "rateLimitPerFrequency": 2 -} -``` - - -We will create the Workflow with the following definition : - -```json -{ - "name": "Shipping_Flow", - "description": "Ships smartly on the basis of Shipping info", - "version": 1, - "tasks": [ - { - "name": "shipping_info", - "taskReferenceName": "shipping_info", - "inputParameters": { - }, - "type": "SIMPLE" - }, - { - "name": "shipping_task", - "taskReferenceName": "shipping_task", - "inputParameters": { - "taskToExecute": "${shipping_info.output.shipping_service}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - } - - ], - "restartable": true, - "ownerEmail":"abc@example.com", - "workflowStatusListenerEnabled": true, - "schemaVersion": 2 -} -``` - -Workflow is the created as shown in the below diagram. - - -![Conductor UI - Workflow Diagram](/img/tutorial/ShippingWorkflow.png) - - -Note : `shipping_task` is a `DYNAMIC` task and the `taskToExecute` parameter can be set -with input value provided while running the workflow or with the output of previous tasks. -Here, it is set to the output provided by the previous task i.e. -`${shipping_info.output.shipping_service}`. - -If the input value is provided while running the workflow it can be accessed by -`${workflow.input.shipping_service}`. - -```json -{ - "shipping_service": "ship_via_fedex" -} -``` - -We can see in the below example that on the basis of Post Code the shipping service is being -decided. - -Based on given set of inputs i.e. Post Code starts with '9' hence, `ship_via_fedex` is executed - - -![Conductor UI - Workflow Run](/img/tutorial/ShippingWorkflowRunning.png) - -If the Post Code started with anything other than 9 `ship_via_ups` is executed - - -![Conductor UI - Workflow Run](/img/tutorial/ShippingWorkflowUPS.png) - -If the incorrect task name or the task that doesn't exist is provided then the workflow fails and -we get the error `"Invalid task specified. 
Cannot find task by name in the task definitions."` - -If the null reference is provided in the task name then also the workflow fails and we get the -error `"Cannot map a dynamic task based on the parameter and input. Parameter= taskToExecute, input= {taskToExecute=null}"` diff --git a/docs/docs/reference-docs/event-task.md b/docs/docs/reference-docs/event-task.md deleted file mode 100644 index f6c28fbd6..000000000 --- a/docs/docs/reference-docs/event-task.md +++ /dev/null @@ -1,95 +0,0 @@ ---- -sidebar_position: 4 ---- - -# Event Task - -```json -"type" : "EVENT" -``` - -### Introduction -EVENT is a task used to publish an event into one of the supported eventing systems in Conductor. -Conductor supports the the following eventing models: - -1. Conductor internal events (type: conductor) -2. SQS (type: sqs) - -### Use Cases -Consider a use case where at some point in the execution, an event is published to an external eventing system such as SQS. -Event tasks are useful for creating event based dependencies for workflows and tasks. - -Consider an example where we want to publish an event into SQS to notify an external system. - -```json -{ - "type": "EVENT", - "sink": "sqs:sqs_queue_name", - "asyncComplete": false -} -``` - -An example where we want to publish a messase to conductor's internal queuing system. -```json -{ - "type": "EVENT", - "sink": "conductor:internal_event_name", - "asyncComplete": false -} -``` - - -### Configuration - -#### Input Configuration - -| Attribute | Description | -|-------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | Task Name. A unique name that is descriptive of the task function | -| taskReferenceName | Task Reference Name. A unique reference to this task. There can be multiple references of a task within the same workflow definition | -| type | Task Type. 
In this case, `EVENT` | -| sink | External event queue in the format of `prefix:location`. Prefix is either `sqs` or `conductor` and `location` specifies the actual queue name. e.g. "sqs:send_email_queue" | -| asyncComplete | Boolean | - -#### asyncComplete -* ```false``` to mark status COMPLETED upon execution -* ```true``` to keep it IN_PROGRESS, wait for an external event (via Conductor or SQS or EventHandler) to complete it. - -#### Output Configuration -A task's output is sent as a payload to the external event. In case of SQS, the task's output is sent to the SQS message as a payload. - - -| name | type | description | -|--------------------|---------|---------------------------------------| -| workflowInstanceId | String | Workflow id | -| workflowType | String | Workflow Name | -| workflowVersion | Integer | Workflow Version | -| correlationId | String | Workflow CorrelationId | -| sink | String | Copy of the input data "sink" | -| asyncComplete | Boolean | Copy of the input data "asyncComplete" | -| event_produced | String | Name of the event produced | - -The published event's payload is identical to the output of the task (except "event_produced"). - - -When producing an event with Conductor as sink, the event name follows the structure: -```conductor::``` - -For SQS, use the **name** of the queue and NOT the URI. Conductor looks up the URI based on the name. - -!!!warning - When using SQS add the [ContribsModule](https://github.com/Netflix/conductor/blob/master/contribs/src/main/java/com/netflix/conductor/contribs/ContribsModule.java) to the deployment. The module needs to be configured with AWSCredentialsProvider for Conductor to be able to use AWS APIs. - - -!!!warning - When using Conductor as sink, you have two options: defining the sink as `conductor` in which case the queue name will default to the taskReferenceName of the Event Task, or specifying the queue name in the sink, as `conductor:`. 
The queue name is in the `event` value of the event Handler, as `conductor::`. - - -### Supported Queuing Systems -Conductor has support for the following external event queueing systems as part of the OSS build - -1. SQS (prefix: sqs) -2. [NATS](https://github.com/Netflix/conductor/tree/main/contribs/src/main/java/com/netflix/conductor/contribs/queue/nats) (prefix: nats) -3. [AMQP](https://github.com/Netflix/conductor/tree/main/contribs/src/main/java/com/netflix/conductor/contribs/queue/amqp) (prefix: amqp_queue or amqp_exchange) -4. Internal Conductor (prefix: conductor) -To add support for other diff --git a/docs/docs/reference-docs/fork-task.md b/docs/docs/reference-docs/fork-task.md deleted file mode 100644 index 5e87bfcca..000000000 --- a/docs/docs/reference-docs/fork-task.md +++ /dev/null @@ -1,140 +0,0 @@ -# Fork -```json -"type" : "FORK_JOIN" -``` - -## Introduction - -A Fork operation lets you run a specified list of tasks or sub workflows in parallel. A fork task is -followed by a join operation that waits on the forked tasks or sub workflows to finish. The `JOIN` -task also collects outputs from each of the forked tasks or sub workflows. - -## Use Cases - -`FORK_JOIN` tasks are typically used when a list of tasks can be run in parallel. E.g In a notification workflow, there -could be multiple ways of sending notifications, i,e e-mail, SMS, HTTP etc.. These notifications are not dependent on -each other, and so they can be run in parallel. In such cases, you can create 3 sub-lists of forked tasks for each of -these operations. - -## Configuration - -A `FORK_JOIN` task has a `forkTasks` attribute that expects an array. Each array is a sub-list of tasks. Each of these -sub-lists are then invoked in parallel. The tasks defined within each sublist can be sequential or any other way as -desired. - -A FORK_JOIN task has to be followed by a JOIN operation. 
The `JOIN` operator specifies which of the forked tasks -to `joinOn` (wait for completion) -before moving to the next stage in the workflow. - -#### Input Configuration - -| Attribute | Description | -|-------------------|-----------------------------------------------------------------------------------------------------------------------------------------------| -| name | Task Name. A unique name that is descriptive of the task function | -| taskReferenceName | Task Reference Name. A unique reference to this task. There can be multiple references of a task within the same workflow definition | -| type | Task Type. In this case, `FORK_JOIN` | -| inputParameters | The input parameters that will be supplied to this task | -| forkTasks | A list of a list of tasks. Each of the outer list will be invoked in parallel. The inner list can be a graph of other tasks and sub-workflows | - -#### Output Configuration - -This is the output configuration of the `JOIN` task that is used in conjunction with the `FORK_JOIN` task. The output of -the -`JOIN` task is a map, where the keys are the names of the task reference names where were being `joinOn` and the keys -are the corresponding outputs of those tasks. - -| Attribute | Description | -|-----------------|-------------------------------------------------------------------------------------| -| task_ref_name_1 | A task reference name that was being `joinOn`. The value is the output of that task | -| task_ref_name_2 | A task reference name that was being `joinOn`. The value is the output of that task | -| ... | ... | -| task_ref_name_N | A task reference name that was being `joinOn`. The value is the output of that task | - - - -### Example - -Imagine a workflow that sends 3 notifications: email, SMS and HTTP. Since none of these steps are dependant on the others, they can be run in parallel with a fork. 
- -The diagram will appear as: - -![fork diagram](/img/fork-task-diagram.png) - -Here's the JSON definition for the workflow: - -```json -[ - { - "name": "fork_join", - "taskReferenceName": "my_fork_join_ref", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "process_notification_payload", - "taskReferenceName": "process_notification_payload_email", - "type": "SIMPLE" - }, - { - "name": "email_notification", - "taskReferenceName": "email_notification_ref", - "type": "SIMPLE" - } - ], - [ - { - "name": "process_notification_payload", - "taskReferenceName": "process_notification_payload_sms", - "type": "SIMPLE" - }, - { - "name": "sms_notification", - "taskReferenceName": "sms_notification_ref", - "type": "SIMPLE" - } - ], - [ - { - "name": "process_notification_payload", - "taskReferenceName": "process_notification_payload_http", - "type": "SIMPLE" - }, - { - "name": "http_notification", - "taskReferenceName": "http_notification_ref", - "type": "SIMPLE" - } - ] - ] - }, - { - "name": "notification_join", - "taskReferenceName": "notification_join_ref", - "type": "JOIN", - "joinOn": [ - "email_notification_ref", - "sms_notification_ref" - ] - } -] -``` -> Note: There are three parallel 'tines' to this fork, but only two of the outputs are required for the JOIN to continue. The diagram *does* draw an arrow from ```http_notification_ref``` to the ```notification_join```, but it is not required for the workflow to continue. - -Here is how the output of notification_join will look like. The output is a map, where the keys are the names of task -references that were being `joinOn`. The corresponding values are the outputs of those tasks. 
- -```json - -{ - "email_notification_ref": { - "email_sent_at": "2021-11-06T07:37:17+0000", - "email_sent_to": "test@example.com" - }, - "sms_notification_ref": { - "smm_sent_at": "2021-11-06T07:37:17+0129", - "sms_sen": "+1-425-555-0189" - } -} -``` - -See [JOIN](/reference-docs/join-task.html) for more details on the JOIN aspect of the FORK. diff --git a/docs/docs/reference-docs/http-task.md b/docs/docs/reference-docs/http-task.md deleted file mode 100644 index 35e558a0d..000000000 --- a/docs/docs/reference-docs/http-task.md +++ /dev/null @@ -1,144 +0,0 @@ ---- -sidebar_position: 1 ---- - -# HTTP Task - -```json -"type" : "HTTP" -``` - -### Introduction - -An HTTP task is useful when you have a requirements such as: - -1. Making calls to another service that exposes an API via HTTP -2. Fetch any resource or data present on an endpoint - -### Use Cases - -If we have a scenario where we need to make an HTTP call into another service, we can make use of HTTP tasks. You can -use the data returned from the HTTP call in your subsequent tasks as inputs. Using HTTP tasks you can avoid having to -write the code that talks to these services and instead let Conductor manage it directly. This can reduce the code you -have to maintain and allows for a lot of flexibility. - -### Configuration - -HTTP task is defined directly inside the workflow with the task type `HTTP`. - -| name | type | description | -|--------------|-------------|-------------------------| -| http_request | HttpRequest | JSON object (see below) | - -#### Inputs - -| Name | Type | Description | -|---------------------|------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| uri | String | URI for the service. Can be a partial when using vipAddress or includes the server address. | -| method | String | HTTP method. 
GET, PUT, POST, DELETE, OPTIONS, HEAD | -| accept | String | Accept header. Default: ```application/json``` | -| contentType | String | Content Type - supported types are ```text/plain```, ```text/html```, and ```application/json``` (Default) | -| headers | Map[String, Any] | A map of additional http headers to be sent along with the request. | -| body | Map[] | Request body | -| vipAddress | String | When using discovery based service URLs. | -| asyncComplete | Boolean | ```false``` to mark status COMPLETED upon execution ; ```true``` to keep it IN_PROGRESS, wait for an external event (via Conductor or SQS or EventHandler) to complete it. | -| oauthConsumerKey | String | [OAuth](https://oauth.net/core/1.0/) client consumer key | -| oauthConsumerSecret | String | [OAuth](https://oauth.net/core/1.0/) client consumer secret | -| connectionTimeOut | Integer | Connection Time Out in milliseconds. If set to 0, equivalent to infinity. Default: 100. | -| readTimeOut | Integer | Read Time Out in milliseconds. If set to 0, equivalent to infinity. Default: 150. | - -#### Output - -| name | type | description | -|--------------|------------------|-----------------------------------------------------------------------------| -| response | Map | JSON body containing the response if one is present | -| headers | Map[String, Any] | Response Headers | -| statusCode | Integer | [Http Status Code](https://en.wikipedia.org/wiki/List_of_HTTP_status_codes) | -| reasonPhrase | String | Http Status Code's reason phrase | - -### Examples - -Following is the example of HTTP task with `GET` method. - -We can use variables in our URI as show in the example below. - -```json -{ - "name": "Get Example", - "taskReferenceName": "get_example", - "inputParameters": { - "http_request": { - "uri": "https://jsonplaceholder.typicode.com/posts/${workflow.input.queryid}", - "method": "GET" - } - }, - "type": "HTTP" -} -``` - -Following is the example of HTTP task with `POST` method. 
- -> Here we are using variables for our POST body which happens to be data from a previous task. This is an example of how you can **chain** HTTP calls to make complex flows happen without writing any additional code. - -```json -{ - "name": "http_post_example", - "taskReferenceName": "post_example", - "inputParameters": { - "http_request": { - "uri": "https://jsonplaceholder.typicode.com/posts/", - "method": "POST", - "body": { - "title": "${get_example.output.response.body.title}", - "userId": "${get_example.output.response.body.userId}", - "action": "doSomething" - } - } - }, - "type": "HTTP" -} -``` - -Following is the example of HTTP task with `PUT` method. - -```json -{ - "name": "http_put_example", - "taskReferenceName": "put_example", - "inputParameters": { - "http_request": { - "uri": "https://jsonplaceholder.typicode.com/posts/1", - "method": "PUT", - "body": { - "title": "${get_example.output.response.body.title}", - "userId": "${get_example.output.response.body.userId}", - "action": "doSomethingDifferent" - } - } - }, - "type": "HTTP" -} -``` - -Following is the example of HTTP task with `DELETE` method. - -```json -{ - "name": "DELETE Example", - "taskReferenceName": "delete_example", - "inputParameters": { - "http_request": { - "uri": "https://jsonplaceholder.typicode.com/posts/1", - "method": "DELETE" - } - }, - "type": "HTTP" -} -``` - -### Best Practices - -1. Why are my HTTP tasks not getting picked up? - 1. We might have too many HTTP tasks in the queue. There is a concept called Isolation Groups that you can rely on - for prioritizing certain HTTP tasks over others. 
Read more here: [Isolation Groups](/configuration/isolationgroups.html) - diff --git a/docs/docs/reference-docs/inline-task.md b/docs/docs/reference-docs/inline-task.md deleted file mode 100644 index e2b8be3cf..000000000 --- a/docs/docs/reference-docs/inline-task.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -sidebar_position: 11 ---- - -# Inline Task - -```json -"type": "INLINE" -``` -### Introduction - -Inline Task helps execute necessary logic at Workflow run-time, -using an evaluator. There are two supported evaluators as of now: - -### Configuration -| name | description | -|-------------|---------------------------------------------------| -| value-param | Use a parameter directly as the value | -| javascript | Evaluate Javascript expressions and compute value | - - -### Use Cases - -Consider a scenario, we have to run simple evaluations in -Conductor server while creating Workers. Inline task can be used to run these -evaluations using an evaluator engine. - -### Example 1 - -```json -{ - "name": "inline_task_example", - "taskReferenceName": "inline_task_example", - "type": "INLINE", - "inputParameters": { - "value": "${workflow.input.value}", - "evaluatorType": "javascript", - "expression": "function e() { if ($.value == 1){return {\"result\": true}} else { return {\"result\": false}}} e();" - } -} -``` - -Following are the parameters in the above example : - -1. `"evaluatorType"` - Type of the evaluator. -Supported evaluators: value-param, javascript which evaluates -javascript expression. - -2. `"expression"` - Expression associated with the type of evaluator. -For javascript evaluator, Javascript evaluation engine is used to -evaluate expression defined as a string. Must return a value. - -Besides expression, any of the properties in the input values is accessible as `$.value` for the expression -to evaluate. 
- -The task output can then be referenced in downstream tasks -like: `"${inline_test.output.result}"` - -### Example 2 - -Perhaps a weather API sometimes returns Celcius, and sometimes returns Farenheit temperature values. This task ensures that the downstream tasks ONLY receive Celcius values: - -``` -{ - "name": "INLINE_TASK", - "taskReferenceName": "inline_test", - "type": "INLINE", - "inputParameters": { - "scale": "${workflow.input.tempScale}", - "temperature": "${workflow.input.temperature}", - "evaluatorType": "javascript", - "expression": "function SIvaluesOnly(){if ($.scale === "F"){ centigrade = ($.temperature -32)*5/9; return {temperature: centigrade} } else { return - {temperature: $.temperature} }} SIvaluesOnly();" - } -} -``` diff --git a/docs/docs/reference-docs/join-task.md b/docs/docs/reference-docs/join-task.md deleted file mode 100644 index 9a76d68d6..000000000 --- a/docs/docs/reference-docs/join-task.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Join -```json -"type" : "JOIN" -``` - -### Introduction - -A `JOIN` task is used in conjunction with a `FORK_JOIN` or `FORK_JOIN_DYNAMIC` task. When `JOIN` is used along with -a `FORK_JOIN` task, tt waits for a list of zero or more of the forked tasks to be completed. However, when used with -a `FORK_JOIN_DYNAMIC` task, it implicitly waits for all of the dynamically forked tasks to complete. - -### Use Cases - -[FORK_JOIN](/reference-docs/fork-task.html) and [FORK_JOIN_DYNAMIC](/reference-docs/dynamic-fork-task.html) task are used to execute a collection of other tasks or sub workflows in parallel. In -such cases, there is a need for these forked tasks to complete before moving to the next stage in the workflow. - -### Configuration - -#### Input Configuration - -| Attribute | Description | -|-------------------|--------------------------------------------------------------------------------------------------------------------------------------| -| name | Task Name. 
A unique name that is descriptive of the task function | -| taskReferenceName | Task Reference Name. A unique reference to this task. There can be multiple references of a task within the same workflow definition | -| type | Task Type. In this case, `JOIN` | -| joinOn | A list of task reference names, that this `JOIN` task will wait for completion | - -#### Output Configuration - -| Attribute | Description | -|-----------------|-------------------------------------------------------------------------------------| -| task_ref_name_1 | A task reference name that was being `joinOn`. The value is the output of that task | -| task_ref_name_2 | A task reference name that was being `joinOn`. The value is the output of that task | -| ... | ... | -| task_ref_name_N | A task reference name that was being `joinOn`. The value is the output of that task | - - - -### Examples - -#### Simple Example -Here is an example of a _`JOIN`_ task. This task will wait for the completion of tasks `my_task_ref_1` -and `my_task_ref_2` as specified by the `joinOn` attribute. - -```json -{ - "name": "join_task", - "taskReferenceName": "my_join_task_ref", - "type": "JOIN", - "joinOn": [ - "my_task_ref_1", - "my_task_ref_2" - ] -} -``` - - -#### Example - ignoring one fork -Here is an example of a `JOIN` task used in conjunction with a `FORK_JOIN` task. The 'FORK_JOIN' spawns 3 tasks. -An `email_notification` task, a `sms_notification` task and a `http_notification` task. Email and SMS are usually best -effort delivery systems. However, in case of a http based notification you get a return code and you can retry until it -succeeds or eventually give up. When you setup a notification workflow, you may decide to continue ,if you kicked off an -email and sms notification. Im that case, you can decide to `joinOn` those specific tasks. However, -the `http_notification` task will still continue to execute, but it will not block the rest of the workflow from -proceeding. 
- -```json -[ - { - "name": "fork_join", - "taskReferenceName": "my_fork_join_ref", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "email_notification", - "taskReferenceName": "email_notification_ref", - "type": "SIMPLE" - } - ], - [ - { - "name": "sms_notification", - "taskReferenceName": "sms_notification_ref", - "type": "SIMPLE" - } - ], - [ - { - "name": "http_notification", - "taskReferenceName": "http_notification_ref", - "type": "SIMPLE" - } - ] - ] - }, - { - "name": "notification_join", - "taskReferenceName": "notification_join_ref", - "type": "JOIN", - "joinOn": [ - "email_notification_ref", - "sms_notification_ref" - ] - } -] -``` - -Here is how the output of notification_join will look like. The output is a map, where the keys are the names of task -references that were being `joinOn`. The corresponding values are the outputs of those tasks. - -```json - -{ - "email_notification_ref": { - "email_sent_at": "2021-11-06T07:37:17+0000", - "email_sent_to": "test@example.com" - }, - "sms_notification_ref": { - "smm_sent_at": "2021-11-06T07:37:17+0129", - "sms_sen": "+1-425-555-0189" - } -} - -``` - diff --git a/docs/docs/reference-docs/json-jq-transform-task.md b/docs/docs/reference-docs/json-jq-transform-task.md deleted file mode 100644 index 794676a61..000000000 --- a/docs/docs/reference-docs/json-jq-transform-task.md +++ /dev/null @@ -1,190 +0,0 @@ ---- -sidebar_position: 1 ---- - -# JSON JQ Transform Task - -```json -"type" : "JSON_JQ_TRANSFORM" -``` -### Introduction - -JSON_JQ_TRANSFORM_TASK is a System task that allows processing of JSON data that is supplied to the task, by using the -popular JQ processing tool’s query expression language. - -Check the [JQ Manual](https://stedolan.github.io/jq/manual/v1.5/), and the -[JQ Playground](https://jqplay.org/) for more information on JQ - -### Use Cases - -JSON is a popular format of choice for data-interchange. It is widely used in web and server applications, document -storage, API I/O etc. 
It’s also used within Conductor to define workflow and task definitions and passing data and state -between tasks and workflows. This makes a tool like JQ a natural fit for processing task related data. Some common -usages within Conductor includes, working with HTTP task, JOIN tasks or standalone tasks that try to transform data from -the output of one task to the input of another. - -### Configuration - - -| Attribute | Description | -|-------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | Task Name. A unique name that is descriptive of the task function | -| taskReferenceName | Task Reference Name. A unique reference to this task. There can be multiple references of a task within the same workflow definition | -| type | Task Type. In this case, JSON_JQ_TRANSFORM | -| inputParameters | The input parameters that will be supplied to this task. The parameters will be a JSON object of atleast 2 attributes, one of which will be called queryExpression. The others are user named attributes. These attributes will be accessible by the JQ query processor | -| inputParameters/user-defined-key(s) | User defined key(s) along with values. | -| inputParameters/queryExpression | A JQ query expression | - -#### Output Configuration - -| Attribute | Description | -|------------|---------------------------------------------------------------------------| -| result | The first results returned by the JQ expression | -| resultList | A List of results returned by the JQ expression | -| error | An optional error message, indicating that the JQ query failed processing | - -### Example - - -Here is an example of a _`JSON_JQ_TRANSFORM`_ task. 
The `inputParameters` attribute is expected to have a value object -that has the following - -1. A list of key value pair objects denoted key1/value1, key2/value2 in the example below. Note the key1/value1 are - arbitrary names used in this example. - -2. A key with the name `queryExpression`, whose value is a JQ expression. The expression will operate on the value of - the `inputParameters` attribute. In the example below, the `inputParameters` has 2 inner objects named by attributes - `key1` and `key2`, each of which has an object that is named `value1` and `value2`. They have an associated array of - strings as values, `"a", "b"` and `"c", "d"`. The expression `key3: (.key1.value1 + .key2.value2)` concat's the 2 - string arrays into a single array against an attribute named `key3` - -```json -{ - "name": "jq_example_task", - "taskReferenceName": "my_jq_example_task", - "type": "JSON_JQ_TRANSFORM", - "inputParameters": { - "key1": { - "value1": [ - "a", - "b" - ] - }, - "key2": { - "value2": [ - "c", - "d" - ] - }, - "queryExpression": "{ key3: (.key1.value1 + .key2.value2) }" - } -} -``` - -The execution of this example task above will provide the following output. The `resultList` attribute stores the full -list of the `queryExpression` result. The `result` attribute stores the first element of the resultList. An -optional `error` -attribute along with a string message will be returned if there was an error processing the query expression. - -```json -{ - "result": { - "key3": [ - "a", - "b", - "c", - "d" - ] - }, - "resultList": [ - { - "key3": [ - "a", - "b", - "c", - "d" - ] - } - ] -} -``` - -## Example JQ transforms - -### Cleaning up a JSON response - -A HTTP Task makes an API call to GitHub to request a list of "stargazers" (users who have starred a repository). 
The API response (for just one user) looks like: - - -Snippet of ```${hundred_stargazers_ref.output}``` - -``` JSON - -"body":[ - { - "starred_at":"2016-12-14T19:55:46Z", - "user":{ - "login":"lzehrung", - "id":924226, - "node_id":"MDQ6VXNlcjkyNDIyNg==", - "avatar_url":"https://avatars.githubusercontent.com/u/924226?v=4", - "gravatar_id":"", - "url":"https://api.github.com/users/lzehrung", - "html_url":"https://github.com/lzehrung", - "followers_url":"https://api.github.com/users/lzehrung/followers", - "following_url":"https://api.github.com/users/lzehrung/following{/other_user}", - "gists_url":"https://api.github.com/users/lzehrung/gists{/gist_id}", - "starred_url":"https://api.github.com/users/lzehrung/starred{/owner}{/repo}", - "subscriptions_url":"https://api.github.com/users/lzehrung/subscriptions", - "organizations_url":"https://api.github.com/users/lzehrung/orgs", - "repos_url":"https://api.github.com/users/lzehrung/repos", - "events_url":"https://api.github.com/users/lzehrung/events{/privacy}", - "received_events_url":"https://api.github.com/users/lzehrung/received_events", - "type":"User", - "site_admin":false - } -} -] - -``` - -We only need the ```starred_at``` and ```login``` parameters for users who starred the repository AFTER a given date (provided as an input to the workflow ```${workflow.input.cutoff_date}```). 
We'll use the JQ Transform to simplify the output: - -```JSON -{ - "name": "jq_cleanup_stars", - "taskReferenceName": "jq_cleanup_stars_ref", - "inputParameters": { - "starlist": "${hundred_stargazers_ref.output.response.body}", - "queryExpression": "[.starlist[] | select (.starred_at > \"${workflow.input.cutoff_date}\") |{occurred_at:.starred_at, member: {github: .user.login}}]" - }, - "type": "JSON_JQ_TRANSFORM", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } -``` - -The JSON is stored in ```starlist```. The ```queryExpression``` reads in the JSON, selects only entries where the ```starred_at``` value meets the date criteria, and generates output JSON of the form: - -```JSON -{ - "occurred_at": "date from JSON", - "member":{ - "github" : "github Login from JSON" - } -} -``` - -The entire expression is wrapped in [] to indicate that the response should be an array. - - - - - diff --git a/docs/docs/reference-docs/kafka-publish-task.md b/docs/docs/reference-docs/kafka-publish-task.md deleted file mode 100644 index d7cda5b9f..000000000 --- a/docs/docs/reference-docs/kafka-publish-task.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -sidebar_position: 13 ---- - -# Kafka Publish Task -```json -"type" : "KAFKA_PUBLISH" -``` - -### Introduction - -A Kafka Publish task is used to push messages to another microservice via Kafka. 
- -### Configuration -The task expects an input parameter named ```kafka_request``` as part of the task's input with the following details: - -| name | description | -|------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| bootStrapServers | bootStrapServers for connecting to given kafka. | -| key | Key to be published | -| keySerializer | Serializer used for serializing the key published to kafka. One of the following can be set :
    1. org.apache.kafka.common.serialization.IntegerSerializer
    2. org.apache.kafka.common.serialization.LongSerializer
    3. org.apache.kafka.common.serialization.StringSerializer.
    Default is String serializer | -| value | Value published to kafka | -| requestTimeoutMs | Request timeout while publishing to kafka. If this value is not given the value is read from the property `kafka.publish.request.timeout.ms`. If the property is not set the value defaults to 100 ms | -| maxBlockMs | maxBlockMs while publishing to kafka. If this value is not given the value is read from the property `kafka.publish.max.block.ms`. If the property is not set the value defaults to 500 ms | -| headers | A map of additional kafka headers to be sent along with the request. | -| topic | Topic to publish | - -### Examples - -Sample Task - - -```json -{ - "name": "call_kafka", - "taskReferenceName": "call_kafka", - "inputParameters": { - "kafka_request": { - "topic": "userTopic", - "value": "Message to publish", - "bootStrapServers": "localhost:9092", - "headers": { - "x-Auth":"Auth-key" - }, - "key": "123", - "keySerializer": "org.apache.kafka.common.serialization.IntegerSerializer" - } - }, - "type": "KAFKA_PUBLISH" -} -``` - -The task expects an input parameter named `"kafka_request"` as part -of the task's input with the following details: - -1. `"bootStrapServers"` - bootStrapServers for connecting to given kafka. -2. `"key"` - Key to be published. -3. `"keySerializer"` - Serializer used for serializing the key published to kafka. -One of the following can be set : -a. org.apache.kafka.common.serialization.IntegerSerializer -b. org.apache.kafka.common.serialization.LongSerializer -c. org.apache.kafka.common.serialization.StringSerializer. -Default is String serializer. -4. `"value"` - Value published to kafka -5. `"requestTimeoutMs"` - Request timeout while publishing to kafka. -If this value is not given the value is read from the property -kafka.publish.request.timeout.ms. If the property is not set the value -defaults to 100 ms. -6. `"maxBlockMs"` - maxBlockMs while publishing to kafka. 
If this value is -not given the value is read from the property kafka.publish.max.block.ms. -If the property is not set the value defaults to 500 ms. -7. `"headers"` - A map of additional kafka headers to be sent along with -the request. -8. `"topic"` - Topic to publish. - -The producer created in the kafka task is cached. By default -the cache size is 10 and expiry time is 120000 ms. To change the -defaults following can be modified -kafka.publish.producer.cache.size, -kafka.publish.producer.cache.time.ms respectively. - -#### Kafka Task Output - -Task status transitions to `COMPLETED`. - -The task is marked as `FAILED` if the message could not be published to -the Kafka queue. diff --git a/docs/docs/reference-docs/redis.md b/docs/docs/reference-docs/redis.md deleted file mode 100644 index ee96c6c08..000000000 --- a/docs/docs/reference-docs/redis.md +++ /dev/null @@ -1,38 +0,0 @@ -# Redis - -By default conductor runs with an in-memory Redis mock. However, you -can change the configuration by setting the properties `conductor.db.type` and `conductor.redis.hosts`. - -## `conductor.db.type` - -| Value | Description | -|--------------------------------|----------------------------------------------------------------------------------------| -| dynomite | Dynomite Cluster. Dynomite is a proxy layer that provides sharding and replication. | -| memory | Uses an in-memory Redis mock. Should be used only for development and testing purposes.| -| redis_cluster | Redis Cluster configuration. | -| redis_sentinel | Redis Sentinel configuration. | -| redis_standalone | Redis Standalone configuration. | - - - -## `conductor.redis.hosts` - -Expected format is `host:port:rack` separated by semicolon, e.g.: - -```properties -conductor.redis.hosts=host0:6379:us-east-1c;host1:6379:us-east-1c;host2:6379:us-east-1c -``` - -### Auth Support - -Password authentication is supported. 
The password should be set as the 4th param of the first host `host:port:rack:password`, e.g.: - -```properties -conductor.redis.hosts=host0:6379:us-east-1c:my_str0ng_pazz;host1:6379:us-east-1c;host2:6379:us-east-1c -``` - - -**Notes** - -- In a cluster, all nodes use the same password. -- In a sentinel configuration, sentinels and redis nodes use the same password. diff --git a/docs/docs/reference-docs/set-variable-task.md b/docs/docs/reference-docs/set-variable-task.md deleted file mode 100644 index c9c8b55e6..000000000 --- a/docs/docs/reference-docs/set-variable-task.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Set Variable - -```json -"type" : "SET_VARIABLE" -``` -### Introduction -Set Variable allows us to set workflow variables by creating or updating them -with new values. - -### Use Cases - -Variables can be initialized in the workflow definition as well as during -the workflow run. Once a variable is initialized, it can be read or -overwritten with a new value by any other task. - -### Configuration - -Set Variable task is defined directly inside the workflow with type -`SET_VARIABLE`. - -## Examples - -Suppose in a workflow, we have to store a value in a variable and then later in -the workflow reuse the value stored in the variable just as we do in programming, in such -scenarios `Set Variable` task can be used. - -Following is the workflow definition with `SET_VARIABLE` task. 
- -```json -{ - "name": "Set_Variable_Workflow", - "description": "Set a value to a variable and then reuse it later in the workflow", - "version": 1, - "tasks": [ - { - "name": "Set_Name", - "taskReferenceName": "Set_Name", - "type": "SET_VARIABLE", - "inputParameters": { - "name": "Foo" - } - }, - { - "name": "Read_Name", - "taskReferenceName": "Read_Name", - "inputParameters": { - "var_name" : "${workflow.variables.name}" - }, - "type": "SIMPLE" - } - ], - "restartable": true, - "ownerEmail":"abc@example.com", - "workflowStatusListenerEnabled": true, - "schemaVersion": 2 -} -``` - -In the above example, it can be seen that the task `Set_Name` is a Set Variable Task and -the variable `name` is set to `Foo` and later in the workflow it is referenced by -`"${workflow.variables.name}"` in another task. diff --git a/docs/docs/reference-docs/start-workflow-task.md b/docs/docs/reference-docs/start-workflow-task.md deleted file mode 100644 index 53db54886..000000000 --- a/docs/docs/reference-docs/start-workflow-task.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -sidebar_position: 1 ---- -# Start Workflow -```json -"type" : "START_WORKFLOW" -``` -### Introduction - -Start Workflow starts another workflow. Unlike `SUB_WORKFLOW`, `START_WORKFLOW` does -not create a relationship between starter and the started workflow. It also does not wait for the started workflow to complete. A `START_WORKFLOW` is -considered successful once the requested workflow is started successfully. In other words, `START_WORKFLOW` is marked as COMPLETED, once the started -workflow is in RUNNING state. - -### Use Cases - -When another workflow needs to be started from a workflow, `START_WORKFLOW` can be used. - -### Configuration - -Start Workflow task is defined directly inside the workflow with type `START_WORKFLOW`. 
- -#### Input - -**Parameters:** - -| name | type | description | -|---------------|------------------|---------------------------------------------------------------------------------------------------------------------| -| startWorkflow | Map[String, Any] | The value of this parameter is [Start Workflow Request](/gettingstarted/startworkflow.html#start-workflow-request). | - -#### Output - -| name | type | description | -|------------|--------|--------------------------------| -| workflowId | String | The id of the started workflow | diff --git a/docs/docs/reference-docs/sub-workflow-task.md b/docs/docs/reference-docs/sub-workflow-task.md deleted file mode 100644 index 0c5686ed6..000000000 --- a/docs/docs/reference-docs/sub-workflow-task.md +++ /dev/null @@ -1,183 +0,0 @@ ---- -sidebar_position: 1 ---- -# Sub Workflow -```json -"type" : "SUB_WORKFLOW" -``` -### Introduction -Sub Workflow task allows for nesting a workflow within another workflow. Nested workflows contain a reference to their parent. - -### Use Cases - -Suppose we want to include another workflow inside our current workflow. In that -case, Sub Workflow Task would be used. - -### Configuration - -Sub Workflow task is defined directly inside the workflow with type `SUB_WORKFLOW`. - -#### Input - -**Parameters:** - -| name | type | description | -|------------------|------------------|-------------| -| subWorkflowParam | Map[String, Any] | See below | - -**subWorkflowParam** - -| name | type | description | -|--------------------|-------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | String | Name of the workflow to execute | -| version | Integer | Version of the workflow to execute | -| taskToDomain | Map[String, String] | Allows scheduling the sub workflow's tasks per given mappings.
    See [Task Domains](/configuration/taskdomains.html) for instructions to configure taskDomains. | -| workflowDefinition | [WorkflowDefinition](/configuration/workflowdef.html) | Allows starting a subworkflow with a dynamic workflow definition. | - -#### Output - -| name | type | description | -|---------------|--------|-------------------------------------------------------------------| -| subWorkflowId | String | Sub-workflow execution Id generated when running the sub-workflow | - - -### Examples - - -Imagine we have a workflow that has a fork in it. In the example below, we input one image, but using a fork to create 2 images simultaneously: - - -![workflow with fork](/img/workflow_fork.png) - -The left fork will create a JPG, and the right fork a WEBP image. Maintaining this workflow might be difficult, as changes made to one side of the fork do not automatically propagate the other. Rather than using 2 tasks, we can define a ```image_convert_resize``` workflow that we can call for both forks as a sub-workflow: - - -```json - -{{ - "name": "image_convert_resize_subworkflow1", - "description": "Image Processing Workflow", - "version": 1, - "tasks": [{ - "name": "image_convert_resize_multipleformat_fork", - "taskReferenceName": "image_convert_resize_multipleformat_ref", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [{ - "name": "image_convert_resize_sub", - "taskReferenceName": "subworkflow_jpg_ref", - "inputParameters": { - "fileLocation": "${workflow.input.fileLocation}", - "recipeParameters": { - "outputSize": { - "width": "${workflow.input.recipeParameters.outputSize.width}", - "height": "${workflow.input.recipeParameters.outputSize.height}" - }, - "outputFormat": "jpg" - } - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "image_convert_resize", - "version": 1 - } - }], - [{ - "name": "image_convert_resize_sub", - "taskReferenceName": "subworkflow_webp_ref", - 
"inputParameters": { - "fileLocation": "${workflow.input.fileLocation}", - "recipeParameters": { - "outputSize": { - "width": "${workflow.input.recipeParameters.outputSize.width}", - "height": "${workflow.input.recipeParameters.outputSize.height}" - }, - "outputFormat": "webp" - } - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "image_convert_resize", - "version": 1 - } - } - - ] - ] - }, - { - "name": "image_convert_resize_multipleformat_join", - "taskReferenceName": "image_convert_resize_multipleformat_join_ref", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "subworkflow_jpg_ref", - "subworkflow_webp_ref" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": { - "fileLocationJpg": "${subworkflow_jpg_ref.output.fileLocation}", - "fileLocationWebp": "${subworkflow_webp_ref.output.fileLocation}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": true, - "ownerEmail": "conductor@example.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "variables": {}, - "inputTemplate": {} -} -``` - -Now our diagram will appear as: -![workflow with 2 subworkflows](/img/subworkflow_diagram.png) - - - -The inputs to both sides of the workflow are identical before and after - but we've abstracted the tasks into the sub-workflow. Any change to the sub-workflow will automatically occur in both sides of the fork. 
- - -Looking at the subworkflow (the WEBP version): - -``` -{ - "name": "image_convert_resize_sub", - "taskReferenceName": "subworkflow_webp_ref", - "inputParameters": { - "fileLocation": "${workflow.input.fileLocation}", - "recipeParameters": { - "outputSize": { - "width": "${workflow.input.recipeParameters.outputSize.width}", - "height": "${workflow.input.recipeParameters.outputSize.height}" - }, - "outputFormat": "webp" - } - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "image_convert_resize", - "version": 1 - } - } -``` - -The ```subWorkflowParam``` tells conductor which workflow to call. The task is marked as completed upon the completion of the spawned workflow. -If the sub-workflow is terminated or fails the task is marked as failure and retried if configured. - -### Optional Sub Workflow Task -If the Sub Workflow task is defined as optional in the parent workflow task definition, the parent workflow task will not be retried if sub-workflow is terminated or failed. -In addition, even if the sub-workflow is retried/rerun/restarted after reaching to a terminal status, the parent workflow task status will remain as it is. diff --git a/docs/docs/reference-docs/switch-task.md b/docs/docs/reference-docs/switch-task.md deleted file mode 100644 index 6cb78d808..000000000 --- a/docs/docs/reference-docs/switch-task.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Switch - -```json -"type" : "SWITCH" -``` -### Introduction -A switch task is similar to `case...switch` statement in a programming language. The `switch` expression, is -a configuration on the `SWITCH` task type. 
Currently, two evaluators are supported: - -### Configuration - -Following are the task configuration parameters : - -| name | type | description | -|---------------|-------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------| -| evaluatorType | String | [evaluatorType values](#evaluator-types) | -| expression | String | Depends on the [evaluatorType value](#evaluator-types) | -| decisionCases | Map[String, List[task]] | Map where key is possible values that can result from `expression` being evaluated by `evaluatorType` with value being list of tasks to be executed. | -| defaultCase | List[task] | List of tasks to be executed when no matching value is found in decision case (default condition) | - - -#### Evaluator Types -| name | description | expression | -|-------------|---------------------------------------------------|-----------------------| -| value-param | Use a parameter directly as the value | input parameter | -| javascript | Evaluate JavaScript expressions and compute value | JavaScript expression | - -### Use Cases - -Useful in any situation where we have to execute one of many task options. - - -### Output - -Following is/are output generated by the `Switch` Task. - -| name | type | description | -|------------------|--------------|---------------------------------------------------------------| -| evaluationResult | List[String] | A List of string representing the list of cases that matched. | - - -### Examples - -In this example workflow, we have to ship a package with the shipping service providers on the basis of input provided -while running the workflow. 
- -Let's create a Workflow with the following switch task definition that uses `value-param` evaluatorType: - -```json -{ - "name": "switch_task", - "taskReferenceName": "switch_task", - "inputParameters": { - "switchCaseValue": "${workflow.input.service}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "switchCaseValue", - "defaultCase": [ - { - ... - } - ], - "decisionCases": { - "fedex": [ - { - ... - } - ], - "ups": [ - { - ... - } - ] - } -} -``` - -In the definition above the value of the parameter `switch_case_value` -is used to determine the switch-case. The evaluator type is `value-param` and the expression is a direct reference to -the name of an input parameter. If the value of `switch_case_value` is `fedex` then the decision case `ship_via_fedex`is -executed as shown below. - -![Conductor UI - Workflow Run](/img/Switch_Fedex.png) - -In a similar way - if the input was `ups`, then `ship_via_ups` will be executed. If none of the cases match then the -default option is executed. - -Here is an example using the `javascript` evaluator type: - -```json -{ - "name": "switch_task", - "taskReferenceName": "switch_task", - "inputParameters": { - "inputValue": "${workflow.input.service}" - }, - "type": "SWITCH", - "evaluatorType": "javascript", - "expression": "$.inputValue == 'fedex' ? 'fedex' : 'ups'", - "defaultCase": [ - { - ... - } - ], - "decisionCases": { - "fedex": [ - { - ... - } - ], - "ups": [ - { - ... 
- } - ] - } -} -``` diff --git a/docs/docs/reference-docs/terminate-task.md b/docs/docs/reference-docs/terminate-task.md deleted file mode 100644 index 0412f801a..000000000 --- a/docs/docs/reference-docs/terminate-task.md +++ /dev/null @@ -1,91 +0,0 @@ ---- -sidebar_position: 1 ---- -# Terminate - -```json -"type" : "TERMINATE" -``` -### Introduction -Task that can terminate a workflow with a given status and modify the workflow's output with a given parameter, -it can act as a `return` statement for conditions where you simply want to terminate your workflow. - -### Use Cases -Use it when you want to terminate the workflow without continuing the execution. -For example, if you have a decision where the first condition is met, you want to execute some tasks, -otherwise you want to finish your workflow. - -### Configuration - -Terminate task is defined directly inside the workflow with type -`TERMINATE`. - -```json -{ - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "COMPLETED", - "workflowOutput": "${task0.output}" - }, - "type": "TERMINATE", - "startDelay": 0, - "optional": false -} -``` - -#### Inputs - -**Parameters:** - -| name | type | description | notes | -|-------------------|--------|-----------------------------------------|-------------------------| -| terminationStatus | String | can only accept "COMPLETED" or "FAILED" | task cannot be optional | -| workflowOutput | Any | Expected workflow output || - -### Output - -**Outputs:** - -| name | type | description | -|--------|------|-----------------------------------------------------------------------------------------------------------| -| output | Map | The content of `workflowOutput` from the inputParameters. An empty object if `workflowOutput` is not set. | - -### Examples - -Let's consider the same example we had in [Switch Task](/reference-docs/switch-task.html). 
Suppose in a workflow, we have to make a decision to ship the courier with the shipping -service providers on the basis of input provided while running the workflow. -If the input provided while running workflow does not match with the available -shipping providers then the workflow will fail and return. If input provided -matches then it goes ahead. - -Here is a snippet that shows the default switch case terminating the workflow: - -```json -{ - "name": "switch_task", - "taskReferenceName": "switch_task", - "type": "SWITCH", - "defaultCase": [ - { - "name": "terminate", - "taskReferenceName": "terminate", - "type": "TERMINATE", - "inputParameters": { - "terminationStatus": "FAILED" - } - } - ] -} -``` - -Workflow gets created as shown in the diagram. - -![Conductor UI - Workflow Diagram](/img/Terminate_Task.png) - - -### Best Practices -1. Include termination reason when terminating the workflow with failure status to make it easy to understand the cause. -2. Include any additional details (e.g. output of the tasks, switch case etc) that helps understand the path taken to termination. diff --git a/docs/docs/reference-docs/wait-task.md b/docs/docs/reference-docs/wait-task.md deleted file mode 100644 index 48c5a0fc2..000000000 --- a/docs/docs/reference-docs/wait-task.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Wait -```json -"type" : "WAIT" -``` -## Introduction - -WAIT is used when the workflow needs to be paused for an external signal to continue. - -## Use Cases -WAIT is used when the workflow needs to wait and pause for an external signal such as a human intervention -(like manual approval) or an event coming from external source such as Kafka, SQS or Conductor's internal queueing mechanism. - -Some use cases where WAIT task is used: - -1. Wait for a certain amount of time (e.g. 2 minutes) or until a certain date time (e.g. 12/25/2022 00:00) -2. 
To wait for an external signal coming from an event queue mechanism supported by Conductor - -## Configuration -* taskType: WAIT -* Wait for a specific amount of time -format: short: **D**d**H**h**M**m or full: **D**days**H**hours**M**minutes -* The following are the accepted units: *days*, *d*, *hrs*, *hours*, *h*, *minutes*, *mins*, *m*, *seconds*, *secs*, *s* -```json -{ - "taskType": "WAIT", - "inputParameters": { - "duration": "2 days 3 hours" - } -} -``` -* Wait until specific date/time -* e.g. the following Wait task remains blocked until Dec 25, 2022 9am PST -* The date/time can be supplied in one of the following formats: -**yyyy-MM-dd HH:mm z**, **yyyy-MM-dd HH:mm**, **yyyy-MM-dd** -```json -{ - "taskType": "WAIT", - "inputParameters": { - "until": "2022-12-25 09:00 PST" - } -} -``` - -## Ending a WAIT when there is no time duration specified - -To conclude a WAIT task, there are three endpoints that can be used. -You'll need the ```workflowId```, ```taskRefName``` or ```taskId``` and the task status (generally ```COMPLETED``` or ```FAILED```). - -1. POST ```/api/tasks``` -2. POST ```/api/queue/update/{workflowId}/{taskRefName}/{status}``` -3. POST ```/api/queue/update/{workflowId}/task/{taskId}/{status}``` \ No newline at end of file diff --git a/docs/docs/resources/code-of-conduct.md b/docs/docs/resources/code-of-conduct.md deleted file mode 100644 index f8076bc62..000000000 --- a/docs/docs/resources/code-of-conduct.md +++ /dev/null @@ -1,49 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
- -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at netflixoss@netflix.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq diff --git a/docs/docs/resources/contributing.md b/docs/docs/resources/contributing.md deleted file mode 100644 index 7d84a32ad..000000000 --- a/docs/docs/resources/contributing.md +++ /dev/null @@ -1,73 +0,0 @@ -# Contributing -Thanks for your interest in Conductor! -This guide helps to find the most efficient way to contribute, ask questions, and report issues. - -Code of conduct ------ - -Please review our [code of conduct](code-of-conduct.md). - -I have a question! ------ - -We have a dedicated [discussion forum](https://github.com/Netflix/conductor/discussions) for asking "how to" questions and to discuss ideas. The discussion forum is a great place to start if you're considering creating a feature request or work on a Pull Request. -*Please do not create issues to ask questions.* - -I want to contribute! ------- - -We welcome Pull Requests and already had many outstanding community contributions! 
-Creating and reviewing Pull Requests take considerable time. This section helps you set up for a smooth Pull Request experience. - -The stable branch is [main](https://github.com/Netflix/conductor/tree/main). - -Please create pull requests for your contributions against [main](https://github.com/Netflix/conductor/tree/main) only. - -It's a great idea to discuss the new feature you're considering on the [discussion forum](https://github.com/Netflix/conductor/discussions) before writing any code. There are often different ways you can implement a feature. Getting some discussion about different options helps shape the best solution. When starting directly with a Pull Request, there is the risk of having to make considerable changes. Sometimes that is the best approach, though! Showing an idea with code can be very helpful; be aware that it might be throw-away work. Some of our best Pull Requests came out of multiple competing implementations, which helped shape it to perfection. - -Also, consider that not every feature is a good fit for Conductor. A few things to consider are: - -* Is it increasing complexity for the user, or might it be confusing? -* Does it, in any way, break backward compatibility (this is seldom acceptable) -* Does it require new dependencies (this is rarely acceptable for core modules) -* Should the feature be opt-in or enabled by default. For integration with a new Queuing recipe or persistence module, a separate module which can be optionally enabled is the right choice. -* Should the feature be implemented in the main Conductor repository, or would it be better to set up a separate repository? Especially for integration with other systems, a separate repository is often the right choice because the life-cycle of it will be different. - -Of course, for more minor bug fixes and improvements, the process can be more light-weight. - -We'll try to be responsive to Pull Requests. 
Do keep in mind that because of the inherently distributed nature of open source projects, responses to a PR might take some time because of time zones, weekends, and other things we may be working on. - -I want to report an issue ------ - -If you found a bug, it is much appreciated if you create an issue. Please include clear instructions on how to reproduce the issue, or even better, include a test case on a branch. Make sure to come up with a descriptive title for the issue because this helps while organizing issues. - -I have a great idea for a new feature ----- -Many features in Conductor have come from ideas from the community. If you think something is missing or certain use cases could be supported better, let us know! You can do so by opening a discussion on the [discussion forum](https://github.com/Netflix/conductor/discussions). Provide as much relevant context to why and when the feature would be helpful. Providing context is especially important for "Support XYZ" issues since we might not be familiar with what "XYZ" is and why it's useful. If you have an idea of how to implement the feature, include that as well. - -Once we have decided on a direction, it's time to summarize the idea by creating a new issue. - -## Code Style -We use [spotless](https://github.com/diffplug/spotless) to enforce consistent code style for the project, so make sure to run `gradlew spotlessApply` to fix any violations after code changes. - -## License - -By contributing your code, you agree to license your contribution under the terms of the APLv2: https://github.com/Netflix/conductor/blob/master/LICENSE - -All files are released with the Apache 2.0 license, and the following license header will be automatically added to your new file if none present: - -``` -/** - * Copyright $YEAR Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -``` diff --git a/docs/docs/resources/license.md b/docs/docs/resources/license.md deleted file mode 100644 index 518de4064..000000000 --- a/docs/docs/resources/license.md +++ /dev/null @@ -1,15 +0,0 @@ -# License - -Copyright 2022 Netflix, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/docs/docs/resources/related.md b/docs/docs/resources/related.md deleted file mode 100644 index 9a293f152..000000000 --- a/docs/docs/resources/related.md +++ /dev/null @@ -1,74 +0,0 @@ -# Community projects related to Conductor - -## Client SDKs - -Further, all of the (non-Java) SDKs have a new GitHub home: the Conductor SDK repository is your new source for Conductor SDKs: - -* [Golang](https://github.com/conductor-sdk/conductor-go) -* [Python](https://github.com/conductor-sdk/conductor-python) -* [C#](https://github.com/conductor-sdk/conductor-csharp) -* [Clojure](https://github.com/conductor-sdk/conductor-clojure) - -All contributions on the above client sdks can be made on [Conductor SDK](https://github.com/conductor-sdk) repository. - -## Microservices operations - -* https://github.com/flaviostutz/schellar - Schellar is a scheduler tool for instantiating Conductor workflows from time to time, mostly like a cron job, but with transport of input/output variables between calls. - -* https://github.com/flaviostutz/backtor - Backtor is a backup scheduler tool that uses Conductor workers to handle backup operations and decide when to expire backups (ex.: keep backup 3 days, 2 weeks, 2 months, 1 semester) - -* https://github.com/cquon/conductor-tools - Conductor CLI for launching workflows, polling tasks, listing running tasks etc - - -## Conductor deployment - -* https://github.com/flaviostutz/conductor-server - Docker container for running Conductor with Prometheus metrics plugin installed and some tweaks to ease provisioning of workflows from json files embedded to the container - -* https://github.com/flaviostutz/conductor-ui - Docker container for running Conductor UI so that you can easily scale UI independently - -* https://github.com/flaviostutz/elasticblast - "Elasticsearch to Bleve" bridge tailored for running Conductor on top of Bleve indexer. The footprint of Elasticsearch may cost too much for small deployments on Cloud environment. 
- -* https://github.com/mohelsaka/conductor-prometheus-metrics - Conductor plugin for exposing Prometheus metrics over path '/metrics' - -## OAuth2.0 Security Configuration -Forked Repository - [Conductor (Secure)](https://github.com/maheshyaddanapudi/conductor/tree/oauth2) - -[OAuth2.0 Role Based Security!](https://github.com/maheshyaddanapudi/conductor/blob/oauth2/SECURITY.md) - Spring Security with easy configuration to secure the Conductor server APIs. - -Docker image published to [Docker Hub](https://hub.docker.com/repository/docker/conductorboot/server) - -## Conductor Worker utilities - -* https://github.com/ggrcha/conductor-go-client - Conductor Golang client for writing Workers in Golang - -* https://github.com/courosh12/conductor-dotnet-client - Conductor DOTNET client for writing Workers in DOTNET - * https://github.com/TwoUnderscorez/serilog-sinks-conductor-task-log - Serilog sink for sending worker log events to Netflix Conductor - -* https://github.com/davidwadden/conductor-workers - Various ready made Conductor workers for common operations on some platforms (ex.: Jira, Github, Concourse) - -## Conductor Web UI - -* https://github.com/maheshyaddanapudi/conductor-ng-ui - Angular based - Conductor Workflow Management UI - -## Conductor Persistence - -### Mongo Persistence - -* https://github.com/maheshyaddanapudi/conductor/tree/mongo_persistence - With option to use Mongo Database as persistence unit. - * Mongo Persistence / Option to use Mongo Database as persistence unit. - * Docker Compose example with MongoDB Container. - -### Oracle Persistence - -* https://github.com/maheshyaddanapudi/conductor/tree/oracle_persistence - With option to use Oracle Database as persistence unit. - * Oracle Persistence / Option to use Oracle Database as persistence unit : version > 12.2 - Tested well with 19C - * Docker Compose example with Oracle Container. 
- -## Schedule Conductor Workflow -* https://github.com/jas34/scheduledwf - It solves the following problem statements: - * At times there are use cases in which we need to run some tasks/jobs only at a scheduled time. - * In microservice architecture maintaining schedulers in various microservices is a pain. - * We should have a central dedicate service that can do scheduling for us and provide a trigger to a microservices at expected time. -* It offers an additional module `io.github.jas34.scheduledwf.config.ScheduledWfServerModule` built on the existing core -of conductor and does not require deployment of any additional service. -For more details refer: [Schedule Conductor Workflows](https://jas34.github.io/scheduledwf) and [Capability In Conductor To Schedule Workflows](https://github.com/Netflix/conductor/discussions/2256) \ No newline at end of file diff --git a/docs/docs/technicaldetails.md b/docs/docs/technicaldetails.md deleted file mode 100644 index b22630330..000000000 --- a/docs/docs/technicaldetails.md +++ /dev/null @@ -1,150 +0,0 @@ -# Technical Details - -### gRPC Framework -As part of this addition, all of the modules and bootstrap code within them were refactored to leverage providers, which facilitated moving the Jetty server into a separate module and the conformance to Guice guidelines and best practices. -This feature constitutes a server-side gRPC implementation along with protobuf RPC schemas for the workflow, metadata and task APIs that can be run concurrently with the Jersey-based HTTP/REST server. The protobuf models for all the types are exposed through the API. gRPC java clients for the workflow, metadata and task APIs are also available for use. Another valuable addition is an idiomatic Go gRPC client implementation for the worker API. -The proto models are auto-generated at compile time using this ProtoGen library. This custom library adds messageInput and messageOutput fields to all proto tasks and task definitions. 
The goal of these fields is providing a type-safe way to pass input and input metadata through tasks that use the gRPC API. These fields use the Any protobuf type which can store any arbitrary message type in a type-safe way, without the server needing to know the exact serialization format of the message. In order to expose these Any objects in the REST API, a custom encoding is used that contains the raw data of the serialized message by converting it into a dictionary with '@type' and '@value' keys, where '@type' is identical to the canonical representation and '@value' contains a base64 encoded string with the binary data of the serialized message. The JsonMapperProvider provides the object mapper initialized with this module to enable serialization/deserialization of these JSON objects. - - -### Cassandra Persistence -The Cassandra persistence layer currently provides a partial implementation of the ExecutionDAO that supports all the CRUD operations for tasks and workflow execution. The data modelling is done in a denormalized manner and stored in two tables. The “workflows” table houses all the information for a workflow execution including all its tasks and is the source of truth for all the information regarding a workflow and its tasks. The “task_lookup” table, as the name suggests stores a lookup of taskIds to workflowId. This table facilitates the fast retrieval of task data given a taskId. -All the datastore operations that are used during the critical execution path of a workflow have been implemented currently. Few of the operational abilities of the ExecutionDAO are yet to be implemented. This module also does not provide implementations for QueueDAO and MetadataDAO. We envision using the Cassandra DAO with an external queue implementation, since implementing a queuing recipe on top of Cassandra is an anti-pattern that we want to stay away from. 
- - -### External Payload Storage -The implementation of this feature is such that the externalization of payloads is fully transparent and automated to the user. Conductor operators can configure the usage of this feature and is completely abstracted and hidden from the user, thereby allowing the operators full control over the barrier limits. Currently, only AWS S3 is supported as a storage system, however, as with all other Conductor components, this is pluggable and can be extended to enable any other object store to be used as an external payload storage system. -The externalization of payloads is enforced using two kinds of [barriers](/externalpayloadstorage.html). Soft barriers are used when the payload size is warranted enough to be stored as part of workflow execution. These payloads will be stored in external storage and used during execution. Hard barriers are enforced to safeguard against voluminous data, and such payloads are rejected and the workflow execution is failed. -The payload size is evaluated in the client before being sent over the wire to the server. If the payload size exceeds the configured soft limit, the client makes a request to the server for the location at which the payload is to be stored. In this case where S3 is being used, the server returns a signed url for the location and the client uploads the payload using this signed url. The relative path to the payload object is then stored in the workflow/task metadata. The server can then download this payload from this path and use as needed during execution. This allows the server to control access to the S3 bucket, thereby making the user applications where the worker processes are run completely agnostic of the permissions needed to access this location. - - -### Dynamic Workflow Executions -In the earlier version (v1.x), Conductor allowed the execution of workflows referencing the workflow and task definitions stored as metadata in the system. 
This meant that a workflow execution with 10 custom tasks to run entailed: - -- Registration of the 10 task definitions if they don't exist (assuming workflow task type SIMPLE for simplicity) -- Registration of the workflow definition -- Each time a definition needs to be retrieved, a call to the metadata store needed to be performed -- In addition to that, the system allowed current metadata that is in use to be altered, leading to possible inconsistencies/race conditions - -To eliminate these pain points, the execution was changed such that the workflow definition is embedded within the workflow execution and the task definitions are themselves embedded within this workflow definition. This enables the concept of ephemeral/dynamic workflows and tasks. Instead of fetching metadata definitions throughout the execution, the definitions are fetched and embedded into the execution at the start of the workflow execution. This also enabled the StartWorkflowRequest to be extended to provide the complete workflow definition that will be used during execution, thus removing the need for pre-registration. The MetadataMapperService prefetches the workflow and task definitions and embeds these within the workflow data, if not provided in the StartWorkflowRequest. - -Following benefits are seen as a result of these changes: - -- Grants immutability of the definition stored within the execution data against modifications to the metadata store -- Better testability of workflows with faster experimental changes to definitions -- Reduced stress on the datastore due to prefetching the metadata only once at the start - - -### Decoupling Elasticsearch from Persistence -In the earlier version (1.x), the indexing logic was imbibed within the persistence layer, thus creating a tight coupling between the primary datastore and the indexing engine. This meant that the primary datastore determines how we orchestrate between the storage (redis, mysql, etc) and the indexer(elastic search). 
The main disadvantage of this approach is the lack of flexibility, that is, we cannot run an in-memory database and external elastic search or vice-versa. -We plan to improve this further by removing the indexing from the critical path of workflow execution, thus reducing possible points of failure during execution. - - -### Elasticsearch 5/6 Support -Indexing workflow execution is one of the primary features of Conductor. This enables archival of terminal state workflows from the primary data store, along with providing a clean search capability from the UI. -In Conductor 1.x, we supported both versions 2 and 5 of Elasticsearch by shadowing version 5 and all its dependencies. This proved to be rather tedious increasing build times by over 10 minutes. In Conductor 2.x, we have removed active support for ES 2.x, because of valuable community contributions for elasticsearch 5 and elasticsearch 6 modules. Unlike Conductor 1.x, Conductor 2.x supports elasticsearch 5 by default, which can easily be replaced with version 6 by following the simple instructions [here](https://github.com/Netflix/conductor/tree/master/es6-persistence#build). - -### Maintaining workflow consistency with distributed locking and fencing tokens - -#### Problem - -Conductor’s Workflow decide is the core logic which recursively evaluates the state of the workflow, schedules tasks, persists workflow and task(s) state at several checkpoints, and progresses the workflow. - -In a multi-node Conductor server deployment, the decide on a workflow can be triggered concurrently. For example, the worker can update Conductor server with latest task state, which calls decide, while the sweeper service (which periodically evaluates the workflow state to progress from task timeouts) would also call the decide on a different instance. 
The decide can be run concurrently in two different jvm nodes with two different workflow states, and based on the workflow configuration and current state, the result could be inconsistent. - -#### A two-part solution to maintain Workflow Consistency - -**Preventing concurrent decides with distributed locking:** -The goal is to allow only one decide to run on a workflow at any given time across the whole Conductor Server cluster. This can be achieved by plugging in distributed locking implementations like Zookeeper, Redlock etc. A Zookeeper module implementing Conductor’s Locking service is provided. - -**Preventing stale data updates with fencing tokens:** -While the locking service helps to run one decide at a time, it might still be possible for nodes with timed out locks to reactivate and continue execution from where it left off (usually with stale data). This can be avoided with fencing tokens, which basically is an incrementing counter on workflow state with read-before-write support in a transaction or similar construct. - -*At Netflix, we use Cassandra. Considering the tradeoffs of Cassandra’s Lightweight Transactions (LWT) and the probability of this stale updates happening, and our testing results, we’ve decided to first only rollout distributed locking with Zookeeper. We'll monitor our system and add C* LWT if needed. - -#### Setting up desired level of consistency - -Based on your requirements, it is possible to use none, one or both of the distributed locking and fencing tokens implementations. - -#### Alternative solution to distributed "decide" evaluation - -As mentioned in the previous section, the "decide" logic is triggered from multiple places in a conductor instance. Either a direct trigger such as user starting a workflow or a timed trigger from the Sweeper service. - -> Sweeper service is responsible for continually checking state of all workflows executions and trigger the "decide" logic which in turn can time the workflow out. 
- -In a single node deployment (single dynomite rack and single conductor server) this shouldn't be a problem. But when running multiple replicated dynomite racks and a conductor server on top of each rack, this might trigger the race condition described in previous section. - -> Dynomite rack is a single or multiple instance dynomite setup that holds all the data. - -> More on dynomite HA setup: (https://netflixtechblog.com/introducing-dynomite-making-non-distributed-databases-distributed-c7bce3d89404) - -In a cluster deployment, the default behavior for Dyno Queues is such, that it distributes the workload (round-robin style) to all the conductor servers. -This can create a situation where the first task to be executed is queued for conductor server #1 but the sweeper service is queued for conductor server #2. - -##### More on dyno queues - -Dyno queues are the default queuing mechanism of conductor. - -Queues are allocated and used for: -* Task execution - each task type gets a queue -* Workflow execution - single queue with all currently executing workflows (deciderQueue) - * This queue is used by SweeperService - -**Each conductor server instance gets its own set of queues**. Or more precisely a queue shard of its own. -This means that if you have 2 task types, you end up with 6 queues altogether e.g. - -``` -conductor_queues.test.QUEUE._deciderQueue.c -conductor_queues.test.QUEUE._deciderQueue.d -conductor_queues.test.QUEUE.HTTP.c -conductor_queues.test.QUEUE.HTTP.d -conductor_queues.test.QUEUE.LAMBDA.c -conductor_queues.test.QUEUE.LAMBDA.d -``` - -> The "c" and "d" suffixes are the shards identifying conductor server instace #1 and instance #2 respectively. 
- -> The shard names are extracted from dynomite rack name such as us-east-1c that is set in "LOCAL_RACK" or "EC2_AVAILABILTY_ZONE" - -Considering an execution of a simple workflow with just 2 tasks: [HTTP, LAMBDA], you should end up with queues being filled as follows: - -``` -Workflow execution -> conductor_queues.test.QUEUE._deciderQueue.c -HTTP taks execution -> conductor_queues.test.QUEUE.HTTP.d -LAMBDA task execution -> conductor_queues.test.QUEUE.LAMBDA.c -``` - -Which means that SweeperService in conductor instance #1 is responsible for sweeping the workflow, conductor #2 is responsible for executing HTTP task and conductor #1 again responsible for executing LAMBDA task. - -This illustrates the race condition: If the HTTP task completion in instance #2 happens at the same time as sweep in instance #1 ... you can end up with 2 different updates to a workflow execution: one update timing workflow out while the other completing the task and scheduling next. - -> The round-robin strategy responsible for work distribution is defined [here](https://github.com/Netflix/dyno-queues/blob/1cde55bbb69acd631c671a0cb2f9db2419163e33/dyno-queues-redis/src/main/java/com/netflix/dyno/queues/redis/sharding/RoundRobinStrategy.java) - -##### Back to alternative solution - -The alternative solution here is **Switching round-robin queue allocation for a local-only strategy**. -Meaning that a workflow and its task executions are queued only for the conductor instance which started the workflow. - -This completely avoids the race condition for the price of removing task execution distribution. - -Since all tasks and the sweeper service read/write only from/to "local" queues, it is impossible to run into a race condition between conductor instances. - -The downside here is that the workload is not distributed across all conductor servers. Which might be an advantage in active-standby deployments. - -Considering other downsides ... 
- -Considering a situation where a conductor instance goes down: -* With local-only strategy, the workflow executions from failed conductor instance will not progress until: - * The conductor instance is restarted or - * The executions are manually terminated and restarted from a different node -* With round-robin strategy, there is a chance the tasks will be rescheduled on a different conductor node - * This is nondeterministic though - -**Enabling local only queue allocation strategy for dyno queues:** - -Just enable following setting the config.properties: - -``` -workflow.dyno.queue.sharding.strategy=localOnly -``` - -> The default is roundRobin diff --git a/docs/kitchensink.json b/docs/kitchensink.json deleted file mode 100644 index 5e1e50de8..000000000 --- a/docs/kitchensink.json +++ /dev/null @@ -1,153 +0,0 @@ -{ - "name": "kitchensink", - "description": "kitchensink workflow", - "version": 1, - "tasks": [ - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_0", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/workflow/_search?q=status:COMPLETED&size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${task_2.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "task_4", - "taskReferenceName": "task_4", - "inputParameters": { - "mod": "${task_2.output.mod}", - "oddEven": "${task_2.output.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": 
"dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${task_4.output.dynamicTasks}", - "input": "${task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input" - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN" - } - ], - "1": [ - { - "name": "fork_join", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_10", - "taskReferenceName": "task_10", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - [ - { - "name": "task_11", - "taskReferenceName": "task_11", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "join2", - "type": "JOIN", - "joinOn": [ - "wf3", - "wf4" - ] - } - ] - } - }, - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_1", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/workflow/_search?q=status:COMPLETED&size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "inputParameters": { - "statuses": "${get_es_1.output...status}", - "fistWorkflowId": "${get_es_1.output.workflowId[0]}" - }, - "type": "SIMPLE" - } - ], - "schemaVersion": 2 -} \ No newline at end of file diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml deleted file mode 100644 index a1199c32a..000000000 --- a/docs/mkdocs.yml +++ /dev/null @@ -1,108 +0,0 @@ 
-site_name: Conductor Documentation - -repo_url: https://github.com/Netflix/conductor -edit_uri: '' -strict: true -use_directory_urls: false - -nav: - - Getting Started: - - Running Conductor: - - From Source: gettingstarted/source.md - - Using Docker: gettingstarted/docker.md - - Basic Concepts: gettingstarted/basicconcepts.md - - High Level Steps: gettingstarted/steps.md - - Using the Client: gettingstarted/client.md - - Start a Workflow: gettingstarted/startworkflow.md - - Why Conductor?: gettingstarted/intro.md - - How-Tos: - - Workflows: - - Debugging Workflows: how-tos/Workflows/debugging-workflows.md - - Handling Errors: how-tos/Workflows/handling-errors.md - - Searching Workflows: how-tos/Workflows/searching-workflows.md - - Starting Workflows: how-tos/Workflows/starting-workflows.md - - Updating Workflows: how-tos/Workflows/updating-workflows.md - - View Workflow Execution: how-tos/Workflows/view-workflow-executions.md - - Versioning Workflows: how-tos/Workflows/versioning-workflows.md - - Tasks: - - Creating Task Definitions: how-tos/Tasks/creating-tasks.md - - Dynamic vs Switch Tasks: how-tos/Tasks/dynamic-vs-switch-tasks.md - - Monitoring Task Queues: how-tos/Tasks/monitoring-task-queues.md - - Reusing Tasks: how-tos/Tasks/reusing-tasks.md - - Task Configurations: how-tos/Tasks/task-configurations.md - - Task Inputs: how-tos/Tasks/task-inputs.md - - Task Timeouts: how-tos/Tasks/task-timeouts.md - - Updating Task Definitions: how-tos/Tasks/updating-tasks.md - - Extending System Tasks: how-tos/Tasks/extending-system-tasks.md - - Workers: - - Build a Go Task Worker: how-tos/Workers/build-a-golang-task-worker.md - - Build a Java Task Worker: how-tos/Workers/build-a-java-task-worker.md - - Build a Python Task Worker: how-tos/Workers/build-a-python-task-worker.md - - Monitoring: - - Conductor Log Level: how-tos/Monitoring/Conductor-LogLevel.md - - Developer Labs: - - Beginner: labs/beginner.md - - A First Workflow: labs/running-first-workflow.md - - Events 
and Event Handlers: labs/eventhandlers.md - - Kitchen Sink: labs/kitchensink.md - - Documentation: - - Architecture: - - Overview: architecture/overview.md - - Task Lifecycle: architecture/tasklifecycle.md - - API Specification: apispec.md - - Configuration: - - Task Definition: configuration/taskdef.md - - Worker Definition: configuration/workerdef.md - - Workflow Definition: configuration/workflowdef.md - - System Tasks: configuration/systask.md - - System Operators: configuration/sysoperator.md - - Event Handlers: configuration/eventhandlers.md - - Task Domains: configuration/taskdomains.md - - Isolation Groups: configuration/isolationgroups.md - - Operators: - - Do-While: reference-docs/do-while-task.md - - Dynamic: reference-docs/dynamic-task.md - - Dynamic Fork: reference-docs/dynamic-fork-task.md - - Fork: reference-docs/fork-task.md - - Join: reference-docs/join-task.md - - Set Variable: reference-docs/set-variable-task.md - - Start Workflow: reference-docs/start-workflow-task.md - - Sub Workflow: reference-docs/sub-workflow-task.md - - Switch: reference-docs/switch-task.md - - Terminate: reference-docs/terminate-task.md - - Wait: reference-docs/wait-task.md - - System Tasks: - - Event Task: reference-docs/event-task.md - - HTTP Task: reference-docs/http-task.md - - Inline Task: reference-docs/inline-task.md - - JSON JQ Transform Task: reference-docs/json-jq-transform-task.md - - Kafka Publish Task: reference-docs/kafka-publish-task.md - - Conductor Metrics: - - Server Metrics: metrics/server.md - - Client Metrics: metrics/client.md - - Advanced Topics: - - Extending Conductor: extend.md - - Annotation Processor: reference-docs/annotation-processor.md - - Archival of Workflows: reference-docs/archival-of-workflows.md - - Azure Blog Storage: reference-docs/azureblob-storage.md - - External Payload Storage: externalpayloadstorage.md - - Redis: reference-docs/redis.md - - Polyglot SDKs: - - Python SDK: how-tos/python-sdk.md - - Best Practices: bestpractices.md 
- - FAQ: faq.md - - Directed Acyclic Graph: reference-docs/directed-acyclic-graph.md - - Technical Details: technicaldetails.md - - Resources: - - Contributing: resources/contributing.md - - Code of Conduct: resources/code-of-conduct.md - - Related Projects: resources/related.md - - License: resources/license.md -theme: - name: mkdocs - custom_dir: theme/ -extra_css: - - css/custom.css -markdown_extensions: -- admonition -- codehilite diff --git a/docs/theme/main.html b/docs/theme/main.html deleted file mode 100644 index 57b3011db..000000000 --- a/docs/theme/main.html +++ /dev/null @@ -1,43 +0,0 @@ -{% extends "base.html" %} - -{% block extrahead %} - - - - - - -{% endblock %} - - -{% block content %} -{% if page and page.is_homepage %} -
    {% include "content.html" %}
    -{% else %} -
    {% include "toc.html" %}
    -
    {% include "content.html" %}
    -{% endif %} -{% endblock %} - -{%- block next_prev %} -{%- endblock %} - -{% block footer %} - -{% endblock %} \ No newline at end of file diff --git a/docs/theme/toc-sub.html b/docs/theme/toc-sub.html deleted file mode 100644 index 25e9da180..000000000 --- a/docs/theme/toc-sub.html +++ /dev/null @@ -1,14 +0,0 @@ -{%- if not nav_item.children %} -
  • - {{ nav_item.title }} -
  • -{%- else %} -
  • - {{ nav_item.title }} -
      - {%- for nav_item in nav_item.children %} - {% include "toc-sub.html" %} - {%- endfor %} -
    -
  • -{%- endif %} diff --git a/docs/theme/toc.html b/docs/theme/toc.html deleted file mode 100644 index 240974784..000000000 --- a/docs/theme/toc.html +++ /dev/null @@ -1,7 +0,0 @@ -
    -
      - {%- for nav_item in nav %} - {% include "toc-sub.html" %} - {%- endfor %} -
    -
    \ No newline at end of file diff --git a/es6-persistence/build.gradle b/es6-persistence/build.gradle deleted file mode 100644 index 2bf0d49c8..000000000 --- a/es6-persistence/build.gradle +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - - compileOnly 'org.springframework.boot:spring-boot-starter' - compileOnly 'org.springframework.retry:spring-retry' - - implementation "commons-io:commons-io:${revCommonsIo}" - implementation "org.apache.commons:commons-lang3" - // SBMTODO: remove guava dep - implementation "com.google.guava:guava:${revGuava}" - - implementation "org.elasticsearch.client:transport" - implementation "org.elasticsearch.client:elasticsearch-rest-client" - implementation "org.elasticsearch.client:elasticsearch-rest-high-level-client" - - testImplementation 'org.springframework.retry:spring-retry' - testImplementation "org.awaitility:awaitility:${revAwaitility}" - testImplementation "org.testcontainers:elasticsearch:${revTestContainer}" - testImplementation project(':conductor-common').sourceSets.test.output -} diff --git a/es6-persistence/dependencies.lock b/es6-persistence/dependencies.lock deleted file mode 100644 index 9960d19fb..000000000 --- a/es6-persistence/dependencies.lock +++ /dev/null @@ -1,447 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "locked": "6.8.12" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], 
- "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "locked": "6.8.12" - } - }, - "testCompileClasspath": { - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "commons-io:commons-io": { - "locked": "2.7" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.6" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "locked": "6.8.12" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - 
"org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "org.testcontainers:elasticsearch": { - "locked": "1.15.3" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - 
}, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": 
"2.17.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.6" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "locked": "6.8.12" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "org.testcontainers:elasticsearch": { - "locked": "1.15.3" - } - } -} \ No newline at end of file diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchConditions.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchConditions.java deleted file mode 100644 index e8edae218..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchConditions.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.config; - -import org.springframework.boot.autoconfigure.condition.AllNestedConditions; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; - -public class ElasticSearchConditions { - - private ElasticSearchConditions() {} - - public static class ElasticSearchV6Enabled extends AllNestedConditions { - - ElasticSearchV6Enabled() { - super(ConfigurationPhase.PARSE_CONFIGURATION); - } - - @SuppressWarnings("unused") - @ConditionalOnProperty( - name = "conductor.indexing.enabled", - havingValue = "true", - matchIfMissing = true) - static class enabledIndexing {} - - @SuppressWarnings("unused") - @ConditionalOnProperty( - name = "conductor.elasticsearch.version", - havingValue = "6", - matchIfMissing = true) - static class enabledES6 {} - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchProperties.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchProperties.java deleted file mode 100644 index 50500af64..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchProperties.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.config; - -import java.net.MalformedURLException; -import java.net.URL; -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; - -@ConfigurationProperties("conductor.elasticsearch") -public class ElasticSearchProperties { - - /** - * The comma separated list of urls for the elasticsearch cluster. Format -- - * host1:port1,host2:port2 - */ - private String url = "localhost:9300"; - - /** The index prefix to be used when creating indices */ - private String indexPrefix = "conductor"; - - /** The color of the elasticserach cluster to wait for to confirm healthy status */ - private String clusterHealthColor = "green"; - - /** The size of the batch to be used for bulk indexing in async mode */ - private int indexBatchSize = 1; - - /** The size of the queue used for holding async indexing tasks */ - private int asyncWorkerQueueSize = 100; - - /** The maximum number of threads allowed in the async pool */ - private int asyncMaxPoolSize = 12; - - /** - * The time in seconds after which the async buffers will be flushed (if no activity) to prevent - * data loss - */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration asyncBufferFlushTimeout = Duration.ofSeconds(10); - - /** The number of shards that the index will be created with */ - private int indexShardCount = 5; - - /** The number of replicas that the index will be configured to have */ - private int indexReplicasCount = 1; - - /** 
The number of task log results that will be returned in the response */ - private int taskLogResultLimit = 10; - - /** The timeout in milliseconds used when requesting a connection from the connection manager */ - private int restClientConnectionRequestTimeout = -1; - - /** Used to control if index management is to be enabled or will be controlled externally */ - private boolean autoIndexManagementEnabled = true; - - /** - * Document types are deprecated in ES6 and removed from ES7. This property can be used to - * disable the use of specific document types with an override. This property is currently used - * in ES6 module. - * - *

    Note that this property will only take effect if {@link - * ElasticSearchProperties#isAutoIndexManagementEnabled} is set to false and index management is - * handled outside of this module. - */ - private String documentTypeOverride = ""; - - /** Elasticsearch basic auth username */ - private String username; - - /** Elasticsearch basic auth password */ - private String password; - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getIndexPrefix() { - return indexPrefix; - } - - public void setIndexPrefix(String indexPrefix) { - this.indexPrefix = indexPrefix; - } - - public String getClusterHealthColor() { - return clusterHealthColor; - } - - public void setClusterHealthColor(String clusterHealthColor) { - this.clusterHealthColor = clusterHealthColor; - } - - public int getIndexBatchSize() { - return indexBatchSize; - } - - public void setIndexBatchSize(int indexBatchSize) { - this.indexBatchSize = indexBatchSize; - } - - public int getAsyncWorkerQueueSize() { - return asyncWorkerQueueSize; - } - - public void setAsyncWorkerQueueSize(int asyncWorkerQueueSize) { - this.asyncWorkerQueueSize = asyncWorkerQueueSize; - } - - public int getAsyncMaxPoolSize() { - return asyncMaxPoolSize; - } - - public void setAsyncMaxPoolSize(int asyncMaxPoolSize) { - this.asyncMaxPoolSize = asyncMaxPoolSize; - } - - public Duration getAsyncBufferFlushTimeout() { - return asyncBufferFlushTimeout; - } - - public void setAsyncBufferFlushTimeout(Duration asyncBufferFlushTimeout) { - this.asyncBufferFlushTimeout = asyncBufferFlushTimeout; - } - - public int getIndexShardCount() { - return indexShardCount; - } - - public void setIndexShardCount(int indexShardCount) { - this.indexShardCount = indexShardCount; - } - - public int getIndexReplicasCount() { - return indexReplicasCount; - } - - public void setIndexReplicasCount(int indexReplicasCount) { - this.indexReplicasCount = indexReplicasCount; - } - - public int 
getTaskLogResultLimit() { - return taskLogResultLimit; - } - - public void setTaskLogResultLimit(int taskLogResultLimit) { - this.taskLogResultLimit = taskLogResultLimit; - } - - public int getRestClientConnectionRequestTimeout() { - return restClientConnectionRequestTimeout; - } - - public void setRestClientConnectionRequestTimeout(int restClientConnectionRequestTimeout) { - this.restClientConnectionRequestTimeout = restClientConnectionRequestTimeout; - } - - public boolean isAutoIndexManagementEnabled() { - return autoIndexManagementEnabled; - } - - public void setAutoIndexManagementEnabled(boolean autoIndexManagementEnabled) { - this.autoIndexManagementEnabled = autoIndexManagementEnabled; - } - - public String getDocumentTypeOverride() { - return documentTypeOverride; - } - - public void setDocumentTypeOverride(String documentTypeOverride) { - this.documentTypeOverride = documentTypeOverride; - } - - public String getUsername() { - return username; - } - - public void setUsername(String username) { - this.username = username; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public List toURLs() { - String clusterAddress = getUrl(); - String[] hosts = clusterAddress.split(","); - return Arrays.stream(hosts) - .map( - host -> - (host.startsWith("http://") - || host.startsWith("https://") - || host.startsWith("tcp://")) - ? 
toURL(host) - : toURL("tcp://" + host)) - .collect(Collectors.toList()); - } - - private URL toURL(String url) { - try { - return new URL(url); - } catch (MalformedURLException e) { - throw new IllegalArgumentException(url + "can not be converted to java.net.URL"); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchV6Configuration.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchV6Configuration.java deleted file mode 100644 index a16bea44d..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchV6Configuration.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.config; - -import java.net.InetAddress; -import java.net.URI; -import java.net.URL; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.apache.http.HttpHost; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.transport.client.PreBuiltTransportClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; -import org.springframework.retry.backoff.FixedBackOffPolicy; -import org.springframework.retry.support.RetryTemplate; - -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.es6.dao.index.ElasticSearchDAOV6; -import com.netflix.conductor.es6.dao.index.ElasticSearchRestDAOV6; - -import com.fasterxml.jackson.databind.ObjectMapper; - -@Configuration(proxyBeanMethods = false) 
-@EnableConfigurationProperties(ElasticSearchProperties.class) -@Conditional(ElasticSearchConditions.ElasticSearchV6Enabled.class) -public class ElasticSearchV6Configuration { - private static final Logger log = LoggerFactory.getLogger(ElasticSearchV6Configuration.class); - - @Bean - @Conditional(IsTcpProtocol.class) - public Client client(ElasticSearchProperties properties) { - Settings settings = - Settings.builder() - .put("client.transport.ignore_cluster_name", true) - .put("client.transport.sniff", true) - .build(); - - TransportClient transportClient = new PreBuiltTransportClient(settings); - - List clusterAddresses = getURIs(properties); - - if (clusterAddresses.isEmpty()) { - log.warn("workflow.elasticsearch.url is not set. Indexing will remain DISABLED."); - } - for (URI hostAddress : clusterAddresses) { - int port = Optional.ofNullable(hostAddress.getPort()).orElse(9200); - try { - transportClient.addTransportAddress( - new TransportAddress(InetAddress.getByName(hostAddress.getHost()), port)); - } catch (Exception e) { - throw new RuntimeException("Invalid host" + hostAddress.getHost(), e); - } - } - return transportClient; - } - - @Bean - @Conditional(IsHttpProtocol.class) - public RestClient restClient(ElasticSearchProperties properties) { - RestClientBuilder restClientBuilder = - RestClient.builder(convertToHttpHosts(properties.toURLs())); - if (properties.getRestClientConnectionRequestTimeout() > 0) { - restClientBuilder.setRequestConfigCallback( - requestConfigBuilder -> - requestConfigBuilder.setConnectionRequestTimeout( - properties.getRestClientConnectionRequestTimeout())); - } - - return restClientBuilder.build(); - } - - @Bean - @Conditional(IsHttpProtocol.class) - public RestClientBuilder restClientBuilder(ElasticSearchProperties properties) { - RestClientBuilder builder = RestClient.builder(convertToHttpHosts(properties.toURLs())); - - if (properties.getUsername() != null && properties.getPassword() != null) { - log.info( - "Configure 
ElasticSearch with BASIC authentication. User:{}", - properties.getUsername()); - final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials( - AuthScope.ANY, - new UsernamePasswordCredentials( - properties.getUsername(), properties.getPassword())); - builder.setHttpClientConfigCallback( - httpClientBuilder -> - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)); - } else { - log.info("Configure ElasticSearch with no authentication."); - } - return builder; - } - - @Bean - @Conditional(IsHttpProtocol.class) - public IndexDAO es6IndexRestDAO( - RestClientBuilder restClientBuilder, - ElasticSearchProperties properties, - @Qualifier("es6RetryTemplate") RetryTemplate retryTemplate, - ObjectMapper objectMapper) { - return new ElasticSearchRestDAOV6( - restClientBuilder, retryTemplate, properties, objectMapper); - } - - @Bean - @Conditional(IsTcpProtocol.class) - public IndexDAO es6IndexDAO( - Client client, - @Qualifier("es6RetryTemplate") RetryTemplate retryTemplate, - ElasticSearchProperties properties, - ObjectMapper objectMapper) { - return new ElasticSearchDAOV6(client, retryTemplate, properties, objectMapper); - } - - @Bean - public RetryTemplate es6RetryTemplate() { - RetryTemplate retryTemplate = new RetryTemplate(); - FixedBackOffPolicy fixedBackOffPolicy = new FixedBackOffPolicy(); - fixedBackOffPolicy.setBackOffPeriod(1000L); - retryTemplate.setBackOffPolicy(fixedBackOffPolicy); - return retryTemplate; - } - - private HttpHost[] convertToHttpHosts(List hosts) { - return hosts.stream() - .map(host -> new HttpHost(host.getHost(), host.getPort(), host.getProtocol())) - .toArray(HttpHost[]::new); - } - - public List getURIs(ElasticSearchProperties properties) { - String clusterAddress = properties.getUrl(); - String[] hosts = clusterAddress.split(","); - - return Arrays.stream(hosts) - .map( - host -> - (host.startsWith("http://") - || host.startsWith("https://") - || 
host.startsWith("tcp://")) - ? URI.create(host) - : URI.create("tcp://" + host)) - .collect(Collectors.toList()); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsHttpProtocol.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsHttpProtocol.java deleted file mode 100644 index 2437e1a22..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsHttpProtocol.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.config; - -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Condition; -import org.springframework.context.annotation.ConditionContext; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.type.AnnotatedTypeMetadata; - -@EnableConfigurationProperties(ElasticSearchProperties.class) -@Configuration -public class IsHttpProtocol implements Condition { - @Override - public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { - String url = context.getEnvironment().getProperty("conductor.elasticsearch.url"); - if (url.startsWith("http") || url.startsWith("https")) { - return true; - } - return false; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsTcpProtocol.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsTcpProtocol.java deleted file mode 100644 index accf3c468..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsTcpProtocol.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.config; - -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Condition; -import org.springframework.context.annotation.ConditionContext; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.type.AnnotatedTypeMetadata; - -@EnableConfigurationProperties(ElasticSearchProperties.class) -@Configuration -public class IsTcpProtocol implements Condition { - @Override - public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { - String url = context.getEnvironment().getProperty("conductor.elasticsearch.url"); - if (url.startsWith("http") || url.startsWith("https")) { - return false; - } - return true; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestBuilderWrapper.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestBuilderWrapper.java deleted file mode 100644 index d0056c729..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestBuilderWrapper.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.util.Objects; - -import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.update.UpdateRequest; -import org.springframework.lang.NonNull; - -/** Thread-safe wrapper for {@link BulkRequestBuilder}. */ -public class BulkRequestBuilderWrapper { - - private final BulkRequestBuilder bulkRequestBuilder; - - public BulkRequestBuilderWrapper(@NonNull BulkRequestBuilder bulkRequestBuilder) { - this.bulkRequestBuilder = Objects.requireNonNull(bulkRequestBuilder); - } - - public void add(@NonNull UpdateRequest req) { - synchronized (bulkRequestBuilder) { - bulkRequestBuilder.add(Objects.requireNonNull(req)); - } - } - - public void add(@NonNull IndexRequest req) { - synchronized (bulkRequestBuilder) { - bulkRequestBuilder.add(Objects.requireNonNull(req)); - } - } - - public int numberOfActions() { - synchronized (bulkRequestBuilder) { - return bulkRequestBuilder.numberOfActions(); - } - } - - public ActionFuture execute() { - synchronized (bulkRequestBuilder) { - return bulkRequestBuilder.execute(); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestWrapper.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestWrapper.java deleted file mode 100644 index d33aedf5c..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestWrapper.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. 
- *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.util.Objects; - -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.update.UpdateRequest; -import org.springframework.lang.NonNull; - -/** Thread-safe wrapper for {@link BulkRequest}. */ -class BulkRequestWrapper { - - private final BulkRequest bulkRequest; - - BulkRequestWrapper(@NonNull BulkRequest bulkRequest) { - this.bulkRequest = Objects.requireNonNull(bulkRequest); - } - - public void add(@NonNull UpdateRequest req) { - synchronized (bulkRequest) { - bulkRequest.add(Objects.requireNonNull(req)); - } - } - - public void add(@NonNull IndexRequest req) { - synchronized (bulkRequest) { - bulkRequest.add(Objects.requireNonNull(req)); - } - } - - BulkRequest get() { - return bulkRequest; - } - - int numberOfActions() { - synchronized (bulkRequest) { - return bulkRequest.numberOfActions(); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchBaseDAO.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchBaseDAO.java deleted file mode 100644 index 38733977e..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchBaseDAO.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.io.IOException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; - -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.es6.dao.query.parser.Expression; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; - -abstract class ElasticSearchBaseDAO implements IndexDAO { - - String indexPrefix; - - String loadTypeMappingSource(String path) throws IOException { - return applyIndexPrefixToTemplate( - IOUtils.toString(ElasticSearchBaseDAO.class.getResourceAsStream(path))); - } - - private String applyIndexPrefixToTemplate(String text) { - String pattern = "\"template\": \"\\*(.*)\\*\""; - Pattern r = Pattern.compile(pattern); - Matcher m = r.matcher(text); - StringBuilder sb = new StringBuilder(); - while (m.find()) { - m.appendReplacement( - sb, - m.group(0) - .replaceFirst( - Pattern.quote(m.group(1)), indexPrefix + "_" + m.group(1))); - } - m.appendTail(sb); - return sb.toString(); - } - - BoolQueryBuilder boolQueryBuilder(String expression, String queryString) - throws ParserException { - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if (StringUtils.isNotEmpty(expression)) { - Expression exp = Expression.fromString(expression); - queryBuilder = exp.getFilterBuilder(); - } - BoolQueryBuilder filterQuery = 
QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(queryString); - return QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - } - - protected String getIndexName(String documentType) { - return indexPrefix + "_" + documentType; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchDAOV6.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchDAOV6.java deleted file mode 100644 index 0d40fd61a..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchDAOV6.java +++ /dev/null @@ -1,935 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.io.IOException; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.time.LocalDate; -import java.util.*; -import java.util.concurrent.*; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; - -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import 
org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.search.sort.SortBuilders; -import org.elasticsearch.search.sort.SortOrder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.retry.support.RetryTemplate; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.es6.config.ElasticSearchProperties; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; -import com.netflix.conductor.metrics.Monitors; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.type.MapType; -import com.fasterxml.jackson.databind.type.TypeFactory; - -@Trace -public class ElasticSearchDAOV6 extends ElasticSearchBaseDAO implements IndexDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchDAOV6.class); - - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String LOG_DOC_TYPE = "task_log"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String MSG_DOC_TYPE = "message"; - - private static final int CORE_POOL_SIZE = 6; - private static final long KEEP_ALIVE_TIME = 1L; - private static final int UPDATE_REQUEST_RETRY_COUNT = 5; - - private static final String CLASS_NAME = 
ElasticSearchDAOV6.class.getSimpleName(); - - private final String workflowIndexName; - private final String taskIndexName; - private final String eventIndexPrefix; - private String eventIndexName; - private final String messageIndexPrefix; - private String messageIndexName; - private String logIndexName; - private final String logIndexPrefix; - private final String docTypeOverride; - - private final ObjectMapper objectMapper; - private final Client elasticSearchClient; - - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private final ExecutorService executorService; - private final ExecutorService logExecutorService; - - private final ConcurrentHashMap bulkRequests; - private final int indexBatchSize; - private final long asyncBufferFlushTimeout; - private final ElasticSearchProperties properties; - - private final RetryTemplate retryTemplate; - - static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); - } - - public ElasticSearchDAOV6( - Client elasticSearchClient, - RetryTemplate retryTemplate, - ElasticSearchProperties properties, - ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - this.elasticSearchClient = elasticSearchClient; - this.indexPrefix = properties.getIndexPrefix(); - this.workflowIndexName = getIndexName(WORKFLOW_DOC_TYPE); - this.taskIndexName = getIndexName(TASK_DOC_TYPE); - this.logIndexPrefix = this.indexPrefix + "_" + LOG_DOC_TYPE; - this.messageIndexPrefix = this.indexPrefix + "_" + MSG_DOC_TYPE; - this.eventIndexPrefix = this.indexPrefix + "_" + EVENT_DOC_TYPE; - int workerQueueSize = properties.getAsyncWorkerQueueSize(); - int maximumPoolSize = properties.getAsyncMaxPoolSize(); - this.bulkRequests = new ConcurrentHashMap<>(); - this.indexBatchSize = properties.getIndexBatchSize(); - this.asyncBufferFlushTimeout = properties.getAsyncBufferFlushTimeout().toMillis(); - this.properties = properties; - - if 
(!properties.isAutoIndexManagementEnabled() - && StringUtils.isNotBlank(properties.getDocumentTypeOverride())) { - docTypeOverride = properties.getDocumentTypeOverride(); - } else { - docTypeOverride = ""; - } - - this.executorService = - new ThreadPoolExecutor( - CORE_POOL_SIZE, - maximumPoolSize, - KEEP_ALIVE_TIME, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - LOGGER.warn( - "Request {} to async dao discarded in executor {}", - runnable, - executor); - Monitors.recordDiscardedIndexingCount("indexQueue"); - }); - - int corePoolSize = 1; - maximumPoolSize = 2; - long keepAliveTime = 30L; - this.logExecutorService = - new ThreadPoolExecutor( - corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - LOGGER.warn( - "Request {} to async log dao discarded in executor {}", - runnable, - executor); - Monitors.recordDiscardedIndexingCount("logQueue"); - }); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS); - this.retryTemplate = retryTemplate; - } - - @PreDestroy - private void shutdown() { - LOGGER.info("Starting graceful shutdown of executor service"); - shutdownExecutorService(logExecutorService); - shutdownExecutorService(executorService); - } - - private void shutdownExecutorService(ExecutorService execService) { - try { - execService.shutdown(); - if (execService.awaitTermination(30, TimeUnit.SECONDS)) { - LOGGER.debug("tasks completed, shutting down"); - } else { - LOGGER.warn("Forcing shutdown after waiting for 30 seconds"); - execService.shutdownNow(); - } - } catch (InterruptedException ie) { - LOGGER.warn( - "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); - execService.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - @Override - @PostConstruct - public void setup() throws Exception { - 
waitForHealthyCluster(); - - if (properties.isAutoIndexManagementEnabled()) { - createIndexesTemplates(); - createWorkflowIndex(); - createTaskIndex(); - } - } - - private void waitForHealthyCluster() throws Exception { - elasticSearchClient - .admin() - .cluster() - .prepareHealth() - .setWaitForGreenStatus() - .execute() - .get(); - } - - /** Initializes the indexes templates task_log, message and event, and mappings. */ - private void createIndexesTemplates() { - try { - initIndexesTemplates(); - updateIndexesNames(); - Executors.newScheduledThreadPool(1) - .scheduleAtFixedRate(this::updateIndexesNames, 0, 1, TimeUnit.HOURS); - } catch (Exception e) { - LOGGER.error("Error creating index templates", e); - } - } - - private void initIndexesTemplates() { - initIndexTemplate(LOG_DOC_TYPE); - initIndexTemplate(EVENT_DOC_TYPE); - initIndexTemplate(MSG_DOC_TYPE); - } - - private void initIndexTemplate(String type) { - String template = "template_" + type; - GetIndexTemplatesResponse result = - elasticSearchClient - .admin() - .indices() - .prepareGetTemplates(template) - .execute() - .actionGet(); - if (result.getIndexTemplates().isEmpty()) { - LOGGER.info("Creating the index template '{}'", template); - try { - String templateSource = loadTypeMappingSource("/" + template + ".json"); - elasticSearchClient - .admin() - .indices() - .preparePutTemplate(template) - .setSource(templateSource.getBytes(), XContentType.JSON) - .execute() - .actionGet(); - } catch (Exception e) { - LOGGER.error("Failed to init " + template, e); - } - } - } - - private void updateIndexesNames() { - logIndexName = updateIndexName(LOG_DOC_TYPE); - eventIndexName = updateIndexName(EVENT_DOC_TYPE); - messageIndexName = updateIndexName(MSG_DOC_TYPE); - } - - private String updateIndexName(String type) { - String indexName = - this.indexPrefix + "_" + type + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - createIndex(indexName); - return indexName; - } - - private void createWorkflowIndex() { - 
createIndex(workflowIndexName); - addTypeMapping(workflowIndexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json"); - } - - private void createTaskIndex() { - createIndex(taskIndexName); - addTypeMapping(taskIndexName, TASK_DOC_TYPE, "/mappings_docType_task.json"); - } - - private void createIndex(String indexName) { - try { - elasticSearchClient - .admin() - .indices() - .prepareGetIndex() - .addIndices(indexName) - .execute() - .actionGet(); - } catch (IndexNotFoundException infe) { - try { - CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); - createIndexRequest.settings( - Settings.builder() - .put("index.number_of_shards", properties.getIndexShardCount()) - .put( - "index.number_of_replicas", - properties.getIndexReplicasCount())); - - elasticSearchClient.admin().indices().create(createIndexRequest).actionGet(); - } catch (ResourceAlreadyExistsException done) { - LOGGER.error("Failed to update log index name: {}", indexName, done); - } - } - } - - private void addTypeMapping(String indexName, String type, String sourcePath) { - GetMappingsResponse getMappingsResponse = - elasticSearchClient - .admin() - .indices() - .prepareGetMappings(indexName) - .addTypes(type) - .execute() - .actionGet(); - if (getMappingsResponse.mappings().isEmpty()) { - LOGGER.info("Adding the {} type mappings", indexName); - try { - String source = loadTypeMappingSource(sourcePath); - elasticSearchClient - .admin() - .indices() - .preparePutMapping(indexName) - .setType(type) - .setSource(source, XContentType.JSON) - .execute() - .actionGet(); - } catch (Exception e) { - LOGGER.error("Failed to init index " + indexName + " mappings", e); - } - } - } - - @Override - public void indexWorkflow(WorkflowSummary workflow) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = workflow.getWorkflowId(); - byte[] doc = objectMapper.writeValueAsBytes(workflow); - String docType = - StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; - - UpdateRequest req = buildUpdateRequest(id, doc, workflowIndexName, docType); - elasticSearchClient.update(req).actionGet(); - - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing workflow: {}", - endTime - startTime, - workflow.getWorkflowId()); - Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "indexWorkflow"); - LOGGER.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(WorkflowSummary workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(TaskSummary task) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = task.getTaskId(); - byte[] doc = objectMapper.writeValueAsBytes(task); - String docType = StringUtils.isBlank(docTypeOverride) ? 
TASK_DOC_TYPE : docTypeOverride; - - UpdateRequest req = new UpdateRequest(taskIndexName, docType, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - indexObject(req, TASK_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing task:{} in workflow: {}", - endTime - startTime, - task.getTaskId(), - task.getWorkflowId()); - Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - LOGGER.error("Failed to index task: {}", task.getTaskId(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(TaskSummary task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - private void indexObject(UpdateRequest req, String docType) { - if (bulkRequests.get(docType) == null) { - bulkRequests.put( - docType, - new BulkRequests( - System.currentTimeMillis(), elasticSearchClient.prepareBulk())); - } - bulkRequests.get(docType).getBulkRequestBuilder().add(req); - if (bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() - >= this.indexBatchSize) { - indexBulkRequest(docType); - } - } - - private synchronized void indexBulkRequest(String docType) { - if (bulkRequests.get(docType).getBulkRequestBuilder() != null - && bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() > 0) { - updateWithRetry(bulkRequests.get(docType).getBulkRequestBuilder(), docType); - bulkRequests.put( - docType, - new BulkRequests( - System.currentTimeMillis(), elasticSearchClient.prepareBulk())); - } - } - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - if (taskExecLogs.isEmpty()) { - return; - } - - try { - long startTime = Instant.now().toEpochMilli(); - BulkRequestBuilderWrapper bulkRequestBuilder = - new BulkRequestBuilderWrapper(elasticSearchClient.prepareBulk()); - for 
(TaskExecLog log : taskExecLogs) { - String docType = - StringUtils.isBlank(docTypeOverride) ? LOG_DOC_TYPE : docTypeOverride; - IndexRequest request = new IndexRequest(logIndexName, docType); - request.source(objectMapper.writeValueAsBytes(log), XContentType.JSON); - bulkRequestBuilder.add(request); - } - bulkRequestBuilder.execute().actionGet(5, TimeUnit.SECONDS); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime); - Monitors.recordESIndexTime( - "index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - List taskIds = - taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList()); - LOGGER.error("Failed to index task execution logs for tasks: {}", taskIds, e); - } - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - try { - BoolQueryBuilder query = boolQueryBuilder("taskId='" + taskId + "'", "*"); - - String docType = StringUtils.isBlank(docTypeOverride) ? 
LOG_DOC_TYPE : docTypeOverride; - final SearchRequestBuilder srb = - elasticSearchClient - .prepareSearch(logIndexPrefix + "*") - .setQuery(query) - .setTypes(docType) - .setSize(properties.getTaskLogResultLimit()) - .addSort(SortBuilders.fieldSort("createdTime").order(SortOrder.ASC)); - - return mapTaskExecLogsResponse(srb.execute().actionGet()); - } catch (Exception e) { - LOGGER.error("Failed to get task execution logs for task: {}", taskId, e); - } - return null; - } - - private List mapTaskExecLogsResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List logs = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); - logs.add(tel); - } - return logs; - } - - @Override - public void addMessage(String queue, Message message) { - try { - long startTime = Instant.now().toEpochMilli(); - Map doc = new HashMap<>(); - doc.put("messageId", message.getId()); - doc.put("payload", message.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - - String docType = StringUtils.isBlank(docTypeOverride) ? 
MSG_DOC_TYPE : docTypeOverride; - UpdateRequest req = new UpdateRequest(messageIndexName, docType, message.getId()); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - indexObject(req, MSG_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing message: {}", - endTime - startTime, - message.getId()); - Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime); - } catch (Exception e) { - LOGGER.error("Failed to index message: {}", message.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService); - } - - @Override - public List getMessages(String queue) { - try { - BoolQueryBuilder fq = boolQueryBuilder("queue='" + queue + "'", "*"); - - String docType = StringUtils.isBlank(docTypeOverride) ? MSG_DOC_TYPE : docTypeOverride; - final SearchRequestBuilder srb = - elasticSearchClient - .prepareSearch(messageIndexPrefix + "*") - .setQuery(fq) - .setTypes(docType) - .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC)); - - return mapGetMessagesResponse(srb.execute().actionGet()); - } catch (Exception e) { - LOGGER.error("Failed to get messages for queue: {}", queue, e); - } - return null; - } - - private List mapGetMessagesResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - TypeFactory factory = TypeFactory.defaultInstance(); - MapType type = factory.constructMapType(HashMap.class, String.class, String.class); - List messages = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - Map mapSource = objectMapper.readValue(source, type); - Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null); - messages.add(msg); - } - return messages; - } - - @Override - public void 
addEventExecution(EventExecution eventExecution) { - try { - long startTime = Instant.now().toEpochMilli(); - byte[] doc = objectMapper.writeValueAsBytes(eventExecution); - String id = - eventExecution.getName() - + "." - + eventExecution.getEvent() - + "." - + eventExecution.getMessageId() - + "." - + eventExecution.getId(); - String docType = - StringUtils.isBlank(docTypeOverride) ? EVENT_DOC_TYPE : docTypeOverride; - UpdateRequest req = buildUpdateRequest(id, doc, eventIndexName, docType); - indexObject(req, EVENT_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing event execution: {}", - endTime - startTime, - eventExecution.getId()); - Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - LOGGER.error("Failed to index event execution: {}", eventExecution.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync( - () -> addEventExecution(eventExecution), logExecutorService); - } - - @Override - public List getEventExecutions(String event) { - try { - BoolQueryBuilder fq = boolQueryBuilder("event='" + event + "'", "*"); - - String docType = - StringUtils.isBlank(docTypeOverride) ? 
EVENT_DOC_TYPE : docTypeOverride; - final SearchRequestBuilder srb = - elasticSearchClient - .prepareSearch(eventIndexPrefix + "*") - .setQuery(fq) - .setTypes(docType) - .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC)); - - return mapEventExecutionsResponse(srb.execute().actionGet()); - } catch (Exception e) { - LOGGER.error("Failed to get executions for event: {}", event, e); - } - return null; - } - - private List mapEventExecutionsResponse(SearchResponse response) - throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List executions = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - EventExecution tel = objectMapper.readValue(source, EventExecution.class); - executions.add(tel); - } - return executions; - } - - private void updateWithRetry(BulkRequestBuilderWrapper request, String docType) { - try { - long startTime = Instant.now().toEpochMilli(); - retryTemplate.execute(context -> request.execute().actionGet(5, TimeUnit.SECONDS)); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing object of type: {}", endTime - startTime, docType); - Monitors.recordESIndexTime("index_object", docType, endTime - startTime); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "index"); - LOGGER.error("Failed to index {} for requests", request.numberOfActions(), e); - } - } - - @Override - public SearchResult searchWorkflows( - String query, String freeText, int start, int count, List sort) { - return search(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - } - - @Override - public long getWorkflowCount(String query, String freeText) { - return count(query, freeText, WORKFLOW_DOC_TYPE); - } - - @Override - public SearchResult searchTasks( - String query, String freeText, int start, int count, List sort) { - return search(query, start, count, sort, freeText, TASK_DOC_TYPE); - } - - @Override - public void removeWorkflow(String 
workflowId) { - try { - long startTime = Instant.now().toEpochMilli(); - DeleteRequest request = - new DeleteRequest(workflowIndexName, WORKFLOW_DOC_TYPE, workflowId); - DeleteResponse response = elasticSearchClient.delete(request).actionGet(); - if (response.getResult() == DocWriteResponse.Result.DELETED) { - LOGGER.error("Index removal failed - document not found by id: {}", workflowId); - } - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for removing workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Throwable e) { - LOGGER.error("Failed to remove workflow {} from index", workflowId, e); - Monitors.error(CLASS_NAME, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - if (keys.length != values.length) { - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "Number of keys and values do not match"); - } - - long startTime = Instant.now().toEpochMilli(); - UpdateRequest request = - new UpdateRequest(workflowIndexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - Map source = - IntStream.range(0, keys.length) - .boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - LOGGER.debug( - "Updating workflow {} in elasticsearch index: {}", - workflowInstanceId, - workflowIndexName); - elasticSearchClient.update(request).actionGet(); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId); - Monitors.recordESIndexTime("update_workflow", 
WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } - - @Override - public CompletableFuture asyncUpdateWorkflow( - String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.runAsync( - () -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride; - GetRequest request = - new GetRequest(workflowIndexName, docType, workflowInstanceId) - .fetchSourceContext( - new FetchSourceContext( - true, new String[] {fieldToGet}, Strings.EMPTY_ARRAY)); - GetResponse response = elasticSearchClient.get(request).actionGet(); - - if (response.isExists()) { - Map sourceAsMap = response.getSourceAsMap(); - if (sourceAsMap.get(fieldToGet) != null) { - return sourceAsMap.get(fieldToGet).toString(); - } - } - - LOGGER.debug( - "Unable to find Workflow: {} in ElasticSearch index: {}.", - workflowInstanceId, - workflowIndexName); - return null; - } - - private long count(String structuredQuery, String freeTextQuery, String docType) { - try { - docType = StringUtils.isBlank(docTypeOverride) ? docType : docTypeOverride; - BoolQueryBuilder fq = boolQueryBuilder(structuredQuery, freeTextQuery); - // The count api has been removed from the Java api, use the search api instead and set - // size to 0. 
- final SearchRequestBuilder srb = - elasticSearchClient - .prepareSearch(getIndexName(docType)) - .setQuery(fq) - .setTypes(docType) - .storedFields("_id") - .setSize(0); - SearchResponse response = srb.get(); - return response.getHits().getTotalHits(); - } catch (ParserException e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - private SearchResult search( - String structuredQuery, - int start, - int size, - List sortOptions, - String freeTextQuery, - String docType) { - try { - docType = StringUtils.isBlank(docTypeOverride) ? docType : docTypeOverride; - BoolQueryBuilder fq = boolQueryBuilder(structuredQuery, freeTextQuery); - final SearchRequestBuilder srb = - elasticSearchClient - .prepareSearch(getIndexName(docType)) - .setQuery(fq) - .setTypes(docType) - .storedFields("_id") - .setFrom(start) - .setSize(size); - - addSortOptions(srb, sortOptions); - - return mapSearchResult(srb.get()); - } catch (ParserException e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - private void addSortOptions(SearchRequestBuilder srb, List sortOptions) { - if (sortOptions != null) { - sortOptions.forEach( - sortOption -> { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int indx = sortOption.indexOf(':'); - // Can't be 0, need the field name at-least - if (indx > 0) { - field = sortOption.substring(0, indx); - order = SortOrder.valueOf(sortOption.substring(indx + 1)); - } - srb.addSort(field, order); - }); - } - } - - private SearchResult mapSearchResult(SearchResponse response) { - List result = new LinkedList<>(); - response.getHits().forEach(hit -> result.add(hit.getId())); - long count = response.getHits().getTotalHits(); - return new SearchResult<>(count, result); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = - QueryBuilders.boolQuery() - .must( - 
QueryBuilders.rangeQuery("endTime") - .lt(LocalDate.now().minusDays(archiveTtlDays).toString()) - .gte( - LocalDate.now() - .minusDays(archiveTtlDays) - .minusDays(1) - .toString())) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - .should(QueryBuilders.termQuery("status", "FAILED")) - .should(QueryBuilders.termQuery("status", "TIMED_OUT")) - .should(QueryBuilders.termQuery("status", "TERMINATED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumShouldMatch(1); - String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride; - SearchRequestBuilder s = - elasticSearchClient - .prepareSearch(indexName) - .setTypes(docType) - .setQuery(q) - .setSize(1000); - return extractSearchIds(s); - } - - private UpdateRequest buildUpdateRequest( - String id, byte[] doc, String indexName, String docType) { - UpdateRequest req = new UpdateRequest(indexName, docType, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - req.retryOnConflict(UPDATE_REQUEST_RETRY_COUNT); - return req; - } - - private List extractSearchIds(SearchRequestBuilder s) { - SearchResponse response = s.execute().actionGet(); - SearchHits hits = response.getHits(); - List ids = new LinkedList<>(); - for (SearchHit hit : hits.getHits()) { - ids.add(hit.getId()); - } - return ids; - } - - /** - * Flush the buffers if bulk requests have not been indexed for the past {@link - * ElasticSearchProperties#getAsyncBufferFlushTimeout()} seconds. This is to prevent data loss - * in case the instance is terminated, while the buffer still holds documents to be indexed. 
- */ - private void flushBulkRequests() { - bulkRequests.entrySet().stream() - .filter( - entry -> - (System.currentTimeMillis() - entry.getValue().getLastFlushTime()) - >= asyncBufferFlushTimeout) - .filter( - entry -> - entry.getValue().getBulkRequestBuilder() != null - && entry.getValue() - .getBulkRequestBuilder() - .numberOfActions() - > 0) - .forEach( - entry -> { - LOGGER.debug( - "Flushing bulk request buffer for type {}, size: {}", - entry.getKey(), - entry.getValue().getBulkRequestBuilder().numberOfActions()); - indexBulkRequest(entry.getKey()); - }); - } - - private static class BulkRequests { - - private long lastFlushTime; - private BulkRequestBuilderWrapper bulkRequestBuilder; - - public long getLastFlushTime() { - return lastFlushTime; - } - - public void setLastFlushTime(long lastFlushTime) { - this.lastFlushTime = lastFlushTime; - } - - public BulkRequestBuilderWrapper getBulkRequestBuilder() { - return bulkRequestBuilder; - } - - public void setBulkRequestBuilder(BulkRequestBuilder bulkRequestBuilder) { - this.bulkRequestBuilder = new BulkRequestBuilderWrapper(bulkRequestBuilder); - } - - BulkRequests(long lastFlushTime, BulkRequestBuilder bulkRequestBuilder) { - this.lastFlushTime = lastFlushTime; - this.bulkRequestBuilder = new BulkRequestBuilderWrapper(bulkRequestBuilder); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDAOV6.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDAOV6.java deleted file mode 100644 index a55d2f7d3..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDAOV6.java +++ /dev/null @@ -1,1087 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.io.IOException; -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.time.LocalDate; -import java.util.*; -import java.util.concurrent.*; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpStatus; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.http.nio.entity.NStringEntity; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.client.*; -import org.elasticsearch.client.core.CountRequest; -import org.elasticsearch.client.core.CountResponse; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import 
org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.retry.support.RetryTemplate; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.es6.config.ElasticSearchProperties; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; -import com.netflix.conductor.metrics.Monitors; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.databind.type.MapType; -import com.fasterxml.jackson.databind.type.TypeFactory; - -@Trace -public class ElasticSearchRestDAOV6 extends ElasticSearchBaseDAO implements IndexDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchRestDAOV6.class); - - private static final int CORE_POOL_SIZE = 6; - private static final long KEEP_ALIVE_TIME = 1L; - - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String LOG_DOC_TYPE = "task_log"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String MSG_DOC_TYPE = "message"; - - private static final TimeZone GMT = 
TimeZone.getTimeZone("GMT"); - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private @interface HttpMethod { - - String GET = "GET"; - String POST = "POST"; - String PUT = "PUT"; - String HEAD = "HEAD"; - } - - private static final String className = ElasticSearchRestDAOV6.class.getSimpleName(); - - private final String workflowIndexName; - private final String taskIndexName; - private final String eventIndexPrefix; - private String eventIndexName; - private final String messageIndexPrefix; - private String messageIndexName; - private String logIndexName; - private final String logIndexPrefix; - private final String docTypeOverride; - - private final String clusterHealthColor; - private final ObjectMapper objectMapper; - private final RestHighLevelClient elasticSearchClient; - private final RestClient elasticSearchAdminClient; - private final ExecutorService executorService; - private final ExecutorService logExecutorService; - private final ConcurrentHashMap bulkRequests; - private final int indexBatchSize; - private final long asyncBufferFlushTimeout; - private final ElasticSearchProperties properties; - - private final RetryTemplate retryTemplate; - - static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); - } - - public ElasticSearchRestDAOV6( - RestClientBuilder restClientBuilder, - RetryTemplate retryTemplate, - ElasticSearchProperties properties, - ObjectMapper objectMapper) { - - this.objectMapper = objectMapper; - this.elasticSearchAdminClient = restClientBuilder.build(); - this.elasticSearchClient = new RestHighLevelClient(restClientBuilder); - this.clusterHealthColor = properties.getClusterHealthColor(); - this.bulkRequests = new ConcurrentHashMap<>(); - this.indexBatchSize = properties.getIndexBatchSize(); - this.asyncBufferFlushTimeout = properties.getAsyncBufferFlushTimeout().toMillis(); - this.properties = properties; - - this.indexPrefix = properties.getIndexPrefix(); - if 
(!properties.isAutoIndexManagementEnabled() - && StringUtils.isNotBlank(properties.getDocumentTypeOverride())) { - docTypeOverride = properties.getDocumentTypeOverride(); - } else { - docTypeOverride = ""; - } - - this.workflowIndexName = getIndexName(WORKFLOW_DOC_TYPE); - this.taskIndexName = getIndexName(TASK_DOC_TYPE); - this.logIndexPrefix = this.indexPrefix + "_" + LOG_DOC_TYPE; - this.messageIndexPrefix = this.indexPrefix + "_" + MSG_DOC_TYPE; - this.eventIndexPrefix = this.indexPrefix + "_" + EVENT_DOC_TYPE; - int workerQueueSize = properties.getAsyncWorkerQueueSize(); - int maximumPoolSize = properties.getAsyncMaxPoolSize(); - - // Set up a workerpool for performing async operations. - this.executorService = - new ThreadPoolExecutor( - CORE_POOL_SIZE, - maximumPoolSize, - KEEP_ALIVE_TIME, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - LOGGER.warn( - "Request {} to async dao discarded in executor {}", - runnable, - executor); - Monitors.recordDiscardedIndexingCount("indexQueue"); - }); - - // Set up a workerpool for performing async operations for task_logs, event_executions, - // message - int corePoolSize = 1; - maximumPoolSize = 2; - long keepAliveTime = 30L; - this.logExecutorService = - new ThreadPoolExecutor( - corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - LOGGER.warn( - "Request {} to async log dao discarded in executor {}", - runnable, - executor); - Monitors.recordDiscardedIndexingCount("logQueue"); - }); - - Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS); - this.retryTemplate = retryTemplate; - } - - @PreDestroy - private void shutdown() { - LOGGER.info("Gracefully shutdown executor service"); - shutdownExecutorService(logExecutorService); - shutdownExecutorService(executorService); - } - - private void 
shutdownExecutorService(ExecutorService execService) { - try { - execService.shutdown(); - if (execService.awaitTermination(30, TimeUnit.SECONDS)) { - LOGGER.debug("tasks completed, shutting down"); - } else { - LOGGER.warn("Forcing shutdown after waiting for 30 seconds"); - execService.shutdownNow(); - } - } catch (InterruptedException ie) { - LOGGER.warn( - "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); - execService.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - @Override - @PostConstruct - public void setup() throws Exception { - waitForHealthyCluster(); - - if (properties.isAutoIndexManagementEnabled()) { - createIndexesTemplates(); - createWorkflowIndex(); - createTaskIndex(); - } - } - - private void createIndexesTemplates() { - try { - initIndexesTemplates(); - updateIndexesNames(); - Executors.newScheduledThreadPool(1) - .scheduleAtFixedRate(this::updateIndexesNames, 0, 1, TimeUnit.HOURS); - } catch (Exception e) { - LOGGER.error("Error creating index templates!", e); - } - } - - private void initIndexesTemplates() { - initIndexTemplate(LOG_DOC_TYPE); - initIndexTemplate(EVENT_DOC_TYPE); - initIndexTemplate(MSG_DOC_TYPE); - } - - /** Initializes the index with the required templates and mappings. 
*/ - private void initIndexTemplate(String type) { - String template = "template_" + type; - try { - if (doesResourceNotExist("/_template/" + template)) { - LOGGER.info("Creating the index template '" + template + "'"); - InputStream stream = - ElasticSearchDAOV6.class.getResourceAsStream("/" + template + ".json"); - byte[] templateSource = IOUtils.toByteArray(stream); - - HttpEntity entity = - new NByteArrayEntity(templateSource, ContentType.APPLICATION_JSON); - elasticSearchAdminClient.performRequest( - HttpMethod.PUT, "/_template/" + template, Collections.emptyMap(), entity); - } - } catch (Exception e) { - LOGGER.error("Failed to init " + template, e); - } - } - - private void updateIndexesNames() { - logIndexName = updateIndexName(LOG_DOC_TYPE); - eventIndexName = updateIndexName(EVENT_DOC_TYPE); - messageIndexName = updateIndexName(MSG_DOC_TYPE); - } - - private String updateIndexName(String type) { - String indexName = - this.indexPrefix + "_" + type + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - try { - addIndex(indexName); - return indexName; - } catch (IOException e) { - LOGGER.error("Failed to update log index name: {}", indexName, e); - throw new ApplicationException(e.getMessage(), e); - } - } - - private void createWorkflowIndex() { - String indexName = getIndexName(WORKFLOW_DOC_TYPE); - try { - addIndex(indexName); - } catch (IOException e) { - LOGGER.error("Failed to initialize index '{}'", indexName, e); - } - try { - addMappingToIndex(indexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json"); - } catch (IOException e) { - LOGGER.error("Failed to add {} mapping", WORKFLOW_DOC_TYPE); - } - } - - private void createTaskIndex() { - String indexName = getIndexName(TASK_DOC_TYPE); - try { - addIndex(indexName); - } catch (IOException e) { - LOGGER.error("Failed to initialize index '{}'", indexName, e); - } - try { - addMappingToIndex(indexName, TASK_DOC_TYPE, "/mappings_docType_task.json"); - } catch (IOException e) { - LOGGER.error("Failed to 
add {} mapping", TASK_DOC_TYPE); - } - } - - /** - * Waits for the ES cluster to become green. - * - * @throws Exception If there is an issue connecting with the ES cluster. - */ - private void waitForHealthyCluster() throws Exception { - Map params = new HashMap<>(); - params.put("wait_for_status", this.clusterHealthColor); - params.put("timeout", "30s"); - - elasticSearchAdminClient.performRequest("GET", "/_cluster/health", params); - } - - /** - * Adds an index to elasticsearch if it does not exist. - * - * @param index The name of the index to create. - * @throws IOException If an error occurred during requests to ES. - */ - private void addIndex(final String index) throws IOException { - - LOGGER.info("Adding index '{}'...", index); - - String resourcePath = "/" + index; - - if (doesResourceNotExist(resourcePath)) { - - try { - ObjectNode setting = objectMapper.createObjectNode(); - ObjectNode indexSetting = objectMapper.createObjectNode(); - - indexSetting.put("number_of_shards", properties.getIndexShardCount()); - indexSetting.put("number_of_replicas", properties.getIndexReplicasCount()); - - setting.set("index", indexSetting); - - elasticSearchAdminClient.performRequest( - HttpMethod.PUT, - resourcePath, - Collections.emptyMap(), - new NStringEntity(setting.toString(), ContentType.APPLICATION_JSON)); - LOGGER.info("Added '{}' index", index); - } catch (ResponseException e) { - - boolean errorCreatingIndex = true; - - Response errorResponse = e.getResponse(); - if (errorResponse.getStatusLine().getStatusCode() == HttpStatus.SC_BAD_REQUEST) { - JsonNode root = - objectMapper.readTree(EntityUtils.toString(errorResponse.getEntity())); - String errorCode = root.get("error").get("type").asText(); - if ("index_already_exists_exception".equals(errorCode)) { - errorCreatingIndex = false; - } - } - - if (errorCreatingIndex) { - throw e; - } - } - } else { - LOGGER.info("Index '{}' already exists", index); - } - } - - /** - * Adds a mapping type to an index if it does 
not exist. - * - * @param index The name of the index. - * @param mappingType The name of the mapping type. - * @param mappingFilename The name of the mapping file to use to add the mapping if it does not - * exist. - * @throws IOException If an error occurred during requests to ES. - */ - private void addMappingToIndex( - final String index, final String mappingType, final String mappingFilename) - throws IOException { - - LOGGER.info("Adding '{}' mapping to index '{}'...", mappingType, index); - - String resourcePath = "/" + index + "/_mapping/" + mappingType; - - if (doesResourceNotExist(resourcePath)) { - HttpEntity entity = - new NByteArrayEntity( - loadTypeMappingSource(mappingFilename).getBytes(), - ContentType.APPLICATION_JSON); - elasticSearchAdminClient.performRequest( - HttpMethod.PUT, resourcePath, Collections.emptyMap(), entity); - LOGGER.info("Added '{}' mapping", mappingType); - } else { - LOGGER.info("Mapping '{}' already exists", mappingType); - } - } - - /** - * Determines whether a resource exists in ES. This will call a GET method to a particular path - * and return true if status 200; false otherwise. - * - * @param resourcePath The path of the resource to get. - * @return True if it exists; false otherwise. - * @throws IOException If an error occurred during requests to ES. - */ - public boolean doesResourceExist(final String resourcePath) throws IOException { - Response response = elasticSearchAdminClient.performRequest(HttpMethod.HEAD, resourcePath); - return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; - } - - /** - * The inverse of doesResourceExist. - * - * @param resourcePath The path of the resource to check. - * @return True if it does not exist; false otherwise. - * @throws IOException If an error occurred during requests to ES. 
- */ - public boolean doesResourceNotExist(final String resourcePath) throws IOException { - return !doesResourceExist(resourcePath); - } - - @Override - public void indexWorkflow(WorkflowSummary workflow) { - try { - long startTime = Instant.now().toEpochMilli(); - String workflowId = workflow.getWorkflowId(); - byte[] docBytes = objectMapper.writeValueAsBytes(workflow); - String docType = - StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride; - - IndexRequest request = new IndexRequest(workflowIndexName, docType, workflowId); - request.source(docBytes, XContentType.JSON); - elasticSearchClient.index(request, RequestOptions.DEFAULT); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(className, "indexWorkflow"); - LOGGER.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(WorkflowSummary workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(TaskSummary task) { - try { - long startTime = Instant.now().toEpochMilli(); - String taskId = task.getTaskId(); - String docType = StringUtils.isBlank(docTypeOverride) ? 
TASK_DOC_TYPE : docTypeOverride; - - indexObject(taskIndexName, docType, taskId, task); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing task:{} in workflow: {}", - endTime - startTime, - taskId, - task.getWorkflowId()); - Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - LOGGER.error("Failed to index task: {}", task.getTaskId(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(TaskSummary task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - if (taskExecLogs.isEmpty()) { - return; - } - - long startTime = Instant.now().toEpochMilli(); - BulkRequest bulkRequest = new BulkRequest(); - for (TaskExecLog log : taskExecLogs) { - - byte[] docBytes; - try { - docBytes = objectMapper.writeValueAsBytes(log); - } catch (JsonProcessingException e) { - LOGGER.error("Failed to convert task log to JSON for task {}", log.getTaskId()); - continue; - } - - String docType = StringUtils.isBlank(docTypeOverride) ? 
LOG_DOC_TYPE : docTypeOverride; - IndexRequest request = new IndexRequest(logIndexName, docType); - request.source(docBytes, XContentType.JSON); - bulkRequest.add(request); - } - - try { - elasticSearchClient.bulk(bulkRequest, RequestOptions.DEFAULT); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime); - Monitors.recordESIndexTime( - "index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - List taskIds = - taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList()); - LOGGER.error("Failed to index task execution logs for tasks: {}", taskIds, e); - } - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - try { - BoolQueryBuilder query = boolQueryBuilder("taskId='" + taskId + "'", "*"); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(query); - searchSourceBuilder.sort(new FieldSortBuilder("createdTime").order(SortOrder.ASC)); - searchSourceBuilder.size(properties.getTaskLogResultLimit()); - - // Generate the actual request to send to ES. - String docType = StringUtils.isBlank(docTypeOverride) ? 
LOG_DOC_TYPE : docTypeOverride; - SearchRequest searchRequest = new SearchRequest(logIndexPrefix + "*"); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - return mapTaskExecLogsResponse(response); - } catch (Exception e) { - LOGGER.error("Failed to get task execution logs for task: {}", taskId, e); - } - return null; - } - - private List mapTaskExecLogsResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List logs = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); - logs.add(tel); - } - return logs; - } - - @Override - public List getMessages(String queue) { - try { - BoolQueryBuilder query = boolQueryBuilder("queue='" + queue + "'", "*"); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(query); - searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC)); - - // Generate the actual request to send to ES. - String docType = StringUtils.isBlank(docTypeOverride) ? 
MSG_DOC_TYPE : docTypeOverride; - SearchRequest searchRequest = new SearchRequest(messageIndexPrefix + "*"); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - return mapGetMessagesResponse(response); - } catch (Exception e) { - LOGGER.error("Failed to get messages for queue: {}", queue, e); - } - return null; - } - - private List mapGetMessagesResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - TypeFactory factory = TypeFactory.defaultInstance(); - MapType type = factory.constructMapType(HashMap.class, String.class, String.class); - List messages = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - Map mapSource = objectMapper.readValue(source, type); - Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null); - messages.add(msg); - } - return messages; - } - - @Override - public List getEventExecutions(String event) { - try { - BoolQueryBuilder query = boolQueryBuilder("event='" + event + "'", "*"); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(query); - searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC)); - - // Generate the actual request to send to ES. - String docType = - StringUtils.isBlank(docTypeOverride) ? 
EVENT_DOC_TYPE : docTypeOverride; - SearchRequest searchRequest = new SearchRequest(eventIndexPrefix + "*"); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - return mapEventExecutionsResponse(response); - } catch (Exception e) { - LOGGER.error("Failed to get executions for event: {}", event, e); - } - return null; - } - - private List mapEventExecutionsResponse(SearchResponse response) - throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List executions = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - EventExecution tel = objectMapper.readValue(source, EventExecution.class); - executions.add(tel); - } - return executions; - } - - @Override - public void addMessage(String queue, Message message) { - try { - long startTime = Instant.now().toEpochMilli(); - Map doc = new HashMap<>(); - doc.put("messageId", message.getId()); - doc.put("payload", message.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - - String docType = StringUtils.isBlank(docTypeOverride) ? MSG_DOC_TYPE : docTypeOverride; - indexObject(messageIndexName, docType, doc); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing message: {}", - endTime - startTime, - message.getId()); - Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime); - } catch (Exception e) { - LOGGER.error("Failed to index message: {}", message.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService); - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = - eventExecution.getName() - + "." 
- + eventExecution.getEvent() - + "." - + eventExecution.getMessageId() - + "." - + eventExecution.getId(); - - String docType = - StringUtils.isBlank(docTypeOverride) ? EVENT_DOC_TYPE : docTypeOverride; - indexObject(eventIndexName, docType, id, eventExecution); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing event execution: {}", - endTime - startTime, - eventExecution.getId()); - Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - LOGGER.error("Failed to index event execution: {}", eventExecution.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync( - () -> addEventExecution(eventExecution), logExecutorService); - } - - @Override - public SearchResult searchWorkflows( - String query, String freeText, int start, int count, List sort) { - try { - return searchObjectIdsViaExpression( - query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public SearchResult searchTasks( - String query, String freeText, int start, int count, List sort) { - try { - return searchObjectIdsViaExpression(query, start, count, sort, freeText, TASK_DOC_TYPE); - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public void removeWorkflow(String workflowId) { - long startTime = Instant.now().toEpochMilli(); - String docType = StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; - DeleteRequest request = new DeleteRequest(workflowIndexName, docType, workflowId); - - try { - DeleteResponse response = elasticSearchClient.delete(request); - - if (response.getResult() == DocWriteResponse.Result.NOT_FOUND) { - LOGGER.error("Index removal failed - document not found by id: {}", workflowId); - } - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for removing workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (IOException e) { - LOGGER.error("Failed to remove workflow {} from index", workflowId, e); - Monitors.error(className, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - try { - if (keys.length != values.length) { - throw new ApplicationException( - ApplicationException.Code.INVALID_INPUT, - "Number of keys and values do not match"); - } - - long startTime = Instant.now().toEpochMilli(); - String docType = - StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; - UpdateRequest request = - new UpdateRequest(workflowIndexName, docType, workflowInstanceId); - Map source = - IntStream.range(0, keys.length) - .boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - - LOGGER.debug("Updating workflow {} with {}", workflowInstanceId, source); - elasticSearchClient.update(request, RequestOptions.DEFAULT); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for updating workflow: {}", - endTime - startTime, - workflowInstanceId); - Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - LOGGER.error("Failed to update workflow {}", workflowInstanceId, e); - Monitors.error(className, "update"); - } - } - - @Override - public CompletableFuture asyncUpdateWorkflow( - String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.runAsync( - () -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - - String docType = StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; - GetRequest request = new GetRequest(workflowIndexName, docType, workflowInstanceId); - - GetResponse response; - try { - response = elasticSearchClient.get(request); - } catch (IOException e) { - LOGGER.error( - "Unable to get Workflow: {} from ElasticSearch index: {}", - workflowInstanceId, - workflowIndexName, - e); - return null; - } - - if (response.isExists()) { - Map sourceAsMap = response.getSourceAsMap(); - if (sourceAsMap.get(fieldToGet) != null) { - return sourceAsMap.get(fieldToGet).toString(); - } - } - - LOGGER.debug( - "Unable to find Workflow: {} in ElasticSearch index: {}.", - workflowInstanceId, - workflowIndexName); - return null; - } - - private SearchResult searchObjectIdsViaExpression( - String structuredQuery, - int start, - int size, - List sortOptions, - String freeTextQuery, - String docType) - throws ParserException, IOException { - QueryBuilder queryBuilder = boolQueryBuilder(structuredQuery, freeTextQuery); - return searchObjectIds( - getIndexName(docType), queryBuilder, start, size, sortOptions, docType); - } - - private SearchResult searchObjectIds( - String indexName, QueryBuilder queryBuilder, int start, int size, String docType) - throws IOException { - return searchObjectIds(indexName, queryBuilder, start, size, null, docType); - } - - /** - * Tries to find object ids for a given query in an index. - * - * @param indexName The name of the index. - * @param queryBuilder The query to use for searching. - * @param start The start to use. - * @param size The total return size. - * @param sortOptions A list of string options to sort in the form VALUE:ORDER; where ORDER is - * optional and can be either ASC OR DESC. - * @param docType The document type to searchObjectIdsViaExpression for. - * @return The SearchResults which includes the count and IDs that were found. - * @throws IOException If we cannot communicate with ES. 
- */ - private SearchResult searchObjectIds( - String indexName, - QueryBuilder queryBuilder, - int start, - int size, - List sortOptions, - String docType) - throws IOException { - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.from(start); - searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(false); - - if (sortOptions != null && !sortOptions.isEmpty()) { - - for (String sortOption : sortOptions) { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int index = sortOption.indexOf(":"); - if (index > 0) { - field = sortOption.substring(0, index); - order = SortOrder.valueOf(sortOption.substring(index + 1)); - } - searchSourceBuilder.sort(new FieldSortBuilder(field).order(order)); - } - } - - // Generate the actual request to send to ES. - docType = StringUtils.isBlank(docTypeOverride) ? docType : docTypeOverride; - SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - List result = new LinkedList<>(); - response.getHits().forEach(hit -> result.add(hit.getId())); - long count = response.getHits().getTotalHits(); - return new SearchResult<>(count, result); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = - QueryBuilders.boolQuery() - .must( - QueryBuilders.rangeQuery("endTime") - .lt(LocalDate.now().minusDays(archiveTtlDays).toString()) - .gte( - LocalDate.now() - .minusDays(archiveTtlDays) - .minusDays(1) - .toString())) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - .should(QueryBuilders.termQuery("status", "FAILED")) - .should(QueryBuilders.termQuery("status", "TIMED_OUT")) - .should(QueryBuilders.termQuery("status", "TERMINATED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumShouldMatch(1); - - 
SearchResult workflowIds; - try { - workflowIds = searchObjectIds(indexName, q, 0, 1000, WORKFLOW_DOC_TYPE); - } catch (IOException e) { - LOGGER.error("Unable to communicate with ES to find archivable workflows", e); - return Collections.emptyList(); - } - - return workflowIds.getResults(); - } - - @Override - public long getWorkflowCount(String query, String freeText) { - try { - return getObjectCounts(query, freeText, WORKFLOW_DOC_TYPE); - } catch (Exception e) { - throw new ApplicationException( - ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - private long getObjectCounts(String structuredQuery, String freeTextQuery, String docType) - throws ParserException, IOException { - QueryBuilder queryBuilder = boolQueryBuilder(structuredQuery, freeTextQuery); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - sourceBuilder.query(queryBuilder); - - String indexName = getIndexName(docType); - CountRequest countRequest = new CountRequest(new String[] {indexName}, sourceBuilder); - CountResponse countResponse = - elasticSearchClient.count(countRequest, RequestOptions.DEFAULT); - return countResponse.getCount(); - } - - private void indexObject(final String index, final String docType, final Object doc) { - indexObject(index, docType, null, doc); - } - - private void indexObject( - final String index, final String docType, final String docId, final Object doc) { - - byte[] docBytes; - try { - docBytes = objectMapper.writeValueAsBytes(doc); - } catch (JsonProcessingException e) { - LOGGER.error("Failed to convert {} '{}' to byte string", docType, docId); - return; - } - - IndexRequest request = new IndexRequest(index, docType, docId); - request.source(docBytes, XContentType.JSON); - - if (bulkRequests.get(docType) == null) { - bulkRequests.put( - docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest())); - } - - bulkRequests.get(docType).getBulkRequest().add(request); - if 
(bulkRequests.get(docType).getBulkRequest().numberOfActions() >= this.indexBatchSize) { - indexBulkRequest(docType); - } - } - - private synchronized void indexBulkRequest(String docType) { - if (bulkRequests.get(docType).getBulkRequest() != null - && bulkRequests.get(docType).getBulkRequest().numberOfActions() > 0) { - synchronized (bulkRequests.get(docType).getBulkRequest()) { - indexWithRetry( - bulkRequests.get(docType).getBulkRequest().get(), - "Bulk Indexing " + docType, - docType); - bulkRequests.put( - docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest())); - } - } - } - - /** - * Performs an index operation with a retry. - * - * @param request The index request that we want to perform. - * @param operationDescription The type of operation that we are performing. - */ - private void indexWithRetry( - final BulkRequest request, final String operationDescription, String docType) { - try { - long startTime = Instant.now().toEpochMilli(); - retryTemplate.execute( - context -> elasticSearchClient.bulk(request, RequestOptions.DEFAULT)); - long endTime = Instant.now().toEpochMilli(); - LOGGER.debug( - "Time taken {} for indexing object of type: {}", endTime - startTime, docType); - Monitors.recordESIndexTime("index_object", docType, endTime - startTime); - Monitors.recordWorkerQueueSize( - "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - Monitors.recordWorkerQueueSize( - "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(className, "index"); - LOGGER.error("Failed to index {} for request type: {}", request, docType, e); - } - } - - /** - * Flush the buffers if bulk requests have not been indexed for the past {@link - * ElasticSearchProperties#getAsyncBufferFlushTimeout()} seconds. This is to prevent data loss - * in case the instance is terminated, while the buffer still holds documents to be indexed. 
- */ - private void flushBulkRequests() { - bulkRequests.entrySet().stream() - .filter( - entry -> - (System.currentTimeMillis() - entry.getValue().getLastFlushTime()) - >= asyncBufferFlushTimeout) - .filter( - entry -> - entry.getValue().getBulkRequest() != null - && entry.getValue().getBulkRequest().numberOfActions() > 0) - .forEach( - entry -> { - LOGGER.debug( - "Flushing bulk request buffer for type {}, size: {}", - entry.getKey(), - entry.getValue().getBulkRequest().numberOfActions()); - indexBulkRequest(entry.getKey()); - }); - } - - private static class BulkRequests { - - private final long lastFlushTime; - private final BulkRequestWrapper bulkRequest; - - long getLastFlushTime() { - return lastFlushTime; - } - - BulkRequestWrapper getBulkRequest() { - return bulkRequest; - } - - BulkRequests(long lastFlushTime, BulkRequest bulkRequest) { - this.lastFlushTime = lastFlushTime; - this.bulkRequest = new BulkRequestWrapper(bulkRequest); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/Expression.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/Expression.java deleted file mode 100644 index 9ab2dfe41..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/Expression.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode; -import com.netflix.conductor.es6.dao.query.parser.internal.BooleanOp; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; - -public class Expression extends AbstractNode implements FilterProvider { - - private NameValue nameVal; - private GroupedExpression ge; - private BooleanOp op; - private Expression rhs; - - public Expression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(1); - - if (peeked[0] == '(') { - this.ge = new GroupedExpression(is); - } else { - this.nameVal = new NameValue(is); - } - - peeked = peek(3); - if (isBoolOpr(peeked)) { - // we have an expression next - this.op = new BooleanOp(is); - this.rhs = new Expression(is); - } - } - - public boolean isBinaryExpr() { - return this.op != null; - } - - public BooleanOp getOperator() { - return this.op; - } - - public Expression getRightHandSide() { - return this.rhs; - } - - public boolean isNameValue() { - return this.nameVal != null; - } - - public NameValue getNameValue() { - return this.nameVal; - } - - public GroupedExpression getGroupedExpression() { - return this.ge; - } - - @Override - public QueryBuilder getFilterBuilder() { - QueryBuilder lhs = null; - if (nameVal != null) { - lhs = nameVal.getFilterBuilder(); - } else 
{ - lhs = ge.getFilterBuilder(); - } - - if (this.isBinaryExpr()) { - QueryBuilder rhsFilter = rhs.getFilterBuilder(); - if (this.op.isAnd()) { - return QueryBuilders.boolQuery().must(lhs).must(rhsFilter); - } else { - return QueryBuilders.boolQuery().should(lhs).should(rhsFilter); - } - } else { - return lhs; - } - } - - @Override - public String toString() { - if (isBinaryExpr()) { - return "" + (nameVal == null ? ge : nameVal) + op + rhs; - } else { - return "" + (nameVal == null ? ge : nameVal); - } - } - - public static Expression fromString(String value) throws ParserException { - return new Expression(new BufferedInputStream(new ByteArrayInputStream(value.getBytes()))); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/FilterProvider.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/FilterProvider.java deleted file mode 100644 index 3c145975d..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/FilterProvider.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser; - -import org.elasticsearch.index.query.QueryBuilder; - -public interface FilterProvider { - - /** - * @return FilterBuilder for elasticsearch - */ - public QueryBuilder getFilterBuilder(); -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/GroupedExpression.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/GroupedExpression.java deleted file mode 100644 index 90fc3b489..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/GroupedExpression.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser; - -import java.io.InputStream; - -import org.elasticsearch.index.query.QueryBuilder; - -import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; - -public class GroupedExpression extends AbstractNode implements FilterProvider { - - private Expression expression; - - public GroupedExpression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - - this.expression = new Expression(is); - - peeked = read(1); - assertExpected(peeked, ")"); - } - - @Override - public String toString() { - return "(" + expression + ")"; - } - - /** - * @return the expression - */ - public Expression getExpression() { - return expression; - } - - @Override - public QueryBuilder getFilterBuilder() { - return expression.getFilterBuilder(); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/NameValue.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/NameValue.java deleted file mode 100644 index 68c1e5af2..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/NameValue.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser; - -import java.io.InputStream; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode; -import com.netflix.conductor.es6.dao.query.parser.internal.ComparisonOp; -import com.netflix.conductor.es6.dao.query.parser.internal.ComparisonOp.Operators; -import com.netflix.conductor.es6.dao.query.parser.internal.ConstValue; -import com.netflix.conductor.es6.dao.query.parser.internal.ListConst; -import com.netflix.conductor.es6.dao.query.parser.internal.Name; -import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; -import com.netflix.conductor.es6.dao.query.parser.internal.Range; - -/** - * - * - *

    - * Represents an expression of the form as below:
    - * key OPR value
    - * OPR is the comparison operator which could be one of the following:
    - * 	>, <, = , !=, IN, BETWEEN
    - * 
    - */ -public class NameValue extends AbstractNode implements FilterProvider { - - private Name name; - - private ComparisonOp op; - - private ConstValue value; - - private Range range; - - private ListConst valueList; - - public NameValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.name = new Name(is); - this.op = new ComparisonOp(is); - - if (this.op.getOperator().equals(Operators.BETWEEN.value())) { - this.range = new Range(is); - } - if (this.op.getOperator().equals(Operators.IN.value())) { - this.valueList = new ListConst(is); - } else { - this.value = new ConstValue(is); - } - } - - @Override - public String toString() { - return "" + name + op + value; - } - - /** - * @return the name - */ - public Name getName() { - return name; - } - - /** - * @return the op - */ - public ComparisonOp getOp() { - return op; - } - - /** - * @return the value - */ - public ConstValue getValue() { - return value; - } - - @Override - public QueryBuilder getFilterBuilder() { - if (op.getOperator().equals(Operators.EQUALS.value())) { - return QueryBuilders.queryStringQuery( - name.getName() + ":" + value.getValue().toString()); - } else if (op.getOperator().equals(Operators.BETWEEN.value())) { - return QueryBuilders.rangeQuery(name.getName()) - .from(range.getLow()) - .to(range.getHigh()); - } else if (op.getOperator().equals(Operators.IN.value())) { - return QueryBuilders.termsQuery(name.getName(), valueList.getList()); - } else if (op.getOperator().equals(Operators.NOT_EQUALS.value())) { - return QueryBuilders.queryStringQuery( - "NOT " + name.getName() + ":" + value.getValue().toString()); - } else if (op.getOperator().equals(Operators.GREATER_THAN.value())) { - return QueryBuilders.rangeQuery(name.getName()) - .from(value.getValue()) - .includeLower(false) - .includeUpper(false); - } else if (op.getOperator().equals(Operators.IS.value())) { - if 
(value.getSysConstant().equals(ConstValue.SystemConsts.NULL)) { - return QueryBuilders.boolQuery() - .mustNot( - QueryBuilders.boolQuery() - .must(QueryBuilders.matchAllQuery()) - .mustNot(QueryBuilders.existsQuery(name.getName()))); - } else if (value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)) { - return QueryBuilders.boolQuery() - .mustNot( - QueryBuilders.boolQuery() - .must(QueryBuilders.matchAllQuery()) - .must(QueryBuilders.existsQuery(name.getName()))); - } - } else if (op.getOperator().equals(Operators.LESS_THAN.value())) { - return QueryBuilders.rangeQuery(name.getName()) - .to(value.getValue()) - .includeLower(false) - .includeUpper(false); - } else if (op.getOperator().equals(Operators.STARTS_WITH.value())) { - return QueryBuilders.prefixQuery(name.getName(), value.getUnquotedValue()); - } - - throw new IllegalStateException("Incorrect/unsupported operators"); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/AbstractNode.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/AbstractNode.java deleted file mode 100644 index 2d5fe84ad..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/AbstractNode.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; -import java.math.BigDecimal; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; - -public abstract class AbstractNode { - - public static final Pattern WHITESPACE = Pattern.compile("\\s"); - - protected static Set comparisonOprs = new HashSet<>(); - - static { - comparisonOprs.add('>'); - comparisonOprs.add('<'); - comparisonOprs.add('='); - } - - protected InputStream is; - - protected AbstractNode(InputStream is) throws ParserException { - this.is = is; - this.parse(); - } - - protected boolean isNumber(String test) { - try { - // If you can convert to a big decimal value, then it is a number. - new BigDecimal(test); - return true; - - } catch (NumberFormatException e) { - // Ignore - } - return false; - } - - protected boolean isBoolOpr(byte[] buffer) { - if (buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R') { - return true; - } else { - return buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D'; - } - } - - protected boolean isComparisonOpr(byte[] buffer) { - if (buffer[0] == 'I' && buffer[1] == 'N') { - return true; - } else if (buffer[0] == '!' 
&& buffer[1] == '=') { - return true; - } else { - return comparisonOprs.contains((char) buffer[0]); - } - } - - protected byte[] peek(int length) throws Exception { - return read(length, true); - } - - protected byte[] read(int length) throws Exception { - return read(length, false); - } - - protected String readToken() throws Exception { - skipWhitespace(); - StringBuilder sb = new StringBuilder(); - while (is.available() > 0) { - char c = (char) peek(1)[0]; - if (c == ' ' || c == '\t' || c == '\n' || c == '\r') { - is.skip(1); - break; - } else if (c == '=' || c == '>' || c == '<' || c == '!') { - // do not skip - break; - } - sb.append(c); - is.skip(1); - } - return sb.toString().trim(); - } - - protected boolean isNumeric(char c) { - return c == '-' || c == 'e' || (c >= '0' && c <= '9') || c == '.'; - } - - protected void assertExpected(byte[] found, String expected) throws ParserException { - assertExpected(new String(found), expected); - } - - protected void assertExpected(String found, String expected) throws ParserException { - if (!found.equals(expected)) { - throw new ParserException("Expected " + expected + ", found " + found); - } - } - - protected void assertExpected(char found, char expected) throws ParserException { - if (found != expected) { - throw new ParserException("Expected " + expected + ", found " + found); - } - } - - protected static void efor(int length, FunctionThrowingException consumer) - throws Exception { - for (int i = 0; i < length; i++) { - consumer.accept(i); - } - } - - protected abstract void _parse() throws Exception; - - // Public stuff here - private void parse() throws ParserException { - // skip white spaces - skipWhitespace(); - try { - _parse(); - } catch (Exception e) { - if (!(e instanceof ParserException)) { - throw new ParserException("Error parsing", e); - } else { - throw (ParserException) e; - } - } - skipWhitespace(); - } - - // Private methods - - private byte[] read(int length, boolean peekOnly) throws 
Exception { - byte[] buf = new byte[length]; - if (peekOnly) { - is.mark(length); - } - efor(length, (Integer c) -> buf[c] = (byte) is.read()); - if (peekOnly) { - is.reset(); - } - return buf; - } - - protected void skipWhitespace() throws ParserException { - try { - while (is.available() > 0) { - byte c = peek(1)[0]; - if (c == ' ' || c == '\t' || c == '\n' || c == '\r') { - // skip - read(1); - } else { - break; - } - } - } catch (Exception e) { - throw new ParserException(e.getMessage(), e); - } - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/BooleanOp.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/BooleanOp.java deleted file mode 100644 index ccdc15a55..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/BooleanOp.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; - -public class BooleanOp extends AbstractNode { - - private String value; - - public BooleanOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] buffer = peek(3); - if (buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R') { - this.value = "OR"; - } else if (buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D') { - this.value = "AND"; - } else { - throw new ParserException("No valid boolean operator found..."); - } - read(this.value.length()); - } - - @Override - public String toString() { - return " " + value + " "; - } - - public String getOperator() { - return value; - } - - public boolean isAnd() { - return "AND".equals(value); - } - - public boolean isOr() { - return "OR".equals(value); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ComparisonOp.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ComparisonOp.java deleted file mode 100644 index 10d44863d..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ComparisonOp.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; - -public class ComparisonOp extends AbstractNode { - - public enum Operators { - BETWEEN("BETWEEN"), - EQUALS("="), - LESS_THAN("<"), - GREATER_THAN(">"), - IN("IN"), - NOT_EQUALS("!="), - IS("IS"), - STARTS_WITH("STARTS_WITH"); - - private final String value; - - Operators(String value) { - this.value = value; - } - - public String value() { - return value; - } - } - - static { - int max = 0; - for (Operators op : Operators.values()) { - max = Math.max(max, op.value().length()); - } - maxOperatorLength = max; - } - - private static final int maxOperatorLength; - - private static final int betweenLen = Operators.BETWEEN.value().length(); - private static final int startsWithLen = Operators.STARTS_WITH.value().length(); - - private String value; - - public ComparisonOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(maxOperatorLength); - if (peeked[0] == '=' || peeked[0] == '>' || peeked[0] == '<') { - this.value = new String(peeked, 0, 1); - } else if (peeked[0] == 'I' && peeked[1] == 'N') { - this.value = "IN"; - } else if (peeked[0] == 'I' && peeked[1] == 'S') { - this.value = "IS"; - } else if (peeked[0] == '!' 
&& peeked[1] == '=') { - this.value = "!="; - } else if (peeked.length >= betweenLen - && peeked[0] == 'B' - && peeked[1] == 'E' - && peeked[2] == 'T' - && peeked[3] == 'W' - && peeked[4] == 'E' - && peeked[5] == 'E' - && peeked[6] == 'N') { - this.value = Operators.BETWEEN.value(); - } else if (peeked.length == startsWithLen - && new String(peeked).equals(Operators.STARTS_WITH.value())) { - this.value = Operators.STARTS_WITH.value(); - } else { - throw new ParserException( - "Expecting an operator (=, >, <, !=, BETWEEN, IN, STARTS_WITH), but found none. Peeked=>" - + new String(peeked)); - } - - read(this.value.length()); - } - - @Override - public String toString() { - return " " + value + " "; - } - - public String getOperator() { - return value; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ConstValue.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ConstValue.java deleted file mode 100644 index 47bebce6b..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ConstValue.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; - -/** - * Constant value can be: - * - *

      - *
    1. List of values (a,b,c) - *
    2. Range of values (m AND n) - *
    3. A value (x) - *
    4. A value is either a string or a number - *
    - */ -public class ConstValue extends AbstractNode { - - public enum SystemConsts { - NULL("null"), - NOT_NULL("not null"); - private final String value; - - SystemConsts(String value) { - this.value = value; - } - - public String value() { - return value; - } - } - - private static final String QUOTE = "\""; - - private Object value; - - private SystemConsts sysConsts; - - public ConstValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(4); - String sp = new String(peeked).trim(); - // Read a constant value (number or a string) - if (peeked[0] == '"' || peeked[0] == '\'') { - this.value = readString(is); - } else if (sp.toLowerCase().startsWith("not")) { - this.value = SystemConsts.NOT_NULL.value(); - sysConsts = SystemConsts.NOT_NULL; - read(SystemConsts.NOT_NULL.value().length()); - } else if (sp.equalsIgnoreCase(SystemConsts.NULL.value())) { - this.value = SystemConsts.NULL.value(); - sysConsts = SystemConsts.NULL; - read(SystemConsts.NULL.value().length()); - } else { - this.value = readNumber(is); - } - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while (is.available() > 0) { - is.mark(1); - char c = (char) is.read(); - if (!isNumeric(c)) { - is.reset(); - break; - } else { - sb.append(c); - } - } - return sb.toString().trim(); - } - - /** - * Reads an escaped string - * - * @throws Exception - */ - private String readString(InputStream is) throws Exception { - char delim = (char) read(1)[0]; - StringBuilder sb = new StringBuilder(); - boolean valid = false; - while (is.available() > 0) { - char c = (char) is.read(); - if (c == delim) { - valid = true; - break; - } else if (c == '\\') { - // read the next character as part of the value - c = (char) is.read(); - sb.append(c); - } else { - sb.append(c); - } - } - if (!valid) { - throw new ParserException( - "String constant is not quoted with <" + 
delim + "> : " + sb.toString()); - } - return QUOTE + sb.toString() + QUOTE; - } - - public Object getValue() { - return value; - } - - @Override - public String toString() { - return "" + value; - } - - public String getUnquotedValue() { - String result = toString(); - if (result.length() >= 2 && result.startsWith(QUOTE) && result.endsWith(QUOTE)) { - result = result.substring(1, result.length() - 1); - } - return result; - } - - public boolean isSysConstant() { - return this.sysConsts != null; - } - - public SystemConsts getSysConstant() { - return this.sysConsts; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/FunctionThrowingException.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/FunctionThrowingException.java deleted file mode 100644 index afb9b7df1..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/FunctionThrowingException.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -@FunctionalInterface -public interface FunctionThrowingException { - - void accept(T t) throws Exception; -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ListConst.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ListConst.java deleted file mode 100644 index 3efda6b3e..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ListConst.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; -import java.util.LinkedList; -import java.util.List; - -/** List of constants */ -public class ListConst extends AbstractNode { - - private List values; - - public ListConst(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - this.values = readList(); - } - - private List readList() throws Exception { - List list = new LinkedList<>(); - boolean valid = false; - char c; - - StringBuilder sb = new StringBuilder(); - while (is.available() > 0) { - c = (char) is.read(); - if (c == ')') { - valid = true; - break; - } else if (c == ',') { - list.add(sb.toString().trim()); - sb = new StringBuilder(); - } else { - sb.append(c); - } - } - list.add(sb.toString().trim()); - if (!valid) { - throw new ParserException("Expected ')' but never encountered in the stream"); - } - return list; - } - - public List getList() { - return values; - } - - @Override - public String toString() { - return values.toString(); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Name.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Name.java deleted file mode 100644 index a26945602..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Name.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; - -/** Represents the name of the field to be searched against. */ -public class Name extends AbstractNode { - - private String value; - - public Name(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.value = readToken(); - } - - @Override - public String toString() { - return value; - } - - public String getName() { - return value; - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ParserException.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ParserException.java deleted file mode 100644 index 0b946c475..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ParserException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -@SuppressWarnings("serial") -public class ParserException extends Exception { - - public ParserException(String message) { - super(message); - } - - public ParserException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Range.java b/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Range.java deleted file mode 100644 index aa4c66f7a..000000000 --- a/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Range.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.InputStream; - -public class Range extends AbstractNode { - - private String low; - - private String high; - - public Range(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.low = readNumber(is); - - skipWhitespace(); - byte[] peeked = read(3); - assertExpected(peeked, "AND"); - skipWhitespace(); - - String num = readNumber(is); - if ("".equals(num)) { - throw new ParserException("Missing the upper range value..."); - } - this.high = num; - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while (is.available() > 0) { - is.mark(1); - char c = (char) is.read(); - if (!isNumeric(c)) { - is.reset(); - break; - } else { - sb.append(c); - } - } - return sb.toString().trim(); - } - - /** - * @return the low - */ - public String getLow() { - return low; - } - - /** - * @return the high - */ - public String getHigh() { - return high; - } - - @Override - public String toString() { - return low + " AND " + high; - } -} diff --git a/es6-persistence/src/main/resources/mappings_docType_task.json b/es6-persistence/src/main/resources/mappings_docType_task.json deleted file mode 100644 index 2a90be39c..000000000 --- a/es6-persistence/src/main/resources/mappings_docType_task.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "task": { - "properties": { - "correlationId": { - "type": "keyword", - "index": true - }, - "endTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "executionTime": { - "type": "long" - }, - "input": { 
- "type": "text", - "index": true - }, - "output": { - "type": "text", - "index": true - }, - "queueWaitTime": { - "type": "long" - }, - "reasonForIncompletion": { - "type": "keyword", - "index": true - }, - "scheduledTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "startTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "status": { - "type": "keyword", - "index": true - }, - "taskDefName": { - "type": "keyword", - "index": true - }, - "taskId": { - "type": "keyword", - "index": true - }, - "taskType": { - "type": "keyword", - "index": true - }, - "updateTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "workflowId": { - "type": "keyword", - "index": true - }, - "workflowType": { - "type": "keyword", - "index": true - } - } - } -} \ No newline at end of file diff --git a/es6-persistence/src/main/resources/mappings_docType_workflow.json b/es6-persistence/src/main/resources/mappings_docType_workflow.json deleted file mode 100644 index abec7535c..000000000 --- a/es6-persistence/src/main/resources/mappings_docType_workflow.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "workflow": { - "properties": { - "correlationId": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "endTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "executionTime": { - "type": "long", - "doc_values": true - }, - "failedReferenceTaskNames": { - "type": "text", - "index": false - }, - "input": { - "type": "text", - "index": true - }, - "output": { - "type": "text", - "index": true - }, - "reasonForIncompletion": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "startTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "status": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "updateTime": { - "type": "date", - "format": 
"strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "version": { - "type": "long", - "doc_values": true - }, - "workflowId": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "workflowType": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "rawJSON": { - "type": "text", - "index": false - }, - "event": { - "type": "keyword", - "index": true - } - } - } -} \ No newline at end of file diff --git a/es6-persistence/src/main/resources/template_event.json b/es6-persistence/src/main/resources/template_event.json deleted file mode 100644 index 3b605a377..000000000 --- a/es6-persistence/src/main/resources/template_event.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "order": 0, - "template": "*event*", - "settings": { - "index": { - "refresh_interval": "1s" - } - }, - "mappings": { - "event": { - "properties": { - "action": { - "type": "keyword", - "index": true - }, - "created": { - "type": "long" - }, - "event": { - "type": "keyword", - "index": true - }, - "id": { - "type": "keyword", - "index": true - }, - "messageId": { - "type": "keyword", - "index": true - }, - "name": { - "type": "keyword", - "index": true - }, - "output": { - "properties": { - "workflowId": { - "type": "keyword", - "index": true - } - } - }, - "status": { - "type": "keyword", - "index": true - } - } - } - }, - "aliases": {} -} \ No newline at end of file diff --git a/es6-persistence/src/main/resources/template_message.json b/es6-persistence/src/main/resources/template_message.json deleted file mode 100644 index 3f7857732..000000000 --- a/es6-persistence/src/main/resources/template_message.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "order": 0, - "template": "*message*", - "settings": { - "index": { - "refresh_interval": "1s" - } - }, - "mappings": { - "message": { - "properties": { - "created": { - "type": "long" - }, - "messageId": { - "type": "keyword", - "index": true - }, - "payload": { - "type": "keyword", - "index": true - }, - "queue": { - 
"type": "keyword", - "index": true - } - } - } - }, - "aliases": {} -} \ No newline at end of file diff --git a/es6-persistence/src/main/resources/template_task_log.json b/es6-persistence/src/main/resources/template_task_log.json deleted file mode 100644 index ebe63ccf3..000000000 --- a/es6-persistence/src/main/resources/template_task_log.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "order": 0, - "template": "*task*log*", - "settings": { - "index": { - "refresh_interval": "1s" - } - }, - "mappings": { - "task_log": { - "properties": { - "createdTime": { - "type": "long" - }, - "log": { - "type": "keyword", - "index": true - }, - "taskId": { - "type": "keyword", - "index": true - } - } - } - }, - "aliases": {} -} \ No newline at end of file diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java deleted file mode 100644 index 529f22e12..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.net.InetAddress; -import java.util.concurrent.ExecutionException; - -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.transport.client.PreBuiltTransportClient; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.springframework.retry.support.RetryTemplate; - -abstract class ElasticSearchDaoBaseTest extends ElasticSearchTest { - - protected TransportClient elasticSearchClient; - protected ElasticSearchDAOV6 indexDAO; - - @Before - public void setup() throws Exception { - int mappedPort = container.getMappedPort(9300); - properties.setUrl("tcp://localhost:" + mappedPort); - - Settings settings = - Settings.builder().put("client.transport.ignore_cluster_name", true).build(); - - elasticSearchClient = - new PreBuiltTransportClient(settings) - .addTransportAddress( - new TransportAddress( - InetAddress.getByName("localhost"), mappedPort)); - - indexDAO = - new ElasticSearchDAOV6( - elasticSearchClient, new RetryTemplate(), properties, objectMapper); - indexDAO.setup(); - } - - @AfterClass - public static void closeClient() { - container.stop(); - } - - @After - public void tearDown() { - deleteAllIndices(); - - if (elasticSearchClient != null) { - elasticSearchClient.close(); - } - } - - private void 
deleteAllIndices() { - ImmutableOpenMap indices = - elasticSearchClient - .admin() - .cluster() - .prepareState() - .get() - .getState() - .getMetaData() - .getIndices(); - indices.forEach( - cursor -> { - try { - elasticSearchClient - .admin() - .indices() - .delete(new DeleteIndexRequest(cursor.value.getIndex().getName())) - .get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java deleted file mode 100644 index 59584e1dc..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; - -import org.apache.http.HttpHost; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; -import org.junit.After; -import org.junit.Before; -import org.springframework.retry.support.RetryTemplate; - -abstract class ElasticSearchRestDaoBaseTest extends ElasticSearchTest { - - protected RestClient restClient; - protected ElasticSearchRestDAOV6 indexDAO; - - @Before - public void setup() throws Exception { - String httpHostAddress = container.getHttpHostAddress(); - String host = httpHostAddress.split(":")[0]; - int port = Integer.parseInt(httpHostAddress.split(":")[1]); - - properties.setUrl("http://" + httpHostAddress); - - RestClientBuilder restClientBuilder = RestClient.builder(new HttpHost(host, port, "http")); - restClient = restClientBuilder.build(); - - indexDAO = - new ElasticSearchRestDAOV6( - restClientBuilder, new RetryTemplate(), properties, objectMapper); - indexDAO.setup(); - } - - @After - public void tearDown() throws Exception { - deleteAllIndices(); - - if (restClient != null) { - restClient.close(); - } - } - - private void deleteAllIndices() throws IOException { - Response beforeResponse = restClient.performRequest("GET", "/_cat/indices"); - - Reader streamReader = new InputStreamReader(beforeResponse.getEntity().getContent()); - BufferedReader bufferedReader = new BufferedReader(streamReader); - - String line; - while ((line = bufferedReader.readLine()) != null) { - String[] 
fields = line.split("\\s"); - String endpoint = String.format("/%s", fields[2]); - - restClient.performRequest("DELETE", endpoint); - } - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchTest.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchTest.java deleted file mode 100644 index 5bb2fe78e..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringRunner; -import org.testcontainers.elasticsearch.ElasticsearchContainer; -import org.testcontainers.utility.DockerImageName; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.es6.config.ElasticSearchProperties; - -import com.fasterxml.jackson.databind.ObjectMapper; - -@ContextConfiguration( - classes = {TestObjectMapperConfiguration.class, ElasticSearchTest.TestConfiguration.class}) -@RunWith(SpringRunner.class) -@TestPropertySource( - properties = {"conductor.indexing.enabled=true", "conductor.elasticsearch.version=6"}) -abstract class ElasticSearchTest { - - @Configuration - static class TestConfiguration { - - @Bean - public ElasticSearchProperties elasticSearchProperties() { - return new ElasticSearchProperties(); - } - } - - protected static final ElasticsearchContainer container = - new ElasticsearchContainer( - DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss") - .withTag("6.8.12")); // this should match the client version - - @Autowired protected ObjectMapper objectMapper; - - @Autowired protected ElasticSearchProperties properties; - - @BeforeClass - public static 
void startServer() { - container.start(); - } - - @AfterClass - public static void stopServer() { - container.stop(); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6.java deleted file mode 100644 index fc6517be7..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6.java +++ /dev/null @@ -1,414 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.concurrent.ExecutionException; -import java.util.function.Supplier; - -import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.es6.utils.TestUtils; - -import com.google.common.collect.ImmutableMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class TestElasticSearchDAOV6 extends ElasticSearchDaoBaseTest { - - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private static final String INDEX_PREFIX = "conductor"; - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String MSG_DOC_TYPE = "message"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String LOG_INDEX_PREFIX = "task_log"; - - @Test - public void assertInitialSetup() { - 
SIMPLE_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT")); - - String workflowIndex = INDEX_PREFIX + "_" + WORKFLOW_DOC_TYPE; - String taskIndex = INDEX_PREFIX + "_" + TASK_DOC_TYPE; - - String taskLogIndex = - INDEX_PREFIX + "_" + LOG_INDEX_PREFIX + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - String messageIndex = - INDEX_PREFIX + "_" + MSG_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - String eventIndex = - INDEX_PREFIX + "_" + EVENT_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - assertTrue("Index 'conductor_workflow' should exist", indexExists("conductor_workflow")); - assertTrue("Index 'conductor_task' should exist", indexExists("conductor_task")); - - assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex)); - assertTrue("Index '" + messageIndex + "' should exist", indexExists(messageIndex)); - assertTrue("Index '" + eventIndex + "' should exist", indexExists(eventIndex)); - - assertTrue( - "Mapping 'workflow' for index 'conductor' should exist", - doesMappingExist(workflowIndex, WORKFLOW_DOC_TYPE)); - assertTrue( - "Mapping 'task' for index 'conductor' should exist", - doesMappingExist(taskIndex, TASK_DOC_TYPE)); - } - - private boolean indexExists(final String index) { - IndicesExistsRequest request = new IndicesExistsRequest(index); - try { - return elasticSearchClient.admin().indices().exists(request).get().isExists(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - } - - private boolean doesMappingExist(final String index, final String mappingName) { - GetMappingsRequest request = new GetMappingsRequest().indices(index); - try { - GetMappingsResponse response = - elasticSearchClient.admin().indices().getMappings(request).get(); - - return response.getMappings().get(index).containsKey(mappingName); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - } - - @Test - public void shouldIndexWorkflow() { - 
WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - - assertWorkflowSummary(workflow.getWorkflowId(), workflow); - } - - @Test - public void shouldIndexWorkflowAsync() throws Exception { - WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.asyncIndexWorkflow(workflow).get(); - - assertWorkflowSummary(workflow.getWorkflowId(), workflow); - } - - @Test - public void shouldRemoveWorkflow() { - WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - - // wait for workflow to be indexed - List workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 1); - assertEquals(1, workflows.size()); - - indexDAO.removeWorkflow(workflow.getWorkflowId()); - - workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 0); - - assertTrue("Workflow was not removed.", workflows.isEmpty()); - } - - @Test - public void shouldAsyncRemoveWorkflow() throws Exception { - WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - - // wait for workflow to be indexed - List workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 1); - assertEquals(1, workflows.size()); - - indexDAO.asyncRemoveWorkflow(workflow.getWorkflowId()).get(); - - workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 0); - - assertTrue("Workflow was not removed.", workflows.isEmpty()); - } - - @Test - public void shouldUpdateWorkflow() { - WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - - indexDAO.updateWorkflow( - workflow.getWorkflowId(), - new String[] {"status"}, - new Object[] {WorkflowStatus.COMPLETED}); - - workflow.setStatus(WorkflowStatus.COMPLETED); - 
assertWorkflowSummary(workflow.getWorkflowId(), workflow); - } - - @Test - public void shouldAsyncUpdateWorkflow() throws Exception { - WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - - indexDAO.asyncUpdateWorkflow( - workflow.getWorkflowId(), - new String[] {"status"}, - new Object[] {WorkflowStatus.FAILED}) - .get(); - - workflow.setStatus(WorkflowStatus.FAILED); - assertWorkflowSummary(workflow.getWorkflowId(), workflow); - } - - @Test - public void shouldIndexTask() { - TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary"); - indexDAO.indexTask(taskSummary); - - List tasks = tryFindResults(() -> searchTasks(taskSummary)); - - assertEquals(taskSummary.getTaskId(), tasks.get(0)); - } - - @Test - public void shouldIndexTaskAsync() throws Exception { - TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary"); - - indexDAO.asyncIndexTask(taskSummary).get(); - - List tasks = tryFindResults(() -> searchTasks(taskSummary)); - - assertEquals(taskSummary.getTaskId(), tasks.get(0)); - } - - @Test - public void shouldAddTaskExecutionLogs() { - List logs = new ArrayList<>(); - String taskId = uuid(); - logs.add(createLog(taskId, "log1")); - logs.add(createLog(taskId, "log2")); - logs.add(createLog(taskId, "log3")); - - indexDAO.addTaskExecutionLogs(logs); - - List indexedLogs = - tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3); - - assertEquals(3, indexedLogs.size()); - - assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs)); - } - - @Test - public void shouldAddTaskExecutionLogsAsync() throws Exception { - List logs = new ArrayList<>(); - String taskId = uuid(); - logs.add(createLog(taskId, "log1")); - logs.add(createLog(taskId, "log2")); - logs.add(createLog(taskId, "log3")); - - indexDAO.asyncAddTaskExecutionLogs(logs).get(); - - List indexedLogs = - tryFindResults(() -> 
indexDAO.getTaskExecutionLogs(taskId), 3); - - assertEquals(3, indexedLogs.size()); - - assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs)); - } - - @Test - public void shouldAddMessage() { - String queue = "queue"; - Message message1 = new Message(uuid(), "payload1", null); - Message message2 = new Message(uuid(), "payload2", null); - - indexDAO.addMessage(queue, message1); - indexDAO.addMessage(queue, message2); - - List indexedMessages = tryFindResults(() -> indexDAO.getMessages(queue), 2); - - assertEquals(2, indexedMessages.size()); - - assertTrue( - "Not all messages was indexed", - indexedMessages.containsAll(Arrays.asList(message1, message2))); - } - - @Test - public void shouldAddEventExecution() { - String event = "event"; - EventExecution execution1 = createEventExecution(event); - EventExecution execution2 = createEventExecution(event); - - indexDAO.addEventExecution(execution1); - indexDAO.addEventExecution(execution2); - - List indexedExecutions = - tryFindResults(() -> indexDAO.getEventExecutions(event), 2); - - assertEquals(2, indexedExecutions.size()); - - assertTrue( - "Not all event executions was indexed", - indexedExecutions.containsAll(Arrays.asList(execution1, execution2))); - } - - @Test - public void shouldAsyncAddEventExecution() throws Exception { - String event = "event2"; - EventExecution execution1 = createEventExecution(event); - EventExecution execution2 = createEventExecution(event); - - indexDAO.asyncAddEventExecution(execution1).get(); - indexDAO.asyncAddEventExecution(execution2).get(); - - List indexedExecutions = - tryFindResults(() -> indexDAO.getEventExecutions(event), 2); - - assertEquals(2, indexedExecutions.size()); - - assertTrue( - "Not all event executions was indexed", - indexedExecutions.containsAll(Arrays.asList(execution1, execution2))); - } - - @Test - public void shouldAddIndexPrefixToIndexTemplate() throws Exception { - String json = TestUtils.loadJsonResource("expected_template_task_log"); - - 
String content = indexDAO.loadTypeMappingSource("/template_task_log.json"); - - assertEquals(json, content); - } - - @Test - public void shouldCountWorkflows() { - int counts = 1100; - for (int i = 0; i < counts; i++) { - WorkflowSummary workflow = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflow); - } - - // wait for workflow to be indexed - long result = tryGetCount(() -> getWorkflowCount("template_workflow", "RUNNING"), counts); - assertEquals(counts, result); - } - - private long tryGetCount(Supplier countFunction, int resultsCount) { - long result = 0; - for (int i = 0; i < 20; i++) { - result = countFunction.get(); - if (result == resultsCount) { - return result; - } - try { - Thread.sleep(100); - } catch (InterruptedException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - return result; - } - - // Get total workflow counts given the name and status - private long getWorkflowCount(String workflowName, String status) { - return indexDAO.getWorkflowCount( - "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"", "*"); - } - - private void assertWorkflowSummary(String workflowId, WorkflowSummary summary) { - assertEquals(summary.getWorkflowType(), indexDAO.get(workflowId, "workflowType")); - assertEquals(String.valueOf(summary.getVersion()), indexDAO.get(workflowId, "version")); - assertEquals(summary.getWorkflowId(), indexDAO.get(workflowId, "workflowId")); - assertEquals(summary.getCorrelationId(), indexDAO.get(workflowId, "correlationId")); - assertEquals(summary.getStartTime(), indexDAO.get(workflowId, "startTime")); - assertEquals(summary.getUpdateTime(), indexDAO.get(workflowId, "updateTime")); - assertEquals(summary.getEndTime(), indexDAO.get(workflowId, "endTime")); - assertEquals(summary.getStatus().name(), indexDAO.get(workflowId, "status")); - assertEquals(summary.getInput(), indexDAO.get(workflowId, "input")); - assertEquals(summary.getOutput(), 
indexDAO.get(workflowId, "output")); - assertEquals( - summary.getReasonForIncompletion(), - indexDAO.get(workflowId, "reasonForIncompletion")); - assertEquals( - String.valueOf(summary.getExecutionTime()), - indexDAO.get(workflowId, "executionTime")); - assertEquals(summary.getEvent(), indexDAO.get(workflowId, "event")); - assertEquals( - summary.getFailedReferenceTaskNames(), - indexDAO.get(workflowId, "failedReferenceTaskNames")); - } - - private List tryFindResults(Supplier> searchFunction) { - return tryFindResults(searchFunction, 1); - } - - private List tryFindResults(Supplier> searchFunction, int resultsCount) { - List result = Collections.emptyList(); - for (int i = 0; i < 20; i++) { - result = searchFunction.get(); - if (result.size() == resultsCount) { - return result; - } - try { - Thread.sleep(100); - } catch (InterruptedException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - return result; - } - - private List searchWorkflows(String workflowId) { - return indexDAO.searchWorkflows( - "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList()) - .getResults(); - } - - private List searchTasks(TaskSummary taskSummary) { - return indexDAO.searchTasks( - "", - "workflowId:\"" + taskSummary.getWorkflowId() + "\"", - 0, - 100, - Collections.emptyList()) - .getResults(); - } - - private TaskExecLog createLog(String taskId, String log) { - TaskExecLog taskExecLog = new TaskExecLog(log); - taskExecLog.setTaskId(taskId); - return taskExecLog; - } - - private EventExecution createEventExecution(String event) { - EventExecution execution = new EventExecution(uuid(), uuid()); - execution.setName("name"); - execution.setEvent(event); - execution.setCreated(System.currentTimeMillis()); - execution.setStatus(EventExecution.Status.COMPLETED); - execution.setAction(EventHandler.Action.Type.start_workflow); - execution.setOutput(ImmutableMap.of("a", 1, "b", 2, "c", 3)); - return execution; - } - - private String uuid() { - return 
UUID.randomUUID().toString(); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java deleted file mode 100644 index 8a1f14ef9..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.util.HashMap; -import java.util.concurrent.TimeUnit; - -import org.junit.Test; -import org.springframework.test.context.TestPropertySource; - -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; - -import com.fasterxml.jackson.core.JsonProcessingException; - -import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@TestPropertySource(properties = "conductor.elasticsearch.indexBatchSize=2") -public class TestElasticSearchDAOV6Batch extends ElasticSearchDaoBaseTest { - - @Test - public void indexTaskWithBatchSizeTwo() { - String correlationId = "some-correlation-id"; - TaskSummary taskSummary = new TaskSummary(); - taskSummary.setTaskId("some-task-id"); - taskSummary.setWorkflowId("some-workflow-instance-id"); - taskSummary.setTaskType("some-task-type"); - taskSummary.setStatus(Status.FAILED); - try { - taskSummary.setInput( - objectMapper.writeValueAsString( - new HashMap() { - { - put("input_key", "input_value"); - } - })); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - taskSummary.setCorrelationId(correlationId); - taskSummary.setTaskDefName("some-task-def-name"); - taskSummary.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(taskSummary); - indexDAO.indexTask(taskSummary); - - await().atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> { - SearchResult result = - indexDAO.searchTasks( - "correlationId='" + correlationId + 
"'", - "*", - 0, - 10000, - null); - - assertTrue( - "should return 1 or more search results", - result.getResults().size() > 0); - assertEquals( - "taskId should match the indexed task", - "some-task-id", - result.getResults().get(0)); - }); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6.java deleted file mode 100644 index 50bcd5a6f..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6.java +++ /dev/null @@ -1,424 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.io.IOException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.TimeZone; -import java.util.UUID; -import java.util.function.Supplier; - -import org.junit.Test; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.es6.utils.TestUtils; - -import com.google.common.collect.ImmutableMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class TestElasticSearchRestDAOV6 extends ElasticSearchRestDaoBaseTest { - - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private static final String INDEX_PREFIX = "conductor"; - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String MSG_DOC_TYPE = "message"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String LOG_INDEX_PREFIX = "task_log"; - - private boolean indexExists(final String index) throws IOException { - return indexDAO.doesResourceExist("/" + index); - } - - private boolean 
doesMappingExist(final String index, final String mappingName) - throws IOException { - return indexDAO.doesResourceExist("/" + index + "/_mapping/" + mappingName); - } - - @Test - public void assertInitialSetup() throws IOException { - SIMPLE_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT")); - - String workflowIndex = INDEX_PREFIX + "_" + WORKFLOW_DOC_TYPE; - String taskIndex = INDEX_PREFIX + "_" + TASK_DOC_TYPE; - - String taskLogIndex = - INDEX_PREFIX + "_" + LOG_INDEX_PREFIX + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - String messageIndex = - INDEX_PREFIX + "_" + MSG_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - String eventIndex = - INDEX_PREFIX + "_" + EVENT_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - assertTrue("Index 'conductor_workflow' should exist", indexExists("conductor_workflow")); - assertTrue("Index 'conductor_task' should exist", indexExists("conductor_task")); - - assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex)); - assertTrue("Index '" + messageIndex + "' should exist", indexExists(messageIndex)); - assertTrue("Index '" + eventIndex + "' should exist", indexExists(eventIndex)); - - assertTrue( - "Mapping 'workflow' for index 'conductor' should exist", - doesMappingExist(workflowIndex, WORKFLOW_DOC_TYPE)); - assertTrue( - "Mapping 'task' for index 'conductor' should exist", - doesMappingExist(taskIndex, TASK_DOC_TYPE)); - } - - @Test - public void shouldIndexWorkflow() { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflowSummary); - - assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary); - } - - @Test - public void shouldIndexWorkflowAsync() throws Exception { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.asyncIndexWorkflow(workflowSummary).get(); - - assertWorkflowSummary(workflowSummary.getWorkflowId(), 
workflowSummary); - } - - @Test - public void shouldRemoveWorkflow() { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflowSummary); - - // wait for workflow to be indexed - List workflows = - tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1); - assertEquals(1, workflows.size()); - - indexDAO.removeWorkflow(workflowSummary.getWorkflowId()); - - workflows = tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 0); - - assertTrue("Workflow was not removed.", workflows.isEmpty()); - } - - @Test - public void shouldAsyncRemoveWorkflow() throws Exception { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflowSummary); - - // wait for workflow to be indexed - List workflows = - tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1); - assertEquals(1, workflows.size()); - - indexDAO.asyncRemoveWorkflow(workflowSummary.getWorkflowId()).get(); - - workflows = tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 0); - - assertTrue("Workflow was not removed.", workflows.isEmpty()); - } - - @Test - public void shouldUpdateWorkflow() { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflowSummary); - - indexDAO.updateWorkflow( - workflowSummary.getWorkflowId(), - new String[] {"status"}, - new Object[] {WorkflowStatus.COMPLETED}); - - workflowSummary.setStatus(WorkflowStatus.COMPLETED); - assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary); - } - - @Test - public void shouldAsyncUpdateWorkflow() throws Exception { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - indexDAO.indexWorkflow(workflowSummary); - - indexDAO.asyncUpdateWorkflow( - workflowSummary.getWorkflowId(), - new 
String[] {"status"}, - new Object[] {WorkflowStatus.FAILED}) - .get(); - - workflowSummary.setStatus(WorkflowStatus.FAILED); - assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary); - } - - @Test - public void shouldIndexTask() { - TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary"); - indexDAO.indexTask(taskSummary); - - List tasks = tryFindResults(() -> searchTasks(taskSummary)); - - assertEquals(taskSummary.getTaskId(), tasks.get(0)); - } - - @Test - public void shouldIndexTaskAsync() throws Exception { - TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary"); - indexDAO.asyncIndexTask(taskSummary).get(); - - List tasks = tryFindResults(() -> searchTasks(taskSummary)); - - assertEquals(taskSummary.getTaskId(), tasks.get(0)); - } - - @Test - public void shouldAddTaskExecutionLogs() { - List logs = new ArrayList<>(); - String taskId = uuid(); - logs.add(createLog(taskId, "log1")); - logs.add(createLog(taskId, "log2")); - logs.add(createLog(taskId, "log3")); - - indexDAO.addTaskExecutionLogs(logs); - - List indexedLogs = - tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3); - - assertEquals(3, indexedLogs.size()); - - assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs)); - } - - @Test - public void shouldAddTaskExecutionLogsAsync() throws Exception { - List logs = new ArrayList<>(); - String taskId = uuid(); - logs.add(createLog(taskId, "log1")); - logs.add(createLog(taskId, "log2")); - logs.add(createLog(taskId, "log3")); - - indexDAO.asyncAddTaskExecutionLogs(logs).get(); - - List indexedLogs = - tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3); - - assertEquals(3, indexedLogs.size()); - - assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs)); - } - - @Test - public void shouldAddMessage() { - String queue = "queue"; - Message message1 = new Message(uuid(), "payload1", null); - Message message2 = new Message(uuid(), "payload2", 
null); - - indexDAO.addMessage(queue, message1); - indexDAO.addMessage(queue, message2); - - List indexedMessages = tryFindResults(() -> indexDAO.getMessages(queue), 2); - - assertEquals(2, indexedMessages.size()); - - assertTrue( - "Not all messages was indexed", - indexedMessages.containsAll(Arrays.asList(message1, message2))); - } - - @Test - public void shouldAddEventExecution() { - String event = "event"; - EventExecution execution1 = createEventExecution(event); - EventExecution execution2 = createEventExecution(event); - - indexDAO.addEventExecution(execution1); - indexDAO.addEventExecution(execution2); - - List indexedExecutions = - tryFindResults(() -> indexDAO.getEventExecutions(event), 2); - - assertEquals(2, indexedExecutions.size()); - - assertTrue( - "Not all event executions was indexed", - indexedExecutions.containsAll(Arrays.asList(execution1, execution2))); - } - - @Test - public void shouldAsyncAddEventExecution() throws Exception { - String event = "event2"; - EventExecution execution1 = createEventExecution(event); - EventExecution execution2 = createEventExecution(event); - - indexDAO.asyncAddEventExecution(execution1).get(); - indexDAO.asyncAddEventExecution(execution2).get(); - - List indexedExecutions = - tryFindResults(() -> indexDAO.getEventExecutions(event), 2); - - assertEquals(2, indexedExecutions.size()); - - assertTrue( - "Not all event executions was indexed", - indexedExecutions.containsAll(Arrays.asList(execution1, execution2))); - } - - @Test - public void shouldAddIndexPrefixToIndexTemplate() throws Exception { - String json = TestUtils.loadJsonResource("expected_template_task_log"); - - String content = indexDAO.loadTypeMappingSource("/template_task_log.json"); - - assertEquals(json, content); - } - - @Test - public void shouldCountWorkflows() { - int counts = 1100; - for (int i = 0; i < counts; i++) { - WorkflowSummary workflowSummary = - TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary"); - 
indexDAO.indexWorkflow(workflowSummary); - } - - // wait for workflow to be indexed - long result = tryGetCount(() -> getWorkflowCount("template_workflow", "RUNNING"), counts); - assertEquals(counts, result); - } - - private long tryGetCount(Supplier countFunction, int resultsCount) { - long result = 0; - for (int i = 0; i < 20; i++) { - result = countFunction.get(); - if (result == resultsCount) { - return result; - } - try { - Thread.sleep(100); - } catch (InterruptedException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - return result; - } - - // Get total workflow counts given the name and status - private long getWorkflowCount(String workflowName, String status) { - return indexDAO.getWorkflowCount( - "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"", "*"); - } - - private void assertWorkflowSummary(String workflowId, WorkflowSummary summary) { - assertEquals(summary.getWorkflowType(), indexDAO.get(workflowId, "workflowType")); - assertEquals(String.valueOf(summary.getVersion()), indexDAO.get(workflowId, "version")); - assertEquals(summary.getWorkflowId(), indexDAO.get(workflowId, "workflowId")); - assertEquals(summary.getCorrelationId(), indexDAO.get(workflowId, "correlationId")); - assertEquals(summary.getStartTime(), indexDAO.get(workflowId, "startTime")); - assertEquals(summary.getUpdateTime(), indexDAO.get(workflowId, "updateTime")); - assertEquals(summary.getEndTime(), indexDAO.get(workflowId, "endTime")); - assertEquals(summary.getStatus().name(), indexDAO.get(workflowId, "status")); - assertEquals(summary.getInput(), indexDAO.get(workflowId, "input")); - assertEquals(summary.getOutput(), indexDAO.get(workflowId, "output")); - assertEquals( - summary.getReasonForIncompletion(), - indexDAO.get(workflowId, "reasonForIncompletion")); - assertEquals( - String.valueOf(summary.getExecutionTime()), - indexDAO.get(workflowId, "executionTime")); - assertEquals(summary.getEvent(), indexDAO.get(workflowId, "event")); - 
assertEquals( - summary.getFailedReferenceTaskNames(), - indexDAO.get(workflowId, "failedReferenceTaskNames")); - } - - private List tryFindResults(Supplier> searchFunction) { - return tryFindResults(searchFunction, 1); - } - - private List tryFindResults(Supplier> searchFunction, int resultsCount) { - List result = Collections.emptyList(); - for (int i = 0; i < 20; i++) { - result = searchFunction.get(); - if (result.size() == resultsCount) { - return result; - } - try { - Thread.sleep(100); - } catch (InterruptedException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - return result; - } - - private List searchWorkflows(String workflowId) { - return indexDAO.searchWorkflows( - "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList()) - .getResults(); - } - - private List searchWorkflows(String workflowName, String status) { - List sortOptions = new ArrayList<>(); - sortOptions.add("startTime:DESC"); - return indexDAO.searchWorkflows( - "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"", - "*", - 0, - 1000, - sortOptions) - .getResults(); - } - - private List searchTasks(TaskSummary taskSummary) { - return indexDAO.searchTasks( - "", - "workflowId:\"" + taskSummary.getWorkflowId() + "\"", - 0, - 100, - Collections.emptyList()) - .getResults(); - } - - private TaskExecLog createLog(String taskId, String log) { - TaskExecLog taskExecLog = new TaskExecLog(log); - taskExecLog.setTaskId(taskId); - return taskExecLog; - } - - private EventExecution createEventExecution(String event) { - EventExecution execution = new EventExecution(uuid(), uuid()); - execution.setName("name"); - execution.setEvent(event); - execution.setCreated(System.currentTimeMillis()); - execution.setStatus(EventExecution.Status.COMPLETED); - execution.setAction(EventHandler.Action.Type.start_workflow); - execution.setOutput(ImmutableMap.of("a", 1, "b", 2, "c", 3)); - return execution; - } - - private String uuid() { - return 
UUID.randomUUID().toString(); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6Batch.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6Batch.java deleted file mode 100644 index 98f503923..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6Batch.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.index; - -import java.util.HashMap; -import java.util.concurrent.TimeUnit; - -import org.junit.Test; -import org.springframework.test.context.TestPropertySource; - -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; - -import com.fasterxml.jackson.core.JsonProcessingException; - -import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@TestPropertySource(properties = "conductor.elasticsearch.indexBatchSize=2") -public class TestElasticSearchRestDAOV6Batch extends ElasticSearchRestDaoBaseTest { - - @Test - public void indexTaskWithBatchSizeTwo() { - String correlationId = "some-correlation-id"; - TaskSummary taskSummary = new TaskSummary(); - taskSummary.setTaskId("some-task-id"); - taskSummary.setWorkflowId("some-workflow-instance-id"); - taskSummary.setTaskType("some-task-type"); - taskSummary.setStatus(Status.FAILED); - try { - taskSummary.setInput( - objectMapper.writeValueAsString( - new HashMap() { - { - put("input_key", "input_value"); - } - })); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - taskSummary.setCorrelationId(correlationId); - taskSummary.setTaskDefName("some-task-def-name"); - taskSummary.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(taskSummary); - indexDAO.indexTask(taskSummary); - - await().atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> { - SearchResult result = - indexDAO.searchTasks( - "correlationId='" + 
correlationId + "'", - "*", - 0, - 10000, - null); - - assertTrue( - "should return 1 or more search results", - result.getResults().size() > 0); - assertEquals( - "taskId should match the indexed task", - "some-task-id", - result.getResults().get(0)); - }); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/TestExpression.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/TestExpression.java deleted file mode 100644 index 8ebe4c83f..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/TestExpression.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import org.junit.Test; - -import com.netflix.conductor.es6.dao.query.parser.internal.ConstValue; -import com.netflix.conductor.es6.dao.query.parser.internal.TestAbstractParser; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class TestExpression extends TestAbstractParser { - - @Test - public void test() throws Exception { - String test = - "type='IMAGE' AND subType ='sdp' AND (metadata.width > 50 OR metadata.height > 50)"; - InputStream inputStream = - new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expression = new Expression(inputStream); - - assertTrue(expression.isBinaryExpr()); - assertNull(expression.getGroupedExpression()); - assertNotNull(expression.getNameValue()); - - NameValue nameValue = expression.getNameValue(); - assertEquals("type", nameValue.getName().getName()); - assertEquals("=", nameValue.getOp().getOperator()); - assertEquals("\"IMAGE\"", nameValue.getValue().getValue()); - - Expression rightHandSide = expression.getRightHandSide(); - assertNotNull(rightHandSide); - assertTrue(rightHandSide.isBinaryExpr()); - - nameValue = rightHandSide.getNameValue(); - assertNotNull(nameValue); // subType = sdp - assertNull(rightHandSide.getGroupedExpression()); - assertEquals("subType", nameValue.getName().getName()); - assertEquals("=", 
nameValue.getOp().getOperator()); - assertEquals("\"sdp\"", nameValue.getValue().getValue()); - - assertEquals("AND", rightHandSide.getOperator().getOperator()); - rightHandSide = rightHandSide.getRightHandSide(); - assertNotNull(rightHandSide); - assertFalse(rightHandSide.isBinaryExpr()); - GroupedExpression groupedExpression = rightHandSide.getGroupedExpression(); - assertNotNull(groupedExpression); - expression = groupedExpression.getExpression(); - assertNotNull(expression); - - assertTrue(expression.isBinaryExpr()); - nameValue = expression.getNameValue(); - assertNotNull(nameValue); - assertEquals("metadata.width", nameValue.getName().getName()); - assertEquals(">", nameValue.getOp().getOperator()); - assertEquals("50", nameValue.getValue().getValue()); - - assertEquals("OR", expression.getOperator().getOperator()); - rightHandSide = expression.getRightHandSide(); - assertNotNull(rightHandSide); - assertFalse(rightHandSide.isBinaryExpr()); - nameValue = rightHandSide.getNameValue(); - assertNotNull(nameValue); - - assertEquals("metadata.height", nameValue.getName().getName()); - assertEquals(">", nameValue.getOp().getOperator()); - assertEquals("50", nameValue.getValue().getValue()); - } - - @Test - public void testWithSysConstants() throws Exception { - String test = "type='IMAGE' AND subType ='sdp' AND description IS null"; - InputStream inputStream = - new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expression = new Expression(inputStream); - - assertTrue(expression.isBinaryExpr()); - assertNull(expression.getGroupedExpression()); - assertNotNull(expression.getNameValue()); - - NameValue nameValue = expression.getNameValue(); - assertEquals("type", nameValue.getName().getName()); - assertEquals("=", nameValue.getOp().getOperator()); - assertEquals("\"IMAGE\"", nameValue.getValue().getValue()); - - Expression rightHandSide = expression.getRightHandSide(); - assertNotNull(rightHandSide); - 
assertTrue(rightHandSide.isBinaryExpr()); - - nameValue = rightHandSide.getNameValue(); - assertNotNull(nameValue); // subType = sdp - assertNull(rightHandSide.getGroupedExpression()); - assertEquals("subType", nameValue.getName().getName()); - assertEquals("=", nameValue.getOp().getOperator()); - assertEquals("\"sdp\"", nameValue.getValue().getValue()); - - assertEquals("AND", rightHandSide.getOperator().getOperator()); - rightHandSide = rightHandSide.getRightHandSide(); - assertNotNull(rightHandSide); - assertFalse(rightHandSide.isBinaryExpr()); - - GroupedExpression groupedExpression = rightHandSide.getGroupedExpression(); - assertNull(groupedExpression); - nameValue = rightHandSide.getNameValue(); - assertNotNull(nameValue); - assertEquals("description", nameValue.getName().getName()); - assertEquals("IS", nameValue.getOp().getOperator()); - - ConstValue constValue = nameValue.getValue(); - assertNotNull(constValue); - assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NULL); - - test = "description IS not null"; - inputStream = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - expression = new Expression(inputStream); - - nameValue = expression.getNameValue(); - assertNotNull(nameValue); - assertEquals("description", nameValue.getName().getName()); - assertEquals("IS", nameValue.getOp().getOperator()); - - constValue = nameValue.getValue(); - assertNotNull(constValue); - assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestAbstractParser.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestAbstractParser.java deleted file mode 100644 index 36a2adc48..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestAbstractParser.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -public abstract class TestAbstractParser { - - protected InputStream getInputStream(String expression) { - return new BufferedInputStream(new ByteArrayInputStream(expression.getBytes())); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestBooleanOp.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestBooleanOp.java deleted file mode 100644 index 216c289a2..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestBooleanOp.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class TestBooleanOp extends TestAbstractParser { - - @Test - public void test() throws Exception { - String[] tests = new String[] {"AND", "OR"}; - for (String test : tests) { - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected = ParserException.class) - public void testInvalid() throws Exception { - String test = "<"; - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestComparisonOp.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestComparisonOp.java deleted file mode 100644 index 3878947df..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestComparisonOp.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class TestComparisonOp extends TestAbstractParser { - - @Test - public void test() throws Exception { - String[] tests = new String[] {"<", ">", "=", "!=", "IN", "BETWEEN", "STARTS_WITH"}; - for (String test : tests) { - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected = ParserException.class) - public void testInvalidOp() throws Exception { - String test = "AND"; - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestConstValue.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestConstValue.java deleted file mode 100644 index 2ae311d54..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestConstValue.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import java.util.List; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class TestConstValue extends TestAbstractParser { - - @Test - public void testStringConst() throws Exception { - String test = "'string value'"; - String expected = - test.replaceAll( - "'", "\""); // Quotes are removed but then the result is double quoted. - ConstValue constValue = new ConstValue(getInputStream(test)); - assertNotNull(constValue.getValue()); - assertEquals(expected, constValue.getValue()); - assertTrue(constValue.getValue() instanceof String); - - test = "\"string value\""; - constValue = new ConstValue(getInputStream(test)); - assertNotNull(constValue.getValue()); - assertEquals(expected, constValue.getValue()); - assertTrue(constValue.getValue() instanceof String); - } - - @Test - public void testSystemConst() throws Exception { - String test = "null"; - ConstValue constValue = new ConstValue(getInputStream(test)); - assertNotNull(constValue.getValue()); - assertTrue(constValue.getValue() instanceof String); - assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NULL); - - test = "not null"; - constValue = new ConstValue(getInputStream(test)); - assertNotNull(constValue.getValue()); - assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - } - - @Test(expected = ParserException.class) - public void testInvalid() throws Exception { - String test = "'string value"; - new ConstValue(getInputStream(test)); - } - - @Test - public void 
testNumConst() throws Exception { - String test = "12345.89"; - ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertTrue( - cv.getValue() - instanceof - String); // Numeric values are stored as string as we are just passing thru - // them to ES - assertEquals(test, cv.getValue()); - } - - @Test - public void testRange() throws Exception { - String test = "50 AND 100"; - Range range = new Range(getInputStream(test)); - assertEquals("50", range.getLow()); - assertEquals("100", range.getHigh()); - } - - @Test(expected = ParserException.class) - public void testBadRange() throws Exception { - String test = "50 AND"; - new Range(getInputStream(test)); - } - - @Test - public void testArray() throws Exception { - String test = "(1, 3, 'name', 'value2')"; - ListConst listConst = new ListConst(getInputStream(test)); - List list = listConst.getList(); - assertEquals(4, list.size()); - assertTrue(list.contains("1")); - assertEquals("'value2'", list.get(3)); // Values are preserved as it is... - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestName.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestName.java deleted file mode 100644 index 3de5abdc0..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestName.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.dao.query.parser.internal; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class TestName extends TestAbstractParser { - - @Test - public void test() throws Exception { - String test = "metadata.en_US.lang "; - Name name = new Name(getInputStream(test)); - String nameVal = name.getName(); - assertNotNull(nameVal); - assertEquals(test.trim(), nameVal); - } -} diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/utils/TestUtils.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/utils/TestUtils.java deleted file mode 100644 index 1102cd494..000000000 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/utils/TestUtils.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.es6.utils; - -import java.nio.charset.StandardCharsets; - -import org.apache.commons.io.FileUtils; -import org.springframework.util.ResourceUtils; - -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.utils.IDGenerator; - -import com.fasterxml.jackson.databind.ObjectMapper; - -public class TestUtils { - - private static final String WORKFLOW_INSTANCE_ID_PLACEHOLDER = "WORKFLOW_INSTANCE_ID"; - - public static WorkflowSummary loadWorkflowSnapshot( - ObjectMapper objectMapper, String resourceFileName) { - try { - String content = loadJsonResource(resourceFileName); - String workflowId = new IDGenerator().generate(); - content = content.replace(WORKFLOW_INSTANCE_ID_PLACEHOLDER, workflowId); - - return objectMapper.readValue(content, WorkflowSummary.class); - } catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - public static TaskSummary loadTaskSnapshot(ObjectMapper objectMapper, String resourceFileName) { - try { - String content = loadJsonResource(resourceFileName); - String workflowId = new IDGenerator().generate(); - content = content.replace(WORKFLOW_INSTANCE_ID_PLACEHOLDER, workflowId); - - return objectMapper.readValue(content, TaskSummary.class); - } catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - public static String loadJsonResource(String resourceFileName) { - try { - return FileUtils.readFileToString( - ResourceUtils.getFile("classpath:" + resourceFileName + ".json"), - StandardCharsets.UTF_8); - } catch (Exception e) { - throw new 
RuntimeException(e.getMessage(), e); - } - } -} diff --git a/es6-persistence/src/test/resources/expected_template_task_log.json b/es6-persistence/src/test/resources/expected_template_task_log.json deleted file mode 100644 index 1d77ff070..000000000 --- a/es6-persistence/src/test/resources/expected_template_task_log.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "order": 0, - "template": "*conductor_task*log*", - "settings": { - "index": { - "refresh_interval": "1s" - } - }, - "mappings": { - "task_log": { - "properties": { - "createdTime": { - "type": "long" - }, - "log": { - "type": "keyword", - "index": true - }, - "taskId": { - "type": "keyword", - "index": true - } - } - } - }, - "aliases": {} -} \ No newline at end of file diff --git a/es6-persistence/src/test/resources/task_summary.json b/es6-persistence/src/test/resources/task_summary.json deleted file mode 100644 index a409a22f1..000000000 --- a/es6-persistence/src/test/resources/task_summary.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "taskId": "9dea4567-0240-4eab-bde8-99f4535ea3fc", - "taskDefName": "templated_task", - "taskType": "templated_task", - "workflowId": "WORKFLOW_INSTANCE_ID", - "workflowType": "template_workflow", - "correlationId": "testTaskDefTemplate", - "scheduledTime": "2021-08-22T05:18:25.121Z", - "startTime": "0", - "endTime": "0", - "updateTime": "2021-08-23T00:18:25.121Z", - "status": "SCHEDULED", - "workflowPriority": 1, - "queueWaitTime": 0, - "executionTime": 0, - "input": "{http_request={method=GET, vipStack=test_stack, body={requestDetails={key1=value1, key2=42}, outputPath=s3://bucket/outputPath, inputPaths=[file://path1, file://path2]}, uri=/get/something}}" -} \ No newline at end of file diff --git a/es6-persistence/src/test/resources/workflow_summary.json b/es6-persistence/src/test/resources/workflow_summary.json deleted file mode 100644 index 443d8464e..000000000 --- a/es6-persistence/src/test/resources/workflow_summary.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "workflowType": 
"template_workflow", - "version": 1, - "workflowId": "WORKFLOW_INSTANCE_ID", - "priority": 1, - "correlationId": "testTaskDefTemplate", - "startTime": 1534983505050, - "updateTime": 1534983505131, - "endTime": 0, - "status": "RUNNING", - "input": "{path1=file://path1, path2=file://path2, requestDetails={key1=value1, key2=42}, outputPath=s3://bucket/outputPath}" -} diff --git a/esb.html b/esb.html new file mode 100644 index 000000000..79f263680 --- /dev/null +++ b/esb.html @@ -0,0 +1,88 @@ + + + + + + + + + + + + + + + + + + + + + MSANose Report + + + +

    + + + +
    +
    +
    + View More on GitHub +

    Enterprise Service Bus Usage (EU)

    +
    +

    An Enterprise Service Bus (ESB) is a way of message passing between modules of a distributed application in which one module acts as a service bus for all of the other modules to pass messages on. There are pros and cons to this approach. However, in microservices, it can become an issue of creating a single point of failure, and increasing coupling, so it should be avoided.

    +
    +
    + +
    +
    + +
    +
    +
    ESB Candidates
    +
    + +
    + +
    +
    +
    + + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/family.properties b/family.properties deleted file mode 100644 index 41ff976d5..000000000 --- a/family.properties +++ /dev/null @@ -1 +0,0 @@ -generation=1 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 41d9927a4..000000000 Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index 5c51a4acc..000000000 --- a/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -distributionSha256Sum=29e49b10984e585d8118b7d0bc452f944e386458df27371b49b4ac1dec4b7fda -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew deleted file mode 100755 index 1b6c78733..000000000 --- a/gradlew +++ /dev/null @@ -1,234 +0,0 @@ -#!/bin/sh - -# -# Copyright © 2015-2021 the original authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -############################################################################## -# -# Gradle start up script for POSIX generated by Gradle. -# -# Important for running: -# -# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is -# noncompliant, but you have some other compliant shell such as ksh or -# bash, then to run this script, type that shell name before the whole -# command line, like: -# -# ksh Gradle -# -# Busybox and similar reduced shells will NOT work, because this script -# requires all of these POSIX shell features: -# * functions; -# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», -# «${var#prefix}», «${var%suffix}», and «$( cmd )»; -# * compound commands having a testable exit status, especially «case»; -# * various built-in commands including «command», «set», and «ulimit». -# -# Important for patching: -# -# (2) This script targets any POSIX shell, so it avoids extensions provided -# by Bash, Ksh, etc; in particular arrays are avoided. -# -# The "traditional" practice of packing multiple parameters into a -# space-separated string is a well documented source of bugs and security -# problems, so this is (mostly) avoided, by progressively accumulating -# options in "$@", and eventually passing that to Java. -# -# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, -# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; -# see the in-line comments for details. -# -# There are tweaks for specific operating systems such as AIX, CygWin, -# Darwin, MinGW, and NonStop. -# -# (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt -# within the Gradle project. -# -# You can find Gradle at https://github.com/gradle/gradle/. -# -############################################################################## - -# Attempt to set APP_HOME - -# Resolve links: $0 may be a link -app_path=$0 - -# Need this for daisy-chained symlinks. 
-while - APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path - [ -h "$app_path" ] -do - ls=$( ls -ld "$app_path" ) - link=${ls#*' -> '} - case $link in #( - /*) app_path=$link ;; #( - *) app_path=$APP_HOME$link ;; - esac -done - -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" -APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD=maximum - -warn () { - echo "$*" -} >&2 - -die () { - echo - echo "$*" - echo - exit 1 -} >&2 - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "$( uname )" in #( - CYGWIN* ) cygwin=true ;; #( - Darwin* ) darwin=true ;; #( - MSYS* | MINGW* ) msys=true ;; #( - NONSTOP* ) nonstop=true ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD=$JAVA_HOME/jre/sh/java - else - JAVACMD=$JAVA_HOME/bin/java - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then - case $MAX_FD in #( - max*) - MAX_FD=$( ulimit -H -n ) || - warn "Could not query maximum file descriptor limit" - esac - case $MAX_FD in #( - '' | soft) :;; #( - *) - ulimit -n "$MAX_FD" || - warn "Could not set maximum file descriptor limit to $MAX_FD" - esac -fi - -# Collect all arguments for the java command, stacking in reverse order: -# * args from the command line -# * the main class name -# * -classpath -# * -D...appname settings -# * --module-path (only if needed) -# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. - -# For Cygwin or MSYS, switch paths to Windows format before running java -if "$cygwin" || "$msys" ; then - APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) - - JAVACMD=$( cygpath --unix "$JAVACMD" ) - - # Now convert the arguments - kludge to limit ourselves to /bin/sh - for arg do - if - case $arg in #( - -*) false ;; # don't mess with options #( - /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath - [ -e "$t" ] ;; #( - *) false ;; - esac - then - arg=$( cygpath --path --ignore --mixed "$arg" ) - fi - # Roll the args list around exactly as many times as the number of - # args, so each arg winds up back in the position where it started, but - # possibly modified. - # - # NB: a `for` loop captures its iteration list before it begins, so - # changing the positional parameters here affects neither the number of - # iterations, nor the values presented in `arg`. - shift # remove old arg - set -- "$@" "$arg" # push replacement arg - done -fi - -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. 
- -set -- \ - "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ - "$@" - -# Use "xargs" to parse quoted args. -# -# With -n1 it outputs one arg per line, with the quotes and backslashes removed. -# -# In Bash we could simply go: -# -# readarray ARGS < <( xargs -n1 <<<"$var" ) && -# set -- "${ARGS[@]}" "$@" -# -# but POSIX shell has neither arrays nor command substitution, so instead we -# post-process each arg (as a line of input to sed) to backslash-escape any -# character that might be a shell metacharacter, then use eval to reverse -# that process (while maintaining the separation between arguments), and wrap -# the whole thing up as a single "set" statement. -# -# This will of course break if any of these variables contains a newline or -# an unmatched quote. -# - -eval "set -- $( - printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | - xargs -n1 | - sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | - tr '\n' ' ' - )" '"$@"' - -exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat deleted file mode 100644 index ac1b06f93..000000000 --- a/gradlew.bat +++ /dev/null @@ -1,89 +0,0 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. 
-@rem - -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/grpc-client/build.gradle b/grpc-client/build.gradle deleted file mode 100644 index 4ddd6bc71..000000000 --- a/grpc-client/build.gradle +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-grpc') - - implementation "io.grpc:grpc-netty:${revGrpc}" - implementation "io.grpc:grpc-protobuf:${revGrpc}" - implementation "io.grpc:grpc-stub:${revGrpc}" - implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - implementation "org.slf4j:slf4j-api" - implementation "org.apache.commons:commons-lang3" - implementation "com.google.guava:guava:${revGuava}" -} diff --git a/grpc-client/dependencies.lock b/grpc-client/dependencies.lock deleted file mode 100644 index 8fb7966fa..000000000 --- a/grpc-client/dependencies.lock +++ /dev/null @@ -1,343 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - 
"org.slf4j:slf4j-api": { - "locked": "1.7.36" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - } - }, - "testCompileClasspath": { - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - 
}, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - 
], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "locked": "1.7.36" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java deleted file mode 100644 index 5b299a93b..000000000 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.concurrent.TimeUnit; - -import javax.annotation.Nullable; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.grpc.SearchPb; - -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; - -abstract class ClientBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(ClientBase.class); - protected static ProtoMapper protoMapper = ProtoMapper.INSTANCE; - - protected final ManagedChannel channel; - - public ClientBase(String address, int port) { - this(ManagedChannelBuilder.forAddress(address, port).usePlaintext()); - } - - public ClientBase(ManagedChannelBuilder builder) { - channel = builder.build(); - } - - public void shutdown() throws InterruptedException { - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } - - SearchPb.Request createSearchRequest( - @Nullable Integer start, - @Nullable Integer size, - @Nullable String sort, - @Nullable String freeText, - @Nullable String query) { - SearchPb.Request.Builder request = SearchPb.Request.newBuilder(); - if (start != null) request.setStart(start); - if (size != null) request.setSize(size); - if (sort != null) request.setSort(sort); - if (freeText != null) request.setFreeText(freeText); - if (query != null) request.setQuery(query); - return request.build(); - } -} diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/EventClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/EventClient.java deleted file mode 100644 index e331897a3..000000000 --- 
a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/EventClient.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.Iterator; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.grpc.EventServiceGrpc; -import com.netflix.conductor.grpc.EventServicePb; -import com.netflix.conductor.proto.EventHandlerPb; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; - -public class EventClient extends ClientBase { - - private final EventServiceGrpc.EventServiceBlockingStub stub; - - public EventClient(String address, int port) { - super(address, port); - this.stub = EventServiceGrpc.newBlockingStub(this.channel); - } - - /** - * Register an event handler with the server - * - * @param eventHandler the event handler definition - */ - public void registerEventHandler(EventHandler eventHandler) { - Preconditions.checkNotNull(eventHandler, "Event handler definition cannot be null"); - stub.addEventHandler( - EventServicePb.AddEventHandlerRequest.newBuilder() - .setHandler(protoMapper.toProto(eventHandler)) - .build()); - } - - /** - * Updates an existing event handler - * - * @param eventHandler the event handler to be updated - */ - public void updateEventHandler(EventHandler eventHandler) { - Preconditions.checkNotNull(eventHandler, "Event handler definition cannot be null"); - stub.updateEventHandler( - EventServicePb.UpdateEventHandlerRequest.newBuilder() - .setHandler(protoMapper.toProto(eventHandler)) - .build()); - } - - /** - * @param event name of the event - * @param activeOnly if true, returns only the active handlers - * @return Returns the list of all the event 
handlers for a given event - */ - public Iterator getEventHandlers(String event, boolean activeOnly) { - Preconditions.checkArgument(StringUtils.isNotBlank(event), "Event cannot be blank"); - - EventServicePb.GetEventHandlersForEventRequest.Builder request = - EventServicePb.GetEventHandlersForEventRequest.newBuilder() - .setEvent(event) - .setActiveOnly(activeOnly); - Iterator it = stub.getEventHandlersForEvent(request.build()); - return Iterators.transform(it, protoMapper::fromProto); - } - - /** - * Removes the event handler from the conductor server - * - * @param name the name of the event handler - */ - public void unregisterEventHandler(String name) { - Preconditions.checkArgument(StringUtils.isNotBlank(name), "Name cannot be blank"); - stub.removeEventHandler( - EventServicePb.RemoveEventHandlerRequest.newBuilder().setName(name).build()); - } -} diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java deleted file mode 100644 index df30845c6..000000000 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.List; - -import javax.annotation.Nullable; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.grpc.MetadataServiceGrpc; -import com.netflix.conductor.grpc.MetadataServicePb; - -import com.google.common.base.Preconditions; - -public class MetadataClient extends ClientBase { - - private final MetadataServiceGrpc.MetadataServiceBlockingStub stub; - - public MetadataClient(String address, int port) { - super(address, port); - this.stub = MetadataServiceGrpc.newBlockingStub(this.channel); - } - - /** - * Register a workflow definition with the server - * - * @param workflowDef the workflow definition - */ - public void registerWorkflowDef(WorkflowDef workflowDef) { - Preconditions.checkNotNull(workflowDef, "Worfklow definition cannot be null"); - stub.createWorkflow( - MetadataServicePb.CreateWorkflowRequest.newBuilder() - .setWorkflow(protoMapper.toProto(workflowDef)) - .build()); - } - - /** - * Updates a list of existing workflow definitions - * - * @param workflowDefs List of workflow definitions to be updated - */ - public void updateWorkflowDefs(List workflowDefs) { - Preconditions.checkNotNull(workflowDefs, "Workflow defs list cannot be null"); - stub.updateWorkflows( - MetadataServicePb.UpdateWorkflowsRequest.newBuilder() - .addAllDefs(workflowDefs.stream().map(protoMapper::toProto)::iterator) - .build()); - } - - /** - * Retrieve the workflow definition - * - * @param name the name of the workflow - * @param version the 
version of the workflow def - * @return Workflow definition for the given workflow and version - */ - public WorkflowDef getWorkflowDef(String name, @Nullable Integer version) { - Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be blank"); - - MetadataServicePb.GetWorkflowRequest.Builder request = - MetadataServicePb.GetWorkflowRequest.newBuilder().setName(name); - - if (version != null) { - request.setVersion(version); - } - - return protoMapper.fromProto(stub.getWorkflow(request.build()).getWorkflow()); - } - - /** - * Registers a list of task types with the conductor server - * - * @param taskDefs List of task types to be registered. - */ - public void registerTaskDefs(List taskDefs) { - Preconditions.checkNotNull(taskDefs, "Task defs list cannot be null"); - stub.createTasks( - MetadataServicePb.CreateTasksRequest.newBuilder() - .addAllDefs(taskDefs.stream().map(protoMapper::toProto)::iterator) - .build()); - } - - /** - * Updates an existing task definition - * - * @param taskDef the task definition to be updated - */ - public void updateTaskDef(TaskDef taskDef) { - Preconditions.checkNotNull(taskDef, "Task definition cannot be null"); - stub.updateTask( - MetadataServicePb.UpdateTaskRequest.newBuilder() - .setTask(protoMapper.toProto(taskDef)) - .build()); - } - - /** - * Retrieve the task definition of a given task type - * - * @param taskType type of task for which to retrieve the definition - * @return Task Definition for the given task type - */ - public TaskDef getTaskDef(String taskType) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); - return protoMapper.fromProto( - stub.getTask( - MetadataServicePb.GetTaskRequest.newBuilder() - .setTaskType(taskType) - .build()) - .getTask()); - } - - /** - * Removes the task definition of a task type from the conductor server. Use with caution. - * - * @param taskType Task type to be unregistered. 
- */ - public void unregisterTaskDef(String taskType) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); - stub.deleteTask( - MetadataServicePb.DeleteTaskRequest.newBuilder().setTaskType(taskType).build()); - } -} diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java deleted file mode 100644 index 7bc654123..000000000 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.Iterator; -import java.util.List; -import java.util.stream.Collectors; - -import javax.annotation.Nullable; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.TaskServiceGrpc; -import com.netflix.conductor.grpc.TaskServicePb; -import com.netflix.conductor.proto.TaskPb; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; - -public class TaskClient extends ClientBase { - - private final TaskServiceGrpc.TaskServiceBlockingStub stub; - - public TaskClient(String address, int port) { - super(address, port); - this.stub = TaskServiceGrpc.newBlockingStub(this.channel); - } - - /** - * Perform a poll for a task of a specific task type. - * - * @param taskType The taskType to poll for - * @param domain The domain of the task type - * @param workerId Name of the client worker. Used for logging. - * @return Task waiting to be executed. 
- */ - public Task pollTask(String taskType, String workerId, String domain) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); - Preconditions.checkArgument(StringUtils.isNotBlank(domain), "Domain cannot be blank"); - Preconditions.checkArgument(StringUtils.isNotBlank(workerId), "Worker id cannot be blank"); - - TaskServicePb.PollResponse response = - stub.poll( - TaskServicePb.PollRequest.newBuilder() - .setTaskType(taskType) - .setWorkerId(workerId) - .setDomain(domain) - .build()); - return protoMapper.fromProto(response.getTask()); - } - - /** - * Perform a batch poll for tasks by task type. Batch size is configurable by count. - * - * @param taskType Type of task to poll for - * @param workerId Name of the client worker. Used for logging. - * @param count Maximum number of tasks to be returned. Actual number of tasks returned can be - * less than this number. - * @param timeoutInMillisecond Long poll wait timeout. - * @return List of tasks awaiting to be executed. - */ - public List batchPollTasksByTaskType( - String taskType, String workerId, int count, int timeoutInMillisecond) { - return Lists.newArrayList( - batchPollTasksByTaskTypeAsync(taskType, workerId, count, timeoutInMillisecond)); - } - - /** - * Perform a batch poll for tasks by task type. Batch size is configurable by count. Returns an - * iterator that streams tasks as they become available through GRPC. - * - * @param taskType Type of task to poll for - * @param workerId Name of the client worker. Used for logging. - * @param count Maximum number of tasks to be returned. Actual number of tasks returned can be - * less than this number. - * @param timeoutInMillisecond Long poll wait timeout. - * @return Iterator of tasks awaiting to be executed. 
- */ - public Iterator batchPollTasksByTaskTypeAsync( - String taskType, String workerId, int count, int timeoutInMillisecond) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); - Preconditions.checkArgument(StringUtils.isNotBlank(workerId), "Worker id cannot be blank"); - Preconditions.checkArgument(count > 0, "Count must be greater than 0"); - - Iterator it = - stub.batchPoll( - TaskServicePb.BatchPollRequest.newBuilder() - .setTaskType(taskType) - .setWorkerId(workerId) - .setCount(count) - .setTimeout(timeoutInMillisecond) - .build()); - - return Iterators.transform(it, protoMapper::fromProto); - } - - /** - * Updates the result of a task execution. - * - * @param taskResult TaskResults to be updated. - */ - public void updateTask(TaskResult taskResult) { - Preconditions.checkNotNull(taskResult, "Task result cannot be null"); - stub.updateTask( - TaskServicePb.UpdateTaskRequest.newBuilder() - .setResult(protoMapper.toProto(taskResult)) - .build()); - } - - /** - * Log execution messages for a task. - * - * @param taskId id of the task - * @param logMessage the message to be logged - */ - public void logMessageForTask(String taskId, String logMessage) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank"); - stub.addLog( - TaskServicePb.AddLogRequest.newBuilder() - .setTaskId(taskId) - .setLog(logMessage) - .build()); - } - - /** - * Fetch execution logs for a task. - * - * @param taskId id of the task. 
- */ - public List getTaskLogs(String taskId) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank"); - return stub - .getTaskLogs( - TaskServicePb.GetTaskLogsRequest.newBuilder().setTaskId(taskId).build()) - .getLogsList() - .stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList()); - } - - /** - * Retrieve information about the task - * - * @param taskId ID of the task - * @return Task details - */ - public Task getTaskDetails(String taskId) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank"); - return protoMapper.fromProto( - stub.getTask(TaskServicePb.GetTaskRequest.newBuilder().setTaskId(taskId).build()) - .getTask()); - } - - public int getQueueSizeForTask(String taskType) { - Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); - - TaskServicePb.QueueSizesResponse sizes = - stub.getQueueSizesForTasks( - TaskServicePb.QueueSizesRequest.newBuilder() - .addTaskTypes(taskType) - .build()); - - return sizes.getQueueForTaskOrDefault(taskType, 0); - } - - public SearchResult search(String query) { - return search(null, null, null, null, query); - } - - public SearchResult searchV2(String query) { - return searchV2(null, null, null, null, query); - } - - public SearchResult search( - @Nullable Integer start, - @Nullable Integer size, - @Nullable String sort, - @Nullable String freeText, - @Nullable String query) { - SearchPb.Request searchRequest = createSearchRequest(start, size, sort, freeText, query); - TaskServicePb.TaskSummarySearchResult result = stub.search(searchRequest); - return new SearchResult<>( - result.getTotalHits(), - result.getResultsList().stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList())); - } - - public SearchResult searchV2( - @Nullable Integer start, - @Nullable Integer size, - @Nullable String sort, - @Nullable String freeText, - @Nullable String query) { - SearchPb.Request searchRequest = 
createSearchRequest(start, size, sort, freeText, query); - TaskServicePb.TaskSearchResult result = stub.searchV2(searchRequest); - return new SearchResult<>( - result.getTotalHits(), - result.getResultsList().stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList())); - } -} diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java deleted file mode 100644 index 99b9ee243..000000000 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -import javax.annotation.Nullable; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.WorkflowServiceGrpc; -import com.netflix.conductor.grpc.WorkflowServicePb; -import com.netflix.conductor.proto.WorkflowPb; - -import com.google.common.base.Preconditions; - -public class WorkflowClient extends ClientBase { - - private final WorkflowServiceGrpc.WorkflowServiceBlockingStub stub; - - public WorkflowClient(String address, int port) { - super(address, port); - this.stub = WorkflowServiceGrpc.newBlockingStub(this.channel); - } - - /** - * Starts a workflow - * - * @param startWorkflowRequest the {@link StartWorkflowRequest} object to start the workflow - * @return the id of the workflow instance that can be used for tracking - */ - public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { - Preconditions.checkNotNull(startWorkflowRequest, "StartWorkflowRequest cannot be null"); - return stub.startWorkflow(protoMapper.toProto(startWorkflowRequest)).getWorkflowId(); - } - - /** - * Retrieve a workflow by workflow id - * - * @param workflowId the id of the workflow - * @param includeTasks specify if the tasks 
in the workflow need to be returned - * @return the requested workflow - */ - public Workflow getWorkflow(String workflowId, boolean includeTasks) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - WorkflowPb.Workflow workflow = - stub.getWorkflowStatus( - WorkflowServicePb.GetWorkflowStatusRequest.newBuilder() - .setWorkflowId(workflowId) - .setIncludeTasks(includeTasks) - .build()); - return protoMapper.fromProto(workflow); - } - - /** - * Retrieve all workflows for a given correlation id and name - * - * @param name the name of the workflow - * @param correlationId the correlation id - * @param includeClosed specify if all workflows are to be returned or only running workflows - * @param includeTasks specify if the tasks in the workflow need to be returned - * @return list of workflows for the given correlation id and name - */ - public List getWorkflows( - String name, String correlationId, boolean includeClosed, boolean includeTasks) { - Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be blank"); - Preconditions.checkArgument( - StringUtils.isNotBlank(correlationId), "correlationId cannot be blank"); - - WorkflowServicePb.GetWorkflowsResponse workflows = - stub.getWorkflows( - WorkflowServicePb.GetWorkflowsRequest.newBuilder() - .setName(name) - .addCorrelationId(correlationId) - .setIncludeClosed(includeClosed) - .setIncludeTasks(includeTasks) - .build()); - - if (!workflows.containsWorkflowsById(correlationId)) { - return Collections.emptyList(); - } - - return workflows.getWorkflowsByIdOrThrow(correlationId).getWorkflowsList().stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList()); - } - - /** - * Removes a workflow from the system - * - * @param workflowId the id of the workflow to be deleted - * @param archiveWorkflow flag to indicate if the workflow should be archived before deletion - */ - public void deleteWorkflow(String workflowId, boolean archiveWorkflow) { 
- Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "Workflow id cannot be blank"); - stub.removeWorkflow( - WorkflowServicePb.RemoveWorkflowRequest.newBuilder() - .setWorkflodId(workflowId) - .setArchiveWorkflow(archiveWorkflow) - .build()); - } - - /* - * Retrieve all running workflow instances for a given name and version - * - * @param workflowName the name of the workflow - * @param version the version of the wokflow definition. Defaults to 1. - * @return the list of running workflow instances - */ - public List getRunningWorkflow(String workflowName, @Nullable Integer version) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank"); - - WorkflowServicePb.GetRunningWorkflowsResponse workflows = - stub.getRunningWorkflows( - WorkflowServicePb.GetRunningWorkflowsRequest.newBuilder() - .setName(workflowName) - .setVersion(version == null ? 1 : version) - .build()); - return workflows.getWorkflowIdsList(); - } - - /** - * Retrieve all workflow instances for a given workflow name between a specific time period - * - * @param workflowName the name of the workflow - * @param version the version of the workflow definition. Defaults to 1. 
- * @param startTime the start time of the period - * @param endTime the end time of the period - * @return returns a list of workflows created during the specified during the time period - */ - public List getWorkflowsByTimePeriod( - String workflowName, int version, Long startTime, Long endTime) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank"); - Preconditions.checkNotNull(startTime, "Start time cannot be null"); - Preconditions.checkNotNull(endTime, "End time cannot be null"); - // TODO - return null; - } - - /* - * Starts the decision task for the given workflow instance - * - * @param workflowId the id of the workflow instance - */ - public void runDecider(String workflowId) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.decideWorkflow( - WorkflowServicePb.DecideWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .build()); - } - - /** - * Pause a workflow by workflow id - * - * @param workflowId the workflow id of the workflow to be paused - */ - public void pauseWorkflow(String workflowId) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.pauseWorkflow( - WorkflowServicePb.PauseWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .build()); - } - - /** - * Resume a paused workflow by workflow id - * - * @param workflowId the workflow id of the paused workflow - */ - public void resumeWorkflow(String workflowId) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.resumeWorkflow( - WorkflowServicePb.ResumeWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .build()); - } - - /** - * Skips a given task from a current RUNNING workflow - * - * @param workflowId the id of the workflow instance - * @param taskReferenceName the reference name of the task to be skipped - */ - public void 
skipTaskFromWorkflow(String workflowId, String taskReferenceName) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - Preconditions.checkArgument( - StringUtils.isNotBlank(taskReferenceName), "Task reference name cannot be blank"); - stub.skipTaskFromWorkflow( - WorkflowServicePb.SkipTaskRequest.newBuilder() - .setWorkflowId(workflowId) - .setTaskReferenceName(taskReferenceName) - .build()); - } - - /** - * Reruns the workflow from a specific task - * - * @param rerunWorkflowRequest the request containing the task to rerun from - * @return the id of the workflow - */ - public String rerunWorkflow(RerunWorkflowRequest rerunWorkflowRequest) { - Preconditions.checkNotNull(rerunWorkflowRequest, "RerunWorkflowRequest cannot be null"); - return stub.rerunWorkflow(protoMapper.toProto(rerunWorkflowRequest)).getWorkflowId(); - } - - /** - * Restart a completed workflow - * - * @param workflowId the workflow id of the workflow to be restarted - */ - public void restart(String workflowId, boolean useLatestDefinitions) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.restartWorkflow( - WorkflowServicePb.RestartWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .setUseLatestDefinitions(useLatestDefinitions) - .build()); - } - - /** - * Retries the last failed task in a workflow - * - * @param workflowId the workflow id of the workflow with the failed task - */ - public void retryLastFailedTask(String workflowId, boolean resumeSubworkflowTasks) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.retryWorkflow( - WorkflowServicePb.RetryWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .setResumeSubworkflowTasks(resumeSubworkflowTasks) - .build()); - } - - /** - * Resets the callback times of all IN PROGRESS tasks to 0 for the given workflow - * - * @param workflowId the id of the workflow - 
*/ - public void resetCallbacksForInProgressTasks(String workflowId) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.resetWorkflowCallbacks( - WorkflowServicePb.ResetWorkflowCallbacksRequest.newBuilder() - .setWorkflowId(workflowId) - .build()); - } - - /** - * Terminates the execution of the given workflow instance - * - * @param workflowId the id of the workflow to be terminated - * @param reason the reason to be logged and displayed - */ - public void terminateWorkflow(String workflowId, String reason) { - Preconditions.checkArgument( - StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.terminateWorkflow( - WorkflowServicePb.TerminateWorkflowRequest.newBuilder() - .setWorkflowId(workflowId) - .setReason(reason) - .build()); - } - - /** - * Search for workflows based on payload - * - * @param query the search query - * @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query - */ - public SearchResult search(String query) { - return search(null, null, null, null, query); - } - - /** - * Search for workflows based on payload - * - * @param query the search query - * @return the {@link SearchResult} containing the {@link Workflow} that match the query - */ - public SearchResult searchV2(String query) { - return searchV2(null, null, null, null, query); - } - - /** - * Paginated search for workflows based on payload - * - * @param start start value of page - * @param size number of workflows to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - * @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query - */ - public SearchResult search( - @Nullable Integer start, - @Nullable Integer size, - @Nullable String sort, - @Nullable String freeText, - @Nullable String query) { - - SearchPb.Request searchRequest = createSearchRequest(start, size, 
sort, freeText, query); - WorkflowServicePb.WorkflowSummarySearchResult result = stub.search(searchRequest); - return new SearchResult<>( - result.getTotalHits(), - result.getResultsList().stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList())); - } - - /** - * Paginated search for workflows based on payload - * - * @param start start value of page - * @param size number of workflows to be returned - * @param sort sort order - * @param freeText additional free text query - * @param query the search query - * @return the {@link SearchResult} containing the {@link Workflow} that match the query - */ - public SearchResult searchV2( - @Nullable Integer start, - @Nullable Integer size, - @Nullable String sort, - @Nullable String freeText, - @Nullable String query) { - SearchPb.Request searchRequest = createSearchRequest(start, size, sort, freeText, query); - WorkflowServicePb.WorkflowSearchResult result = stub.searchV2(searchRequest); - return new SearchResult<>( - result.getTotalHits(), - result.getResultsList().stream() - .map(protoMapper::fromProto) - .collect(Collectors.toList())); - } -} diff --git a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/EventClientTest.java b/grpc-client/src/test/java/com/netflix/conductor/client/grpc/EventClientTest.java deleted file mode 100644 index ac33b897b..000000000 --- a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/EventClientTest.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.util.ReflectionTestUtils; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.grpc.EventServiceGrpc; -import com.netflix.conductor.grpc.EventServicePb; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.proto.EventHandlerPb; - -import static junit.framework.TestCase.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@RunWith(SpringRunner.class) -public class EventClientTest { - - @Mock ProtoMapper mockedProtoMapper; - - @Mock EventServiceGrpc.EventServiceBlockingStub mockedStub; - - EventClient eventClient; - - @Before - public void init() { - eventClient = new EventClient("test", 0); - ReflectionTestUtils.setField(eventClient, "stub", mockedStub); - ReflectionTestUtils.setField(eventClient, "protoMapper", mockedProtoMapper); - } - - @Test - public void testRegisterEventHandler() { - EventHandler eventHandler = mock(EventHandler.class); - EventHandlerPb.EventHandler eventHandlerPB = mock(EventHandlerPb.EventHandler.class); - when(mockedProtoMapper.toProto(eventHandler)).thenReturn(eventHandlerPB); - - EventServicePb.AddEventHandlerRequest request = - EventServicePb.AddEventHandlerRequest.newBuilder() - 
.setHandler(eventHandlerPB) - .build(); - eventClient.registerEventHandler(eventHandler); - verify(mockedStub, times(1)).addEventHandler(request); - } - - @Test - public void testUpdateEventHandler() { - EventHandler eventHandler = mock(EventHandler.class); - EventHandlerPb.EventHandler eventHandlerPB = mock(EventHandlerPb.EventHandler.class); - when(mockedProtoMapper.toProto(eventHandler)).thenReturn(eventHandlerPB); - - EventServicePb.UpdateEventHandlerRequest request = - EventServicePb.UpdateEventHandlerRequest.newBuilder() - .setHandler(eventHandlerPB) - .build(); - eventClient.updateEventHandler(eventHandler); - verify(mockedStub, times(1)).updateEventHandler(request); - } - - @Test - public void testGetEventHandlers() { - EventHandler eventHandler = mock(EventHandler.class); - EventHandlerPb.EventHandler eventHandlerPB = mock(EventHandlerPb.EventHandler.class); - when(mockedProtoMapper.fromProto(eventHandlerPB)).thenReturn(eventHandler); - EventServicePb.GetEventHandlersForEventRequest request = - EventServicePb.GetEventHandlersForEventRequest.newBuilder() - .setEvent("test") - .setActiveOnly(true) - .build(); - List result = new ArrayList<>(); - result.add(eventHandlerPB); - when(mockedStub.getEventHandlersForEvent(request)).thenReturn(result.iterator()); - Iterator response = eventClient.getEventHandlers("test", true); - verify(mockedStub, times(1)).getEventHandlersForEvent(request); - assertEquals(response.next(), eventHandler); - } - - @Test - public void testUnregisterEventHandler() { - EventServicePb.RemoveEventHandlerRequest request = - EventServicePb.RemoveEventHandlerRequest.newBuilder().setName("test").build(); - eventClient.unregisterEventHandler("test"); - verify(mockedStub, times(1)).removeEventHandler(request); - } -} diff --git a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/TaskClientTest.java b/grpc-client/src/test/java/com/netflix/conductor/client/grpc/TaskClientTest.java deleted file mode 100644 index b6d61cb7f..000000000 --- 
a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/TaskClientTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.util.ReflectionTestUtils; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.TaskServiceGrpc; -import com.netflix.conductor.grpc.TaskServicePb; -import com.netflix.conductor.proto.TaskPb; -import com.netflix.conductor.proto.TaskSummaryPb; - -import static junit.framework.TestCase.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@RunWith(SpringRunner.class) -public class TaskClientTest { - - @Mock ProtoMapper mockedProtoMapper; - - @Mock TaskServiceGrpc.TaskServiceBlockingStub mockedStub; - - TaskClient taskClient; - - @Before - public void init() { - taskClient = new TaskClient("test", 0); - ReflectionTestUtils.setField(taskClient, "stub", mockedStub); - ReflectionTestUtils.setField(taskClient, "protoMapper", mockedProtoMapper); - } - - @Test - public void testSearch() { - TaskSummary taskSummary = mock(TaskSummary.class); - TaskSummaryPb.TaskSummary taskSummaryPB = mock(TaskSummaryPb.TaskSummary.class); - when(mockedProtoMapper.fromProto(taskSummaryPB)).thenReturn(taskSummary); - TaskServicePb.TaskSummarySearchResult result = - TaskServicePb.TaskSummarySearchResult.newBuilder() - .addResults(taskSummaryPB) 
- .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder().setQuery("test query").build(); - when(mockedStub.search(searchRequest)).thenReturn(result); - SearchResult searchResult = taskClient.search("test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(taskSummary, searchResult.getResults().get(0)); - } - - @Test - public void testSearchV2() { - Task task = mock(Task.class); - TaskPb.Task taskPB = mock(TaskPb.Task.class); - when(mockedProtoMapper.fromProto(taskPB)).thenReturn(task); - TaskServicePb.TaskSearchResult result = - TaskServicePb.TaskSearchResult.newBuilder() - .addResults(taskPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder().setQuery("test query").build(); - when(mockedStub.searchV2(searchRequest)).thenReturn(result); - SearchResult searchResult = taskClient.searchV2("test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(task, searchResult.getResults().get(0)); - } - - @Test - public void testSearchWithParams() { - TaskSummary taskSummary = mock(TaskSummary.class); - TaskSummaryPb.TaskSummary taskSummaryPB = mock(TaskSummaryPb.TaskSummary.class); - when(mockedProtoMapper.fromProto(taskSummaryPB)).thenReturn(taskSummary); - TaskServicePb.TaskSummarySearchResult result = - TaskServicePb.TaskSummarySearchResult.newBuilder() - .addResults(taskSummaryPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(5) - .setSort("*") - .setFreeText("*") - .setQuery("test query") - .build(); - when(mockedStub.search(searchRequest)).thenReturn(result); - SearchResult searchResult = taskClient.search(1, 5, "*", "*", "test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(taskSummary, searchResult.getResults().get(0)); - } - - @Test - public void testSearchV2WithParams() { - Task task = mock(Task.class); - TaskPb.Task taskPB = 
mock(TaskPb.Task.class); - when(mockedProtoMapper.fromProto(taskPB)).thenReturn(task); - TaskServicePb.TaskSearchResult result = - TaskServicePb.TaskSearchResult.newBuilder() - .addResults(taskPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(5) - .setSort("*") - .setFreeText("*") - .setQuery("test query") - .build(); - when(mockedStub.searchV2(searchRequest)).thenReturn(result); - SearchResult searchResult = taskClient.searchV2(1, 5, "*", "*", "test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(task, searchResult.getResults().get(0)); - } -} diff --git a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/WorkflowClientTest.java b/grpc-client/src/test/java/com/netflix/conductor/client/grpc/WorkflowClientTest.java deleted file mode 100644 index f4f184b9e..000000000 --- a/grpc-client/src/test/java/com/netflix/conductor/client/grpc/WorkflowClientTest.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.client.grpc; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.util.ReflectionTestUtils; - -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.WorkflowServiceGrpc; -import com.netflix.conductor.grpc.WorkflowServicePb; -import com.netflix.conductor.proto.WorkflowPb; -import com.netflix.conductor.proto.WorkflowSummaryPb; - -import static junit.framework.TestCase.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@RunWith(SpringRunner.class) -public class WorkflowClientTest { - - @Mock ProtoMapper mockedProtoMapper; - - @Mock WorkflowServiceGrpc.WorkflowServiceBlockingStub mockedStub; - - WorkflowClient workflowClient; - - @Before - public void init() { - workflowClient = new WorkflowClient("test", 0); - ReflectionTestUtils.setField(workflowClient, "stub", mockedStub); - ReflectionTestUtils.setField(workflowClient, "protoMapper", mockedProtoMapper); - } - - @Test - public void testSearch() { - WorkflowSummary workflow = mock(WorkflowSummary.class); - WorkflowSummaryPb.WorkflowSummary workflowPB = - mock(WorkflowSummaryPb.WorkflowSummary.class); - when(mockedProtoMapper.fromProto(workflowPB)).thenReturn(workflow); - WorkflowServicePb.WorkflowSummarySearchResult result = - 
WorkflowServicePb.WorkflowSummarySearchResult.newBuilder() - .addResults(workflowPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder().setQuery("test query").build(); - when(mockedStub.search(searchRequest)).thenReturn(result); - SearchResult searchResult = workflowClient.search("test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(workflow, searchResult.getResults().get(0)); - } - - @Test - public void testSearchV2() { - Workflow workflow = mock(Workflow.class); - WorkflowPb.Workflow workflowPB = mock(WorkflowPb.Workflow.class); - when(mockedProtoMapper.fromProto(workflowPB)).thenReturn(workflow); - WorkflowServicePb.WorkflowSearchResult result = - WorkflowServicePb.WorkflowSearchResult.newBuilder() - .addResults(workflowPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder().setQuery("test query").build(); - when(mockedStub.searchV2(searchRequest)).thenReturn(result); - SearchResult searchResult = workflowClient.searchV2("test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(workflow, searchResult.getResults().get(0)); - } - - @Test - public void testSearchWithParams() { - WorkflowSummary workflow = mock(WorkflowSummary.class); - WorkflowSummaryPb.WorkflowSummary workflowPB = - mock(WorkflowSummaryPb.WorkflowSummary.class); - when(mockedProtoMapper.fromProto(workflowPB)).thenReturn(workflow); - WorkflowServicePb.WorkflowSummarySearchResult result = - WorkflowServicePb.WorkflowSummarySearchResult.newBuilder() - .addResults(workflowPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(5) - .setSort("*") - .setFreeText("*") - .setQuery("test query") - .build(); - when(mockedStub.search(searchRequest)).thenReturn(result); - SearchResult searchResult = - workflowClient.search(1, 5, "*", "*", "test query"); - assertEquals(1, searchResult.getTotalHits()); - 
assertEquals(workflow, searchResult.getResults().get(0)); - } - - @Test - public void testSearchV2WithParams() { - Workflow workflow = mock(Workflow.class); - WorkflowPb.Workflow workflowPB = mock(WorkflowPb.Workflow.class); - when(mockedProtoMapper.fromProto(workflowPB)).thenReturn(workflow); - WorkflowServicePb.WorkflowSearchResult result = - WorkflowServicePb.WorkflowSearchResult.newBuilder() - .addResults(workflowPB) - .setTotalHits(1) - .build(); - SearchPb.Request searchRequest = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(5) - .setSort("*") - .setFreeText("*") - .setQuery("test query") - .build(); - when(mockedStub.searchV2(searchRequest)).thenReturn(result); - SearchResult searchResult = workflowClient.searchV2(1, 5, "*", "*", "test query"); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(workflow, searchResult.getResults().get(0)); - } -} diff --git a/grpc-client/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/grpc-client/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker deleted file mode 100644 index b3188fb9e..000000000 --- a/grpc-client/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker +++ /dev/null @@ -1 +0,0 @@ -mock-maker-inline diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle deleted file mode 100644 index d17d784a4..000000000 --- a/grpc-server/build.gradle +++ /dev/null @@ -1,14 +0,0 @@ -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - implementation project(':conductor-grpc') - - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "io.grpc:grpc-netty:${revGrpc}" - implementation "io.grpc:grpc-services:${revGrpc}" - implementation "org.apache.commons:commons-lang3" - - testImplementation "io.grpc:grpc-testing:${revGrpc}" - testImplementation "org.testinfected.hamcrest-matchers:all-matchers:${revHamcrestAllMatchers}" -} diff --git a/grpc-server/dependencies.lock 
b/grpc-server/dependencies.lock deleted file mode 100644 index 5d5c4985d..000000000 --- a/grpc-server/dependencies.lock +++ /dev/null @@ -1,470 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "locked": "1.47.0" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - 
"com.netflix.conductor:conductor-grpc" - ], - "locked": "3.19.2" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": 
"1.3.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, 
- "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "locked": "1.47.0" - }, - "io.grpc:grpc-testing": { - "locked": "1.47.0" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.testinfected.hamcrest-matchers:all-matchers": { - "locked": "1.8" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.19.2" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-testing": { - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - 
"locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.testinfected.hamcrest-matchers:all-matchers": { - "locked": "1.8" - } - } -} \ No newline at end of file diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java 
b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java deleted file mode 100644 index 7d10ac59f..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server; - -import java.io.IOException; -import java.util.List; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import io.grpc.BindableService; -import io.grpc.Server; -import io.grpc.ServerBuilder; - -public class GRPCServer { - - private static final Logger LOGGER = LoggerFactory.getLogger(GRPCServer.class); - - private final Server server; - - public GRPCServer(int port, List services) { - ServerBuilder builder = ServerBuilder.forPort(port); - services.forEach(builder::addService); - server = builder.build(); - } - - @PostConstruct - public void start() throws IOException { - server.start(); - LOGGER.info("grpc: Server started, listening on " + server.getPort()); - } - - @PreDestroy - public void stop() { - if (server != null) { - LOGGER.info("grpc: server shutting down"); - server.shutdown(); - } - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProperties.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProperties.java deleted file mode 100644 index 3b88e2056..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProperties.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -@ConfigurationProperties("conductor.grpc-server") -public class GRPCServerProperties { - - /** The port at which the gRPC server will serve requests */ - private int port = 8090; - - /** Enables the reflection service for Protobuf services */ - private boolean reflectionEnabled = true; - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public boolean isReflectionEnabled() { - return reflectionEnabled; - } - - public void setReflectionEnabled(boolean reflectionEnabled) { - this.reflectionEnabled = reflectionEnabled; - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GrpcConfiguration.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GrpcConfiguration.java deleted file mode 100644 index dee9bfdcb..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GrpcConfiguration.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server; - -import java.util.List; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import io.grpc.BindableService; -import io.grpc.protobuf.services.ProtoReflectionService; - -@Configuration -@ConditionalOnProperty(name = "conductor.grpc-server.enabled", havingValue = "true") -@EnableConfigurationProperties(GRPCServerProperties.class) -public class GrpcConfiguration { - - @Bean - public GRPCServer grpcServer( - List bindableServices, // all gRPC service implementations - GRPCServerProperties grpcServerProperties) { - if (grpcServerProperties.isReflectionEnabled()) { - bindableServices.add(ProtoReflectionService.newInstance()); - } - - return new GRPCServer(grpcServerProperties.getPort(), bindableServices); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java deleted file mode 100644 index 229af7d27..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.grpc.EventServiceGrpc; -import com.netflix.conductor.grpc.EventServicePb; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.proto.EventHandlerPb; -import com.netflix.conductor.service.MetadataService; - -import io.grpc.stub.StreamObserver; - -@Service("grpcEventService") -public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(EventServiceImpl.class); - - private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; - - private final MetadataService metadataService; - - public EventServiceImpl(MetadataService metadataService) { - this.metadataService = metadataService; - } - - @Override - public void addEventHandler( - EventServicePb.AddEventHandlerRequest req, - StreamObserver response) { - metadataService.addEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); - response.onNext(EventServicePb.AddEventHandlerResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void updateEventHandler( - EventServicePb.UpdateEventHandlerRequest req, - StreamObserver response) { - metadataService.updateEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); - response.onNext(EventServicePb.UpdateEventHandlerResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void removeEventHandler( - EventServicePb.RemoveEventHandlerRequest req, - StreamObserver response) { - 
metadataService.removeEventHandlerStatus(req.getName()); - response.onNext(EventServicePb.RemoveEventHandlerResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void getEventHandlers( - EventServicePb.GetEventHandlersRequest req, - StreamObserver response) { - metadataService.getAllEventHandlers().stream() - .map(PROTO_MAPPER::toProto) - .forEach(response::onNext); - response.onCompleted(); - } - - @Override - public void getEventHandlersForEvent( - EventServicePb.GetEventHandlersForEventRequest req, - StreamObserver response) { - metadataService.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly()).stream() - .map(PROTO_MAPPER::toProto) - .forEach(response::onNext); - response.onCompleted(); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java deleted file mode 100644 index 0dd626fa6..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.Arrays; - -import javax.annotation.Nonnull; - -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.slf4j.Logger; - -import com.google.rpc.DebugInfo; -import io.grpc.Metadata; -import io.grpc.Status; -import io.grpc.StatusException; -import io.grpc.protobuf.lite.ProtoLiteUtils; -import io.grpc.stub.StreamObserver; - -public class GRPCHelper { - - private final Logger logger; - - private static final Metadata.Key STATUS_DETAILS_KEY = - Metadata.Key.of( - "grpc-status-details-bin", - ProtoLiteUtils.metadataMarshaller(DebugInfo.getDefaultInstance())); - - public GRPCHelper(Logger log) { - this.logger = log; - } - - /** - * Converts an internal exception thrown by Conductor into an StatusException that uses modern - * "Status" metadata for GRPC. - * - *

    Note that this is trickier than it ought to be because the GRPC APIs have not been - * upgraded yet. Here's a quick breakdown of how this works in practice: - * - *

    Reporting a "status" result back to a client with GRPC is pretty straightforward. GRPC - * implementations simply serialize the status into several HTTP/2 trailer headers that are sent - * back to the client before shutting down the HTTP/2 stream. - * - *

    - 'grpc-status', which is a string representation of a {@link com.google.rpc.Code} - - * 'grpc-message', which is the description of the returned status - 'grpc-status-details-bin' - * (optional), which is an arbitrary payload with a serialized ProtoBuf object, containing an - * accurate description of the error in case the status is not successful. - * - *

    By convention, Google provides a default set of ProtoBuf messages for the most common - * error cases. Here, we'll be using {@link DebugInfo}, as we're reporting an internal Java - * exception which we couldn't properly handle. - * - *

    Now, how do we go about sending all those headers _and_ the {@link DebugInfo} payload - * using the Java GRPC API? - * - *

    The only way we can return an error with the Java API is by passing an instance of {@link - * io.grpc.StatusException} or {@link io.grpc.StatusRuntimeException} to {@link - * StreamObserver#onError(Throwable)}. The easiest way to create either of these exceptions is - * by using the {@link Status} class and one of its predefined code identifiers (in this case, - * {@link Status#INTERNAL} because we're reporting an internal exception). The {@link Status} - * class has setters to set its most relevant attributes, namely those that will be - * automatically serialized into the 'grpc-status' and 'grpc-message' trailers in the response. - * There is, however, no setter to pass an arbitrary ProtoBuf message to be serialized into a - * `grpc-status-details-bin` trailer. This feature exists in the other language implementations - * but it hasn't been brought to Java yet. - * - *

    Fortunately, {@link Status#asException(Metadata)} exists, allowing us to pass any amount - * of arbitrary trailers before we close the response. So we're using this API to manually craft - * the 'grpc-status-detail-bin' trailer, in the same way that the GRPC server implementations - * for Go and C++ craft and serialize the header. This will allow us to access the metadata - * cleanly from Go and C++ clients by using the 'details' method which _has_ been implemented in - * those two clients. - * - * @param t The exception to convert - * @return an instance of {@link StatusException} which will properly serialize all its headers - * into the response. - */ - private StatusException throwableToStatusException(Throwable t) { - String[] frames = ExceptionUtils.getStackFrames(t); - Metadata metadata = new Metadata(); - metadata.put( - STATUS_DETAILS_KEY, - DebugInfo.newBuilder() - .addAllStackEntries(Arrays.asList(frames)) - .setDetail(ExceptionUtils.getMessage(t)) - .build()); - - return Status.INTERNAL.withDescription(t.getMessage()).withCause(t).asException(metadata); - } - - void onError(StreamObserver response, Throwable t) { - logger.error("internal exception during GRPC request", t); - response.onError(throwableToStatusException(t)); - } - - /** - * Convert a non-null String instance to a possibly null String instance based on ProtoBuf's - * rules for optional arguments. - * - *

    This helper converts an String instance from a ProtoBuf object into a possibly null - * String. In ProtoBuf objects, String fields are not nullable, but an empty String field is - * considered to be "missing". - * - *

    The internal Conductor APIs expect missing arguments to be passed as null values, so this - * helper performs such conversion. - * - * @param str a string from a ProtoBuf object - * @return the original string, or null - */ - String optional(@Nonnull String str) { - return str.isEmpty() ? null : str; - } - - /** - * Check if a given non-null String instance is "missing" according to ProtoBuf's missing field - * rules. If the String is missing, the given default value will be returned. Otherwise, the - * string itself will be returned. - * - * @param str the input String - * @param defaults the default value for the string - * @return 'str' if it is not empty according to ProtoBuf rules; 'defaults' otherwise - */ - String optionalOr(@Nonnull String str, String defaults) { - return str.isEmpty() ? defaults : str; - } - - /** - * Convert a non-null Integer instance to a possibly null Integer instance based on ProtoBuf's - * rules for optional arguments. - * - *

    This helper converts an Integer instance from a ProtoBuf object into a possibly null - * Integer. In ProtoBuf objects, Integer fields are not nullable, but a zero-value Integer field - * is considered to be "missing". - * - *

    The internal Conductor APIs expect missing arguments to be passed as null values, so this - * helper performs such conversion. - * - * @param i an Integer from a ProtoBuf object - * @return the original Integer, or null - */ - Integer optional(@Nonnull Integer i) { - return i == 0 ? null : i; - } - - /** - * Check if a given non-null Integer instance is "missing" according to ProtoBuf's missing field - * rules. If the Integer is missing (i.e. if it has a zero-value), the given default value will - * be returned. Otherwise, the Integer itself will be returned. - * - * @param i the input Integer - * @param defaults the default value for the Integer - * @return 'i' if it is not a zero-value according to ProtoBuf rules; 'defaults' otherwise - */ - Integer optionalOr(@Nonnull Integer i, int defaults) { - return i == 0 ? defaults : i; - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java deleted file mode 100644 index 6bd26d2de..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import org.springframework.stereotype.Service; - -import io.grpc.health.v1.HealthCheckRequest; -import io.grpc.health.v1.HealthCheckResponse; -import io.grpc.health.v1.HealthGrpc; -import io.grpc.stub.StreamObserver; - -@Service("grpcHealthService") -public class HealthServiceImpl extends HealthGrpc.HealthImplBase { - - // SBMTODO: Move this Spring boot health check - @Override - public void check( - HealthCheckRequest request, StreamObserver responseObserver) { - responseObserver.onNext( - HealthCheckResponse.newBuilder() - .setStatus(HealthCheckResponse.ServingStatus.SERVING) - .build()); - responseObserver.onCompleted(); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java deleted file mode 100644 index 32aaeb0bb..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.List; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.grpc.MetadataServiceGrpc; -import com.netflix.conductor.grpc.MetadataServicePb; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.proto.TaskDefPb; -import com.netflix.conductor.proto.WorkflowDefPb; -import com.netflix.conductor.service.MetadataService; - -import io.grpc.Status; -import io.grpc.stub.StreamObserver; - -@Service("grpcMetadataService") -public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class); - private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; - private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); - - private final MetadataService service; - - public MetadataServiceImpl(MetadataService service) { - this.service = service; - } - - @Override - public void createWorkflow( - MetadataServicePb.CreateWorkflowRequest req, - StreamObserver response) { - WorkflowDef workflow = PROTO_MAPPER.fromProto(req.getWorkflow()); - service.registerWorkflowDef(workflow); - response.onNext(MetadataServicePb.CreateWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void updateWorkflows( 
- MetadataServicePb.UpdateWorkflowsRequest req, - StreamObserver response) { - List workflows = - req.getDefsList().stream() - .map(PROTO_MAPPER::fromProto) - .collect(Collectors.toList()); - - service.updateWorkflowDef(workflows); - response.onNext(MetadataServicePb.UpdateWorkflowsResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void getWorkflow( - MetadataServicePb.GetWorkflowRequest req, - StreamObserver response) { - try { - WorkflowDef workflowDef = - service.getWorkflowDef(req.getName(), GRPC_HELPER.optional(req.getVersion())); - WorkflowDefPb.WorkflowDef workflow = PROTO_MAPPER.toProto(workflowDef); - response.onNext( - MetadataServicePb.GetWorkflowResponse.newBuilder() - .setWorkflow(workflow) - .build()); - response.onCompleted(); - } catch (ApplicationException e) { - // TODO replace this with gRPC exception interceptor. - response.onError( - Status.NOT_FOUND - .withDescription("No such workflow found by name=" + req.getName()) - .asRuntimeException()); - } - } - - @Override - public void createTasks( - MetadataServicePb.CreateTasksRequest req, - StreamObserver response) { - service.registerTaskDef( - req.getDefsList().stream() - .map(PROTO_MAPPER::fromProto) - .collect(Collectors.toList())); - response.onNext(MetadataServicePb.CreateTasksResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void updateTask( - MetadataServicePb.UpdateTaskRequest req, - StreamObserver response) { - TaskDef task = PROTO_MAPPER.fromProto(req.getTask()); - service.updateTaskDef(task); - response.onNext(MetadataServicePb.UpdateTaskResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void getTask( - MetadataServicePb.GetTaskRequest req, - StreamObserver response) { - TaskDef def = service.getTaskDef(req.getTaskType()); - if (def != null) { - TaskDefPb.TaskDef task = PROTO_MAPPER.toProto(def); - response.onNext(MetadataServicePb.GetTaskResponse.newBuilder().setTask(task).build()); 
- response.onCompleted(); - } else { - response.onError( - Status.NOT_FOUND - .withDescription( - "No such TaskDef found by taskType=" + req.getTaskType()) - .asRuntimeException()); - } - } - - @Override - public void deleteTask( - MetadataServicePb.DeleteTaskRequest req, - StreamObserver response) { - service.unregisterTaskDef(req.getTaskType()); - response.onNext(MetadataServicePb.DeleteTaskResponse.getDefaultInstance()); - response.onCompleted(); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java deleted file mode 100644 index aaee4ac1b..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.TaskServiceGrpc; -import com.netflix.conductor.grpc.TaskServicePb; -import com.netflix.conductor.proto.TaskPb; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.TaskService; - -import io.grpc.Status; -import io.grpc.stub.StreamObserver; - -@Service("grpcTaskService") -public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); - private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; - private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); - - private static final int POLL_TIMEOUT_MS = 100; - private static final int MAX_POLL_TIMEOUT_MS = 5000; - - private final TaskService taskService; - private final int maxSearchSize; - private final ExecutionService executionService; - - public TaskServiceImpl( - ExecutionService executionService, - TaskService taskService, - 
@Value("${workflow.max.search.size:5000}") int maxSearchSize) { - this.executionService = executionService; - this.taskService = taskService; - this.maxSearchSize = maxSearchSize; - } - - @Override - public void poll( - TaskServicePb.PollRequest req, StreamObserver response) { - try { - List tasks = - executionService.poll( - req.getTaskType(), - req.getWorkerId(), - GRPC_HELPER.optional(req.getDomain()), - 1, - POLL_TIMEOUT_MS); - if (!tasks.isEmpty()) { - TaskPb.Task t = PROTO_MAPPER.toProto(tasks.get(0)); - response.onNext(TaskServicePb.PollResponse.newBuilder().setTask(t).build()); - } - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void batchPoll( - TaskServicePb.BatchPollRequest req, StreamObserver response) { - final int count = GRPC_HELPER.optionalOr(req.getCount(), 1); - final int timeout = GRPC_HELPER.optionalOr(req.getTimeout(), POLL_TIMEOUT_MS); - - if (timeout > MAX_POLL_TIMEOUT_MS) { - response.onError( - Status.INVALID_ARGUMENT - .withDescription( - "longpoll timeout cannot be longer than " - + MAX_POLL_TIMEOUT_MS - + "ms") - .asRuntimeException()); - return; - } - - try { - List polledTasks = - taskService.batchPoll( - req.getTaskType(), - req.getWorkerId(), - GRPC_HELPER.optional(req.getDomain()), - count, - timeout); - LOGGER.info("polled tasks: " + polledTasks); - polledTasks.stream().map(PROTO_MAPPER::toProto).forEach(response::onNext); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void updateTask( - TaskServicePb.UpdateTaskRequest req, - StreamObserver response) { - try { - TaskResult task = PROTO_MAPPER.fromProto(req.getResult()); - taskService.updateTask(task); - - response.onNext( - TaskServicePb.UpdateTaskResponse.newBuilder() - .setTaskId(task.getTaskId()) - .build()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void addLog( 
- TaskServicePb.AddLogRequest req, - StreamObserver response) { - taskService.log(req.getTaskId(), req.getLog()); - response.onNext(TaskServicePb.AddLogResponse.getDefaultInstance()); - response.onCompleted(); - } - - @Override - public void getTaskLogs( - TaskServicePb.GetTaskLogsRequest req, - StreamObserver response) { - List logs = taskService.getTaskLogs(req.getTaskId()); - response.onNext( - TaskServicePb.GetTaskLogsResponse.newBuilder() - .addAllLogs(logs.stream().map(PROTO_MAPPER::toProto)::iterator) - .build()); - response.onCompleted(); - } - - @Override - public void getTask( - TaskServicePb.GetTaskRequest req, - StreamObserver response) { - try { - Task task = taskService.getTask(req.getTaskId()); - if (task == null) { - response.onError( - Status.NOT_FOUND - .withDescription("No such task found by id=" + req.getTaskId()) - .asRuntimeException()); - } else { - response.onNext( - TaskServicePb.GetTaskResponse.newBuilder() - .setTask(PROTO_MAPPER.toProto(task)) - .build()); - response.onCompleted(); - } - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void getQueueSizesForTasks( - TaskServicePb.QueueSizesRequest req, - StreamObserver response) { - Map sizes = taskService.getTaskQueueSizes(req.getTaskTypesList()); - response.onNext( - TaskServicePb.QueueSizesResponse.newBuilder().putAllQueueForTask(sizes).build()); - response.onCompleted(); - } - - @Override - public void getQueueInfo( - TaskServicePb.QueueInfoRequest req, - StreamObserver response) { - Map queueInfo = taskService.getAllQueueDetails(); - - response.onNext( - TaskServicePb.QueueInfoResponse.newBuilder().putAllQueues(queueInfo).build()); - response.onCompleted(); - } - - @Override - public void getQueueAllInfo( - TaskServicePb.QueueAllInfoRequest req, - StreamObserver response) { - Map>> info = taskService.allVerbose(); - TaskServicePb.QueueAllInfoResponse.Builder queuesBuilder = - TaskServicePb.QueueAllInfoResponse.newBuilder(); - - for 
(Map.Entry>> queue : info.entrySet()) { - final String queueName = queue.getKey(); - final Map> queueShards = queue.getValue(); - - TaskServicePb.QueueAllInfoResponse.QueueInfo.Builder queueInfoBuilder = - TaskServicePb.QueueAllInfoResponse.QueueInfo.newBuilder(); - - for (Map.Entry> shard : queueShards.entrySet()) { - final String shardName = shard.getKey(); - final Map shardInfo = shard.getValue(); - - // FIXME: make shardInfo an actual type - // shardInfo is an immutable map with predefined keys, so we can always - // access 'size' and 'uacked'. It would be better if shardInfo - // were actually a POJO. - queueInfoBuilder.putShards( - shardName, - TaskServicePb.QueueAllInfoResponse.ShardInfo.newBuilder() - .setSize(shardInfo.get("size")) - .setUacked(shardInfo.get("uacked")) - .build()); - } - - queuesBuilder.putQueues(queueName, queueInfoBuilder.build()); - } - - response.onNext(queuesBuilder.build()); - response.onCompleted(); - } - - @Override - public void search( - SearchPb.Request req, StreamObserver response) { - final int start = req.getStart(); - final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize); - final String sort = req.getSort(); - final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*"); - final String query = req.getQuery(); - if (size > maxSearchSize) { - response.onError( - Status.INVALID_ARGUMENT - .withDescription( - "Cannot return more than " + maxSearchSize + " results") - .asRuntimeException()); - return; - } - SearchResult searchResult = - taskService.search(start, size, sort, freeText, query); - response.onNext( - TaskServicePb.TaskSummarySearchResult.newBuilder() - .setTotalHits(searchResult.getTotalHits()) - .addAllResults( - searchResult.getResults().stream().map(PROTO_MAPPER::toProto) - ::iterator) - .build()); - response.onCompleted(); - } - - @Override - public void searchV2( - SearchPb.Request req, StreamObserver response) { - final int start = req.getStart(); - final int size = 
GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize); - final String sort = req.getSort(); - final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*"); - final String query = req.getQuery(); - - if (size > maxSearchSize) { - response.onError( - Status.INVALID_ARGUMENT - .withDescription( - "Cannot return more than " + maxSearchSize + " results") - .asRuntimeException()); - return; - } - - SearchResult searchResult = taskService.searchV2(start, size, sort, freeText, query); - response.onNext( - TaskServicePb.TaskSearchResult.newBuilder() - .setTotalHits(searchResult.getTotalHits()) - .addAllResults( - searchResult.getResults().stream().map(PROTO_MAPPER::toProto) - ::iterator) - .build()); - response.onCompleted(); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java deleted file mode 100644 index 42493c552..000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ /dev/null @@ -1,395 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Service; - -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.grpc.ProtoMapper; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.WorkflowServiceGrpc; -import com.netflix.conductor.grpc.WorkflowServicePb; -import com.netflix.conductor.proto.RerunWorkflowRequestPb; -import com.netflix.conductor.proto.StartWorkflowRequestPb; -import com.netflix.conductor.proto.WorkflowPb; -import com.netflix.conductor.service.WorkflowService; - -import io.grpc.Status; -import io.grpc.stub.StreamObserver; - -@Service("grpcWorkflowService") -public class WorkflowServiceImpl extends WorkflowServiceGrpc.WorkflowServiceImplBase { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); - private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; - private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); - - private final WorkflowService workflowService; - private final int 
maxSearchSize; - - public WorkflowServiceImpl( - WorkflowService workflowService, - @Value("${workflow.max.search.size:5000}") int maxSearchSize) { - this.workflowService = workflowService; - this.maxSearchSize = maxSearchSize; - } - - @Override - public void startWorkflow( - StartWorkflowRequestPb.StartWorkflowRequest pbRequest, - StreamObserver response) { - - // TODO: better handling of optional 'version' - final StartWorkflowRequest request = PROTO_MAPPER.fromProto(pbRequest); - try { - String id = - workflowService.startWorkflow( - pbRequest.getName(), - GRPC_HELPER.optional(request.getVersion()), - request.getCorrelationId(), - request.getPriority(), - request.getInput(), - request.getExternalInputPayloadStoragePath(), - request.getTaskToDomain(), - request.getWorkflowDef()); - - response.onNext( - WorkflowServicePb.StartWorkflowResponse.newBuilder().setWorkflowId(id).build()); - response.onCompleted(); - } catch (ApplicationException ae) { - if (ae.getCode() == Code.NOT_FOUND) { - response.onError( - Status.NOT_FOUND - .withDescription( - "No such workflow found by name=" + request.getName()) - .asRuntimeException()); - } else { - GRPC_HELPER.onError(response, ae); - } - } - } - - @Override - public void getWorkflows( - WorkflowServicePb.GetWorkflowsRequest req, - StreamObserver response) { - final String name = req.getName(); - final boolean includeClosed = req.getIncludeClosed(); - final boolean includeTasks = req.getIncludeTasks(); - - WorkflowServicePb.GetWorkflowsResponse.Builder builder = - WorkflowServicePb.GetWorkflowsResponse.newBuilder(); - - for (String correlationId : req.getCorrelationIdList()) { - List workflows = - workflowService.getWorkflows(name, correlationId, includeClosed, includeTasks); - builder.putWorkflowsById( - correlationId, - WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder() - .addAllWorkflows( - workflows.stream().map(PROTO_MAPPER::toProto)::iterator) - .build()); - } - - response.onNext(builder.build()); - 
response.onCompleted(); - } - - @Override - public void getWorkflowStatus( - WorkflowServicePb.GetWorkflowStatusRequest req, - StreamObserver response) { - try { - Workflow workflow = - workflowService.getExecutionStatus(req.getWorkflowId(), req.getIncludeTasks()); - response.onNext(PROTO_MAPPER.toProto(workflow)); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void removeWorkflow( - WorkflowServicePb.RemoveWorkflowRequest req, - StreamObserver response) { - try { - workflowService.deleteWorkflow(req.getWorkflodId(), req.getArchiveWorkflow()); - response.onNext(WorkflowServicePb.RemoveWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void getRunningWorkflows( - WorkflowServicePb.GetRunningWorkflowsRequest req, - StreamObserver response) { - try { - List workflowIds = - workflowService.getRunningWorkflows( - req.getName(), req.getVersion(), req.getStartTime(), req.getEndTime()); - - response.onNext( - WorkflowServicePb.GetRunningWorkflowsResponse.newBuilder() - .addAllWorkflowIds(workflowIds) - .build()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void decideWorkflow( - WorkflowServicePb.DecideWorkflowRequest req, - StreamObserver response) { - try { - workflowService.decideWorkflow(req.getWorkflowId()); - response.onNext(WorkflowServicePb.DecideWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void pauseWorkflow( - WorkflowServicePb.PauseWorkflowRequest req, - StreamObserver response) { - try { - workflowService.pauseWorkflow(req.getWorkflowId()); - response.onNext(WorkflowServicePb.PauseWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - 
GRPC_HELPER.onError(response, e); - } - } - - @Override - public void resumeWorkflow( - WorkflowServicePb.ResumeWorkflowRequest req, - StreamObserver response) { - try { - workflowService.resumeWorkflow(req.getWorkflowId()); - response.onNext(WorkflowServicePb.ResumeWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void skipTaskFromWorkflow( - WorkflowServicePb.SkipTaskRequest req, - StreamObserver response) { - try { - SkipTaskRequest skipTask = PROTO_MAPPER.fromProto(req.getRequest()); - - workflowService.skipTaskFromWorkflow( - req.getWorkflowId(), req.getTaskReferenceName(), skipTask); - response.onNext(WorkflowServicePb.SkipTaskResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void rerunWorkflow( - RerunWorkflowRequestPb.RerunWorkflowRequest req, - StreamObserver response) { - try { - String id = - workflowService.rerunWorkflow( - req.getReRunFromWorkflowId(), PROTO_MAPPER.fromProto(req)); - response.onNext( - WorkflowServicePb.RerunWorkflowResponse.newBuilder().setWorkflowId(id).build()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void restartWorkflow( - WorkflowServicePb.RestartWorkflowRequest req, - StreamObserver response) { - try { - workflowService.restartWorkflow(req.getWorkflowId(), req.getUseLatestDefinitions()); - response.onNext(WorkflowServicePb.RestartWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void retryWorkflow( - WorkflowServicePb.RetryWorkflowRequest req, - StreamObserver response) { - try { - workflowService.retryWorkflow(req.getWorkflowId(), req.getResumeSubworkflowTasks()); - 
response.onNext(WorkflowServicePb.RetryWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void resetWorkflowCallbacks( - WorkflowServicePb.ResetWorkflowCallbacksRequest req, - StreamObserver response) { - try { - workflowService.resetWorkflow(req.getWorkflowId()); - response.onNext(WorkflowServicePb.ResetWorkflowCallbacksResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - @Override - public void terminateWorkflow( - WorkflowServicePb.TerminateWorkflowRequest req, - StreamObserver response) { - try { - workflowService.terminateWorkflow(req.getWorkflowId(), req.getReason()); - response.onNext(WorkflowServicePb.TerminateWorkflowResponse.getDefaultInstance()); - response.onCompleted(); - } catch (Exception e) { - GRPC_HELPER.onError(response, e); - } - } - - private void doSearch( - boolean searchByTask, - SearchPb.Request req, - StreamObserver response) { - final int start = req.getStart(); - final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize); - final List sort = convertSort(req.getSort()); - final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*"); - final String query = req.getQuery(); - - if (size > maxSearchSize) { - response.onError( - Status.INVALID_ARGUMENT - .withDescription( - "Cannot return more than " + maxSearchSize + " results") - .asRuntimeException()); - return; - } - - SearchResult search; - if (searchByTask) { - search = workflowService.searchWorkflowsByTasks(start, size, sort, freeText, query); - } else { - search = workflowService.searchWorkflows(start, size, sort, freeText, query); - } - - response.onNext( - WorkflowServicePb.WorkflowSummarySearchResult.newBuilder() - .setTotalHits(search.getTotalHits()) - .addAllResults( - search.getResults().stream().map(PROTO_MAPPER::toProto)::iterator) - .build()); - response.onCompleted(); - } 
- - private void doSearchV2( - boolean searchByTask, - SearchPb.Request req, - StreamObserver response) { - final int start = req.getStart(); - final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize); - final List sort = convertSort(req.getSort()); - final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*"); - final String query = req.getQuery(); - - if (size > maxSearchSize) { - response.onError( - Status.INVALID_ARGUMENT - .withDescription( - "Cannot return more than " + maxSearchSize + " results") - .asRuntimeException()); - return; - } - - SearchResult search; - if (searchByTask) { - search = workflowService.searchWorkflowsByTasksV2(start, size, sort, freeText, query); - } else { - search = workflowService.searchWorkflowsV2(start, size, sort, freeText, query); - } - - response.onNext( - WorkflowServicePb.WorkflowSearchResult.newBuilder() - .setTotalHits(search.getTotalHits()) - .addAllResults( - search.getResults().stream().map(PROTO_MAPPER::toProto)::iterator) - .build()); - response.onCompleted(); - } - - private List convertSort(String sortStr) { - List list = new ArrayList<>(); - if (sortStr != null && sortStr.length() != 0) { - list = Arrays.asList(sortStr.split("\\|")); - } - return list; - } - - @Override - public void search( - SearchPb.Request request, - StreamObserver responseObserver) { - doSearch(false, request, responseObserver); - } - - @Override - public void searchByTasks( - SearchPb.Request request, - StreamObserver responseObserver) { - doSearch(true, request, responseObserver); - } - - @Override - public void searchV2( - SearchPb.Request request, - StreamObserver responseObserver) { - doSearchV2(false, request, responseObserver); - } - - @Override - public void searchByTasksV2( - SearchPb.Request request, - StreamObserver responseObserver) { - doSearchV2(true, request, responseObserver); - } -} diff --git a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java 
b/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java deleted file mode 100644 index 88967b17f..000000000 --- a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -public class HealthServiceImplTest { - - // SBMTODO: Move this Spring boot health check - // @Rule - // public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); - // - // @Rule - // public ExpectedException thrown = ExpectedException.none(); - // - // @Test - // public void healthServing() throws Exception { - // // Generate a unique in-process server name. - // String serverName = InProcessServerBuilder.generateName(); - // HealthCheckAggregator hca = mock(HealthCheckAggregator.class); - // CompletableFuture hcsf = mock(CompletableFuture.class); - // HealthCheckStatus hcs = mock(HealthCheckStatus.class); - // when(hcs.isHealthy()).thenReturn(true); - // when(hcsf.get()).thenReturn(hcs); - // when(hca.check()).thenReturn(hcsf); - // HealthServiceImpl healthyService = new HealthServiceImpl(hca); - // - // addService(serverName, healthyService); - // HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( - // // Create a client channel and register for automatic graceful shutdown. - // - // grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); - // - // - // HealthCheckResponse reply = - // blockingStub.check(HealthCheckRequest.newBuilder().build()); - // - // assertEquals(HealthCheckResponse.ServingStatus.SERVING, reply.getStatus()); - // } - // - // @Test - // public void healthNotServing() throws Exception { - // // Generate a unique in-process server name. 
- // String serverName = InProcessServerBuilder.generateName(); - // HealthCheckAggregator hca = mock(HealthCheckAggregator.class); - // CompletableFuture hcsf = mock(CompletableFuture.class); - // HealthCheckStatus hcs = mock(HealthCheckStatus.class); - // when(hcs.isHealthy()).thenReturn(false); - // when(hcsf.get()).thenReturn(hcs); - // when(hca.check()).thenReturn(hcsf); - // HealthServiceImpl healthyService = new HealthServiceImpl(hca); - // - // addService(serverName, healthyService); - // HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( - // // Create a client channel and register for automatic graceful shutdown. - // - // grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); - // - // - // HealthCheckResponse reply = - // blockingStub.check(HealthCheckRequest.newBuilder().build()); - // - // assertEquals(HealthCheckResponse.ServingStatus.NOT_SERVING, reply.getStatus()); - // } - // - // @Test - // public void healthException() throws Exception { - // // Generate a unique in-process server name. - // String serverName = InProcessServerBuilder.generateName(); - // HealthCheckAggregator hca = mock(HealthCheckAggregator.class); - // CompletableFuture hcsf = mock(CompletableFuture.class); - // when(hcsf.get()).thenThrow(InterruptedException.class); - // when(hca.check()).thenReturn(hcsf); - // HealthServiceImpl healthyService = new HealthServiceImpl(hca); - // - // addService(serverName, healthyService); - // HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( - // // Create a client channel and register for automatic graceful shutdown. 
- // - // grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); - // - // thrown.expect(StatusRuntimeException.class); - // thrown.expect(hasProperty("status", is(Status.INTERNAL))); - // blockingStub.check(HealthCheckRequest.newBuilder().build()); - // - // } - // - // private void addService(String name, BindableService service) throws Exception { - // // Create a server, add service, start, and register for automatic graceful shutdown. - // grpcCleanup.register(InProcessServerBuilder - // .forName(name).directExecutor().addService(service).build().start()); - // } -} diff --git a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/TaskServiceImplTest.java b/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/TaskServiceImplTest.java deleted file mode 100644 index e5e7aa4eb..000000000 --- a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/TaskServiceImplTest.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.TaskServicePb; -import com.netflix.conductor.proto.TaskPb; -import com.netflix.conductor.proto.TaskSummaryPb; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.TaskService; - -import io.grpc.stub.StreamObserver; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.*; -import static org.mockito.MockitoAnnotations.initMocks; - -public class TaskServiceImplTest { - - @Mock private TaskService taskService; - - @Mock private ExecutionService executionService; - - private TaskServiceImpl taskServiceImpl; - - @Before - public void init() { - initMocks(this); - taskServiceImpl = new TaskServiceImpl(executionService, taskService, 5000); - } - - @Test - public void searchExceptionTest() throws InterruptedException { - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference throwable = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(50000) - .setSort("strings") - .setQuery("") - .setFreeText("*") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { 
- @Override - public void onNext(TaskServicePb.TaskSummarySearchResult value) {} - - @Override - public void onError(Throwable t) { - throwable.set(t); - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - taskServiceImpl.search(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - assertEquals( - "INVALID_ARGUMENT: Cannot return more than 5000 results", - throwable.get().getMessage()); - } - - @Test - public void searchV2ExceptionTest() throws InterruptedException { - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference throwable = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(50000) - .setSort("strings") - .setQuery("") - .setFreeText("*") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(TaskServicePb.TaskSearchResult value) {} - - @Override - public void onError(Throwable t) { - throwable.set(t); - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - taskServiceImpl.searchV2(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - assertEquals( - "INVALID_ARGUMENT: Cannot return more than 5000 results", - throwable.get().getMessage()); - } - - @Test - public void searchTest() throws InterruptedException { - - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("*") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(TaskServicePb.TaskSummarySearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - 
streamAlive.countDown(); - } - }; - - TaskSummary taskSummary = new TaskSummary(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(taskSummary)); - - when(taskService.search(1, 1, "strings", "*", "")).thenReturn(searchResult); - - taskServiceImpl.search(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - TaskServicePb.TaskSummarySearchResult taskSummarySearchResult = result.get(); - - assertEquals(1, taskSummarySearchResult.getTotalHits()); - assertEquals( - TaskSummaryPb.TaskSummary.newBuilder().build(), - taskSummarySearchResult.getResultsList().get(0)); - } - - @Test - public void searchV2Test() throws InterruptedException { - - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("*") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(TaskServicePb.TaskSearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - Task task = new Task(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(task)); - - when(taskService.searchV2(1, 1, "strings", "*", "")).thenReturn(searchResult); - - taskServiceImpl.searchV2(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - TaskServicePb.TaskSearchResult taskSearchResult = result.get(); - - assertEquals(1, taskSearchResult.getTotalHits()); - assertEquals( - TaskPb.Task.newBuilder().setCallbackFromWorker(true).build(), - taskSearchResult.getResultsList().get(0)); - } -} diff --git 
a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImplTest.java b/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImplTest.java deleted file mode 100644 index 17417d029..000000000 --- a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImplTest.java +++ /dev/null @@ -1,365 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc.server.service; - -import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.grpc.SearchPb; -import com.netflix.conductor.grpc.WorkflowServicePb; -import com.netflix.conductor.proto.WorkflowPb; -import com.netflix.conductor.proto.WorkflowSummaryPb; -import com.netflix.conductor.service.WorkflowService; - -import io.grpc.stub.StreamObserver; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.*; -import static org.mockito.MockitoAnnotations.initMocks; - -public class WorkflowServiceImplTest { - - private static final String WORKFLOW_ID = "anyWorkflowId"; - private static final Boolean RESUME_SUBWORKFLOW_TASKS = true; - - @Mock private WorkflowService workflowService; - - private WorkflowServiceImpl workflowServiceImpl; - - @Before - public void init() { - initMocks(this); - workflowServiceImpl = new WorkflowServiceImpl(workflowService, 5000); - } - - @SuppressWarnings("unchecked") - @Test - public void givenWorkflowIdWhenRetryWorkflowThenRetriedSuccessfully() { - // Given - WorkflowServicePb.RetryWorkflowRequest req = - WorkflowServicePb.RetryWorkflowRequest.newBuilder() - .setWorkflowId(WORKFLOW_ID) - .setResumeSubworkflowTasks(true) - .build(); - // When - workflowServiceImpl.retryWorkflow(req, 
mock(StreamObserver.class)); - // Then - verify(workflowService).retryWorkflow(WORKFLOW_ID, RESUME_SUBWORKFLOW_TASKS); - } - - @Test - public void searchExceptionTest() throws InterruptedException { - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference throwable = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(50000) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) {} - - @Override - public void onError(Throwable t) { - throwable.set(t); - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - workflowServiceImpl.search(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - assertEquals( - "INVALID_ARGUMENT: Cannot return more than 5000 results", - throwable.get().getMessage()); - } - - @Test - public void searchV2ExceptionTest() throws InterruptedException { - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference throwable = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(50000) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSearchResult value) {} - - @Override - public void onError(Throwable t) { - throwable.set(t); - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - workflowServiceImpl.searchV2(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - assertEquals( - "INVALID_ARGUMENT: Cannot return more than 5000 results", - throwable.get().getMessage()); - } - - @Test - public void searchTest() throws InterruptedException { - - 
CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = - new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - WorkflowSummary workflow = new WorkflowSummary(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(workflow)); - - when(workflowService.searchWorkflows( - anyInt(), anyInt(), anyList(), anyString(), anyString())) - .thenReturn(searchResult); - - workflowServiceImpl.search(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - WorkflowServicePb.WorkflowSummarySearchResult workflowSearchResult = result.get(); - - assertEquals(1, workflowSearchResult.getTotalHits()); - assertEquals( - WorkflowSummaryPb.WorkflowSummary.newBuilder().build(), - workflowSearchResult.getResultsList().get(0)); - } - - @Test - public void searchByTasksTest() throws InterruptedException { - - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = - new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - 
WorkflowSummary workflow = new WorkflowSummary(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(workflow)); - - when(workflowService.searchWorkflowsByTasks( - anyInt(), anyInt(), anyList(), anyString(), anyString())) - .thenReturn(searchResult); - - workflowServiceImpl.searchByTasks(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - WorkflowServicePb.WorkflowSummarySearchResult workflowSearchResult = result.get(); - - assertEquals(1, workflowSearchResult.getTotalHits()); - assertEquals( - WorkflowSummaryPb.WorkflowSummary.newBuilder().build(), - workflowSearchResult.getResultsList().get(0)); - } - - @Test - public void searchV2Test() throws InterruptedException { - - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - Workflow workflow = new Workflow(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(workflow)); - - when(workflowService.searchWorkflowsV2(1, 1, Collections.singletonList("strings"), "*", "")) - .thenReturn(searchResult); - - workflowServiceImpl.searchV2(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - WorkflowServicePb.WorkflowSearchResult workflowSearchResult = result.get(); - - assertEquals(1, workflowSearchResult.getTotalHits()); - assertEquals( - WorkflowPb.Workflow.newBuilder().build(), 
- workflowSearchResult.getResultsList().get(0)); - } - - @Test - public void searchByTasksV2Test() throws InterruptedException { - - CountDownLatch streamAlive = new CountDownLatch(1); - AtomicReference result = new AtomicReference<>(); - - SearchPb.Request req = - SearchPb.Request.newBuilder() - .setStart(1) - .setSize(1) - .setSort("strings") - .setQuery("") - .setFreeText("") - .build(); - - StreamObserver streamObserver = - new StreamObserver<>() { - @Override - public void onNext(WorkflowServicePb.WorkflowSearchResult value) { - result.set(value); - } - - @Override - public void onError(Throwable t) { - streamAlive.countDown(); - } - - @Override - public void onCompleted() { - streamAlive.countDown(); - } - }; - - Workflow workflow = new Workflow(); - SearchResult searchResult = new SearchResult<>(); - searchResult.setTotalHits(1); - searchResult.setResults(Collections.singletonList(workflow)); - - when(workflowService.searchWorkflowsByTasksV2( - 1, 1, Collections.singletonList("strings"), "*", "")) - .thenReturn(searchResult); - - workflowServiceImpl.searchByTasksV2(req, streamObserver); - - streamAlive.await(10, TimeUnit.MILLISECONDS); - - WorkflowServicePb.WorkflowSearchResult workflowSearchResult = result.get(); - - assertEquals(1, workflowSearchResult.getTotalHits()); - assertEquals( - WorkflowPb.Workflow.newBuilder().build(), - workflowSearchResult.getResultsList().get(0)); - } -} diff --git a/grpc-server/src/test/resources/log4j.properties b/grpc-server/src/test/resources/log4j.properties deleted file mode 100644 index a0818a9a7..000000000 --- a/grpc-server/src/test/resources/log4j.properties +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright 2019 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Set root logger level to WARN and its only appender to A1. -log4j.rootLogger=WARN, A1 - -# A1 is set to be a ConsoleAppender. -log4j.appender.A1=org.apache.log4j.ConsoleAppender - -# A1 uses PatternLayout. -log4j.appender.A1.layout=org.apache.log4j.PatternLayout -log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n \ No newline at end of file diff --git a/grpc/build.gradle b/grpc/build.gradle deleted file mode 100644 index 6d90ffaac..000000000 --- a/grpc/build.gradle +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -buildscript { - dependencies { - classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.18' - } -} - -plugins { - id 'java' - id 'idea' - id "com.google.protobuf" version "0.8.18" -} - -repositories{ - maven { url "https://dl.bintray.com/chaos-systems/mvn" } -} - -dependencies { - implementation project(':conductor-common') - - implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - implementation "io.grpc:grpc-protobuf:${revGrpc}" - implementation "io.grpc:grpc-stub:${revGrpc}" - implementation "javax.annotation:javax.annotation-api:1.3.2" -} - -protobuf { - protoc { - artifact = "com.google.protobuf:protoc:${revProtoBuf}" - } - plugins { - grpc { - artifact = "io.grpc:protoc-gen-grpc-java:${revGrpc}" - } - } - generateProtoTasks { - processResources.dependsOn extractProto - all()*.plugins { - grpc {} - } - } -} - -idea { - module { - sourceDirs += file("${projectDir}/build/generated/source/proto/main/java"); - sourceDirs += file("${projectDir}/build/generated/source/proto/main/grpc"); - } -} - -compileJava.dependsOn(tasks.getByPath(':conductor-common:protogen')) diff --git a/grpc/dependencies.lock b/grpc/dependencies.lock deleted file mode 100644 index e7b3a15ff..000000000 --- a/grpc/dependencies.lock +++ /dev/null @@ -1,448 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - 
"javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "compileProtoPath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - } - }, - "protobufToolsLocator_grpc": { - "io.grpc:protoc-gen-grpc-java": { - "locked": "1.47.0" - } - }, - "protobufToolsLocator_protoc": { - "com.google.protobuf:protoc": { - "locked": "3.13.0" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testCompileProtoPath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - 
"com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - 
"org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "javax.annotation:javax.annotation-api": { - "locked": "1.3.2" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java deleted file mode 100644 index 4a6fbfff3..000000000 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ /dev/null @@ -1,1383 +0,0 @@ -package com.netflix.conductor.grpc; - -import com.google.protobuf.Any; -import com.google.protobuf.Value; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import 
com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; -import com.netflix.conductor.proto.DynamicForkJoinTaskPb; -import com.netflix.conductor.proto.EventExecutionPb; -import com.netflix.conductor.proto.EventHandlerPb; -import com.netflix.conductor.proto.PollDataPb; -import com.netflix.conductor.proto.RerunWorkflowRequestPb; -import com.netflix.conductor.proto.SkipTaskRequestPb; -import com.netflix.conductor.proto.StartWorkflowRequestPb; -import com.netflix.conductor.proto.SubWorkflowParamsPb; -import com.netflix.conductor.proto.TaskDefPb; -import com.netflix.conductor.proto.TaskExecLogPb; -import com.netflix.conductor.proto.TaskPb; -import com.netflix.conductor.proto.TaskResultPb; -import com.netflix.conductor.proto.TaskSummaryPb; -import com.netflix.conductor.proto.WorkflowDefPb; -import com.netflix.conductor.proto.WorkflowPb; -import com.netflix.conductor.proto.WorkflowSummaryPb; -import com.netflix.conductor.proto.WorkflowTaskPb; -import java.lang.IllegalArgumentException; -import java.lang.Object; -import java.lang.String; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import javax.annotation.Generated; - -@Generated("com.netflix.conductor.annotationsprocessor.protogen") -public abstract class AbstractProtoMapper { - public DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { - DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); - if (from.getTaskName() != null) { - to.setTaskName( from.getTaskName() ); - } - if (from.getWorkflowName() != null) { - to.setWorkflowName( from.getWorkflowName() ); - } - if (from.getReferenceName() != 
null) { - to.setReferenceName( from.getReferenceName() ); - } - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getType() != null) { - to.setType( from.getType() ); - } - return to.build(); - } - - public DynamicForkJoinTask fromProto(DynamicForkJoinTaskPb.DynamicForkJoinTask from) { - DynamicForkJoinTask to = new DynamicForkJoinTask(); - to.setTaskName( from.getTaskName() ); - to.setWorkflowName( from.getWorkflowName() ); - to.setReferenceName( from.getReferenceName() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - to.setType( from.getType() ); - return to; - } - - public DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto(DynamicForkJoinTaskList from) { - DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to = DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder(); - for (DynamicForkJoinTask elem : from.getDynamicTasks()) { - to.addDynamicTasks( toProto(elem) ); - } - return to.build(); - } - - public DynamicForkJoinTaskList fromProto( - DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) { - DynamicForkJoinTaskList to = new DynamicForkJoinTaskList(); - to.setDynamicTasks( from.getDynamicTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - return to; - } - - public EventExecutionPb.EventExecution toProto(EventExecution from) { - EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); - if (from.getId() != null) { - to.setId( from.getId() ); - } - if (from.getMessageId() != null) { - to.setMessageId( from.getMessageId() ); - } - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getEvent() != null) { - to.setEvent( from.getEvent() ); - } - to.setCreated( from.getCreated() ); - if (from.getStatus() != null) { - to.setStatus( 
toProto( from.getStatus() ) ); - } - if (from.getAction() != null) { - to.setAction( toProto( from.getAction() ) ); - } - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public EventExecution fromProto(EventExecutionPb.EventExecution from) { - EventExecution to = new EventExecution(); - to.setId( from.getId() ); - to.setMessageId( from.getMessageId() ); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCreated( from.getCreated() ); - to.setStatus( fromProto( from.getStatus() ) ); - to.setAction( fromProto( from.getAction() ) ); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - return to; - } - - public EventExecutionPb.EventExecution.Status toProto(EventExecution.Status from) { - EventExecutionPb.EventExecution.Status to; - switch (from) { - case IN_PROGRESS: to = EventExecutionPb.EventExecution.Status.IN_PROGRESS; break; - case COMPLETED: to = EventExecutionPb.EventExecution.Status.COMPLETED; break; - case FAILED: to = EventExecutionPb.EventExecution.Status.FAILED; break; - case SKIPPED: to = EventExecutionPb.EventExecution.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status from) { - EventExecution.Status to; - switch (from) { - case IN_PROGRESS: to = EventExecution.Status.IN_PROGRESS; break; - case COMPLETED: to = EventExecution.Status.COMPLETED; break; - case FAILED: to = EventExecution.Status.FAILED; break; - case SKIPPED: to = EventExecution.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public EventHandlerPb.EventHandler toProto(EventHandler from) { - 
EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getEvent() != null) { - to.setEvent( from.getEvent() ); - } - if (from.getCondition() != null) { - to.setCondition( from.getCondition() ); - } - for (EventHandler.Action elem : from.getActions()) { - to.addActions( toProto(elem) ); - } - to.setActive( from.isActive() ); - if (from.getEvaluatorType() != null) { - to.setEvaluatorType( from.getEvaluatorType() ); - } - return to.build(); - } - - public EventHandler fromProto(EventHandlerPb.EventHandler from) { - EventHandler to = new EventHandler(); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCondition( from.getCondition() ); - to.setActions( from.getActionsList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setActive( from.getActive() ); - to.setEvaluatorType( from.getEvaluatorType() ); - return to; - } - - public EventHandlerPb.EventHandler.StartWorkflow toProto(EventHandler.StartWorkflow from) { - EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getVersion() != null) { - to.setVersion( from.getVersion() ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getInputMessage() != null) { - to.setInputMessage( toProto( from.getInputMessage() ) ); - } - to.putAllTaskToDomain( from.getTaskToDomain() ); - return to.build(); - } - - public EventHandler.StartWorkflow fromProto(EventHandlerPb.EventHandler.StartWorkflow from) { - EventHandler.StartWorkflow to = new EventHandler.StartWorkflow(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( 
from.getCorrelationId() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - if (from.hasInputMessage()) { - to.setInputMessage( fromProto( from.getInputMessage() ) ); - } - to.setTaskToDomain( from.getTaskToDomainMap() ); - return to; - } - - public EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { - EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); - if (from.getWorkflowId() != null) { - to.setWorkflowId( from.getWorkflowId() ); - } - if (from.getTaskRefName() != null) { - to.setTaskRefName( from.getTaskRefName() ); - } - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getOutputMessage() != null) { - to.setOutputMessage( toProto( from.getOutputMessage() ) ); - } - if (from.getTaskId() != null) { - to.setTaskId( from.getTaskId() ); - } - return to.build(); - } - - public EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetails from) { - EventHandler.TaskDetails to = new EventHandler.TaskDetails(); - to.setWorkflowId( from.getWorkflowId() ); - to.setTaskRefName( from.getTaskRefName() ); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - if (from.hasOutputMessage()) { - to.setOutputMessage( fromProto( from.getOutputMessage() ) ); - } - to.setTaskId( from.getTaskId() ); - return to; - } - - public EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { - EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); - if (from.getAction() != null) { - to.setAction( toProto( from.getAction() ) ); - } - if (from.getStart_workflow() != null) { - to.setStartWorkflow( 
toProto( from.getStart_workflow() ) ); - } - if (from.getComplete_task() != null) { - to.setCompleteTask( toProto( from.getComplete_task() ) ); - } - if (from.getFail_task() != null) { - to.setFailTask( toProto( from.getFail_task() ) ); - } - to.setExpandInlineJson( from.isExpandInlineJSON() ); - return to.build(); - } - - public EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { - EventHandler.Action to = new EventHandler.Action(); - to.setAction( fromProto( from.getAction() ) ); - if (from.hasStartWorkflow()) { - to.setStart_workflow( fromProto( from.getStartWorkflow() ) ); - } - if (from.hasCompleteTask()) { - to.setComplete_task( fromProto( from.getCompleteTask() ) ); - } - if (from.hasFailTask()) { - to.setFail_task( fromProto( from.getFailTask() ) ); - } - to.setExpandInlineJSON( from.getExpandInlineJson() ); - return to; - } - - public EventHandlerPb.EventHandler.Action.Type toProto(EventHandler.Action.Type from) { - EventHandlerPb.EventHandler.Action.Type to; - switch (from) { - case start_workflow: to = EventHandlerPb.EventHandler.Action.Type.START_WORKFLOW; break; - case complete_task: to = EventHandlerPb.EventHandler.Action.Type.COMPLETE_TASK; break; - case fail_task: to = EventHandlerPb.EventHandler.Action.Type.FAIL_TASK; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Type from) { - EventHandler.Action.Type to; - switch (from) { - case START_WORKFLOW: to = EventHandler.Action.Type.start_workflow; break; - case COMPLETE_TASK: to = EventHandler.Action.Type.complete_task; break; - case FAIL_TASK: to = EventHandler.Action.Type.fail_task; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public PollDataPb.PollData toProto(PollData from) { - PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); - if 
(from.getQueueName() != null) { - to.setQueueName( from.getQueueName() ); - } - if (from.getDomain() != null) { - to.setDomain( from.getDomain() ); - } - if (from.getWorkerId() != null) { - to.setWorkerId( from.getWorkerId() ); - } - to.setLastPollTime( from.getLastPollTime() ); - return to.build(); - } - - public PollData fromProto(PollDataPb.PollData from) { - PollData to = new PollData(); - to.setQueueName( from.getQueueName() ); - to.setDomain( from.getDomain() ); - to.setWorkerId( from.getWorkerId() ); - to.setLastPollTime( from.getLastPollTime() ); - return to; - } - - public RerunWorkflowRequestPb.RerunWorkflowRequest toProto(RerunWorkflowRequest from) { - RerunWorkflowRequestPb.RerunWorkflowRequest.Builder to = RerunWorkflowRequestPb.RerunWorkflowRequest.newBuilder(); - if (from.getReRunFromWorkflowId() != null) { - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - } - for (Map.Entry pair : from.getWorkflowInput().entrySet()) { - to.putWorkflowInput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getReRunFromTaskId() != null) { - to.setReRunFromTaskId( from.getReRunFromTaskId() ); - } - for (Map.Entry pair : from.getTaskInput().entrySet()) { - to.putTaskInput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - return to.build(); - } - - public RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { - RerunWorkflowRequest to = new RerunWorkflowRequest(); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - Map workflowInputMap = new HashMap(); - for (Map.Entry pair : from.getWorkflowInputMap().entrySet()) { - workflowInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setWorkflowInput(workflowInputMap); - to.setReRunFromTaskId( from.getReRunFromTaskId() ); - Map taskInputMap = new HashMap(); - for (Map.Entry pair : from.getTaskInputMap().entrySet()) { - taskInputMap.put( pair.getKey(), 
fromProto( pair.getValue() ) ); - } - to.setTaskInput(taskInputMap); - to.setCorrelationId( from.getCorrelationId() ); - return to; - } - - public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { - SkipTaskRequest to = new SkipTaskRequest(); - Map taskInputMap = new HashMap(); - for (Map.Entry pair : from.getTaskInputMap().entrySet()) { - taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setTaskInput(taskInputMap); - Map taskOutputMap = new HashMap(); - for (Map.Entry pair : from.getTaskOutputMap().entrySet()) { - taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setTaskOutput(taskOutputMap); - if (from.hasTaskInputMessage()) { - to.setTaskInputMessage( fromProto( from.getTaskInputMessage() ) ); - } - if (from.hasTaskOutputMessage()) { - to.setTaskOutputMessage( fromProto( from.getTaskOutputMessage() ) ); - } - return to; - } - - public StartWorkflowRequestPb.StartWorkflowRequest toProto(StartWorkflowRequest from) { - StartWorkflowRequestPb.StartWorkflowRequest.Builder to = StartWorkflowRequestPb.StartWorkflowRequest.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getVersion() != null) { - to.setVersion( from.getVersion() ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - to.putAllTaskToDomain( from.getTaskToDomain() ); - if (from.getWorkflowDef() != null) { - to.setWorkflowDef( toProto( from.getWorkflowDef() ) ); - } - if (from.getExternalInputPayloadStoragePath() != null) { - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - } - if (from.getPriority() != null) { - to.setPriority( from.getPriority() ); - } - return to.build(); - } - - public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { - StartWorkflowRequest to = 
new StartWorkflowRequest(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - to.setTaskToDomain( from.getTaskToDomainMap() ); - if (from.hasWorkflowDef()) { - to.setWorkflowDef( fromProto( from.getWorkflowDef() ) ); - } - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - to.setPriority( from.getPriority() ); - return to; - } - - public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { - SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getVersion() != null) { - to.setVersion( from.getVersion() ); - } - to.putAllTaskToDomain( from.getTaskToDomain() ); - if (from.getWorkflowDefinition() != null) { - to.setWorkflowDefinition( toProto( from.getWorkflowDefinition() ) ); - } - return to.build(); - } - - public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { - SubWorkflowParams to = new SubWorkflowParams(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setTaskToDomain( from.getTaskToDomainMap() ); - if (from.hasWorkflowDefinition()) { - to.setWorkflowDefinition( fromProto( from.getWorkflowDefinition() ) ); - } - return to; - } - - public TaskPb.Task toProto(Task from) { - TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); - if (from.getTaskType() != null) { - to.setTaskType( from.getTaskType() ); - } - if (from.getStatus() != null) { - to.setStatus( toProto( from.getStatus() ) ); - } - for (Map.Entry pair : from.getInputData().entrySet()) { - to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getReferenceTaskName() != null) { - to.setReferenceTaskName( 
from.getReferenceTaskName() ); - } - to.setRetryCount( from.getRetryCount() ); - to.setSeq( from.getSeq() ); - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - to.setPollCount( from.getPollCount() ); - if (from.getTaskDefName() != null) { - to.setTaskDefName( from.getTaskDefName() ); - } - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setEndTime( from.getEndTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); - if (from.getRetriedTaskId() != null) { - to.setRetriedTaskId( from.getRetriedTaskId() ); - } - to.setRetried( from.isRetried() ); - to.setExecuted( from.isExecuted() ); - to.setCallbackFromWorker( from.isCallbackFromWorker() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - if (from.getWorkflowInstanceId() != null) { - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - } - if (from.getWorkflowType() != null) { - to.setWorkflowType( from.getWorkflowType() ); - } - if (from.getTaskId() != null) { - to.setTaskId( from.getTaskId() ); - } - if (from.getReasonForIncompletion() != null) { - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - } - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - if (from.getWorkerId() != null) { - to.setWorkerId( from.getWorkerId() ); - } - for (Map.Entry pair : from.getOutputData().entrySet()) { - to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getWorkflowTask() != null) { - to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); - } - if (from.getDomain() != null) { - to.setDomain( from.getDomain() ); - } - if (from.getInputMessage() != null) { - to.setInputMessage( toProto( from.getInputMessage() ) ); - } - if (from.getOutputMessage() != null) { - to.setOutputMessage( toProto( from.getOutputMessage() ) ); - } - to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); - 
to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); - if (from.getExternalInputPayloadStoragePath() != null) { - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - } - if (from.getExternalOutputPayloadStoragePath() != null) { - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - } - to.setWorkflowPriority( from.getWorkflowPriority() ); - if (from.getExecutionNameSpace() != null) { - to.setExecutionNameSpace( from.getExecutionNameSpace() ); - } - if (from.getIsolationGroupId() != null) { - to.setIsolationGroupId( from.getIsolationGroupId() ); - } - to.setIteration( from.getIteration() ); - if (from.getSubWorkflowId() != null) { - to.setSubWorkflowId( from.getSubWorkflowId() ); - } - to.setSubworkflowChanged( from.isSubworkflowChanged() ); - return to.build(); - } - - public Task fromProto(TaskPb.Task from) { - Task to = new Task(); - to.setTaskType( from.getTaskType() ); - to.setStatus( fromProto( from.getStatus() ) ); - Map inputDataMap = new HashMap(); - for (Map.Entry pair : from.getInputDataMap().entrySet()) { - inputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputData(inputDataMap); - to.setReferenceTaskName( from.getReferenceTaskName() ); - to.setRetryCount( from.getRetryCount() ); - to.setSeq( from.getSeq() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setPollCount( from.getPollCount() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setEndTime( from.getEndTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); - to.setRetriedTaskId( from.getRetriedTaskId() ); - to.setRetried( from.getRetried() ); - to.setExecuted( from.getExecuted() ); - to.setCallbackFromWorker( from.getCallbackFromWorker() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - 
to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - Map outputDataMap = new HashMap(); - for (Map.Entry pair : from.getOutputDataMap().entrySet()) { - outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputData(outputDataMap); - if (from.hasWorkflowTask()) { - to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); - } - to.setDomain( from.getDomain() ); - if (from.hasInputMessage()) { - to.setInputMessage( fromProto( from.getInputMessage() ) ); - } - if (from.hasOutputMessage()) { - to.setOutputMessage( fromProto( from.getOutputMessage() ) ); - } - to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); - to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - to.setWorkflowPriority( from.getWorkflowPriority() ); - to.setExecutionNameSpace( from.getExecutionNameSpace() ); - to.setIsolationGroupId( from.getIsolationGroupId() ); - to.setIteration( from.getIteration() ); - to.setSubWorkflowId( from.getSubWorkflowId() ); - to.setSubworkflowChanged( from.getSubworkflowChanged() ); - return to; - } - - public TaskPb.Task.Status toProto(Task.Status from) { - TaskPb.Task.Status to; - switch (from) { - case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; - case CANCELED: to = TaskPb.Task.Status.CANCELED; break; - case FAILED: to = TaskPb.Task.Status.FAILED; break; - case FAILED_WITH_TERMINAL_ERROR: to = TaskPb.Task.Status.FAILED_WITH_TERMINAL_ERROR; break; - case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; - case COMPLETED_WITH_ERRORS: to = 
TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; - case SCHEDULED: to = TaskPb.Task.Status.SCHEDULED; break; - case TIMED_OUT: to = TaskPb.Task.Status.TIMED_OUT; break; - case SKIPPED: to = TaskPb.Task.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public Task.Status fromProto(TaskPb.Task.Status from) { - Task.Status to; - switch (from) { - case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; - case CANCELED: to = Task.Status.CANCELED; break; - case FAILED: to = Task.Status.FAILED; break; - case FAILED_WITH_TERMINAL_ERROR: to = Task.Status.FAILED_WITH_TERMINAL_ERROR; break; - case COMPLETED: to = Task.Status.COMPLETED; break; - case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; - case SCHEDULED: to = Task.Status.SCHEDULED; break; - case TIMED_OUT: to = Task.Status.TIMED_OUT; break; - case SKIPPED: to = Task.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskDefPb.TaskDef toProto(TaskDef from) { - TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getDescription() != null) { - to.setDescription( from.getDescription() ); - } - to.setRetryCount( from.getRetryCount() ); - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - to.addAllInputKeys( from.getInputKeys() ); - to.addAllOutputKeys( from.getOutputKeys() ); - if (from.getTimeoutPolicy() != null) { - to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); - } - if (from.getRetryLogic() != null) { - to.setRetryLogic( toProto( from.getRetryLogic() ) ); - } - to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - if (from.getConcurrentExecLimit() != null) { - to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); - } - for (Map.Entry pair : 
from.getInputTemplate().entrySet()) { - to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getRateLimitPerFrequency() != null) { - to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); - } - if (from.getRateLimitFrequencyInSeconds() != null) { - to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); - } - if (from.getIsolationGroupId() != null) { - to.setIsolationGroupId( from.getIsolationGroupId() ); - } - if (from.getExecutionNameSpace() != null) { - to.setExecutionNameSpace( from.getExecutionNameSpace() ); - } - if (from.getOwnerEmail() != null) { - to.setOwnerEmail( from.getOwnerEmail() ); - } - if (from.getPollTimeoutSeconds() != null) { - to.setPollTimeoutSeconds( from.getPollTimeoutSeconds() ); - } - if (from.getBackoffScaleFactor() != null) { - to.setBackoffScaleFactor( from.getBackoffScaleFactor() ); - } - return to.build(); - } - - public TaskDef fromProto(TaskDefPb.TaskDef from) { - TaskDef to = new TaskDef(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); - to.setRetryCount( from.getRetryCount() ); - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - to.setInputKeys( from.getInputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setOutputKeys( from.getOutputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); - to.setRetryLogic( fromProto( from.getRetryLogic() ) ); - to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); - Map inputTemplateMap = new HashMap(); - for (Map.Entry pair : from.getInputTemplateMap().entrySet()) { - inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputTemplate(inputTemplateMap); - to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); - 
to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); - to.setIsolationGroupId( from.getIsolationGroupId() ); - to.setExecutionNameSpace( from.getExecutionNameSpace() ); - to.setOwnerEmail( from.getOwnerEmail() ); - to.setPollTimeoutSeconds( from.getPollTimeoutSeconds() ); - to.setBackoffScaleFactor( from.getBackoffScaleFactor() ); - return to; - } - - public TaskDefPb.TaskDef.RetryLogic toProto(TaskDef.RetryLogic from) { - TaskDefPb.TaskDef.RetryLogic to; - switch (from) { - case FIXED: to = TaskDefPb.TaskDef.RetryLogic.FIXED; break; - case EXPONENTIAL_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; - case LINEAR_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.LINEAR_BACKOFF; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskDef.RetryLogic fromProto(TaskDefPb.TaskDef.RetryLogic from) { - TaskDef.RetryLogic to; - switch (from) { - case FIXED: to = TaskDef.RetryLogic.FIXED; break; - case EXPONENTIAL_BACKOFF: to = TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; - case LINEAR_BACKOFF: to = TaskDef.RetryLogic.LINEAR_BACKOFF; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskDefPb.TaskDef.TimeoutPolicy toProto(TaskDef.TimeoutPolicy from) { - TaskDefPb.TaskDef.TimeoutPolicy to; - switch (from) { - case RETRY: to = TaskDefPb.TaskDef.TimeoutPolicy.RETRY; break; - case TIME_OUT_WF: to = TaskDefPb.TaskDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = TaskDefPb.TaskDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { - TaskDef.TimeoutPolicy to; - switch (from) { - case RETRY: to = TaskDef.TimeoutPolicy.RETRY; break; - case TIME_OUT_WF: to = TaskDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = 
TaskDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { - TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); - if (from.getLog() != null) { - to.setLog( from.getLog() ); - } - if (from.getTaskId() != null) { - to.setTaskId( from.getTaskId() ); - } - to.setCreatedTime( from.getCreatedTime() ); - return to.build(); - } - - public TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { - TaskExecLog to = new TaskExecLog(); - to.setLog( from.getLog() ); - to.setTaskId( from.getTaskId() ); - to.setCreatedTime( from.getCreatedTime() ); - return to; - } - - public TaskResultPb.TaskResult toProto(TaskResult from) { - TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); - if (from.getWorkflowInstanceId() != null) { - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - } - if (from.getTaskId() != null) { - to.setTaskId( from.getTaskId() ); - } - if (from.getReasonForIncompletion() != null) { - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - } - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - if (from.getWorkerId() != null) { - to.setWorkerId( from.getWorkerId() ); - } - if (from.getStatus() != null) { - to.setStatus( toProto( from.getStatus() ) ); - } - for (Map.Entry pair : from.getOutputData().entrySet()) { - to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getOutputMessage() != null) { - to.setOutputMessage( toProto( from.getOutputMessage() ) ); - } - return to.build(); - } - - public TaskResult fromProto(TaskResultPb.TaskResult from) { - TaskResult to = new TaskResult(); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - 
to.setWorkerId( from.getWorkerId() ); - to.setStatus( fromProto( from.getStatus() ) ); - Map outputDataMap = new HashMap(); - for (Map.Entry pair : from.getOutputDataMap().entrySet()) { - outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputData(outputDataMap); - if (from.hasOutputMessage()) { - to.setOutputMessage( fromProto( from.getOutputMessage() ) ); - } - return to; - } - - public TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { - TaskResultPb.TaskResult.Status to; - switch (from) { - case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; - case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; - case FAILED_WITH_TERMINAL_ERROR: to = TaskResultPb.TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; - case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { - TaskResult.Status to; - switch (from) { - case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; - case FAILED: to = TaskResult.Status.FAILED; break; - case FAILED_WITH_TERMINAL_ERROR: to = TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; - case COMPLETED: to = TaskResult.Status.COMPLETED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public TaskSummaryPb.TaskSummary toProto(TaskSummary from) { - TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); - if (from.getWorkflowId() != null) { - to.setWorkflowId( from.getWorkflowId() ); - } - if (from.getWorkflowType() != null) { - to.setWorkflowType( from.getWorkflowType() ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - if (from.getScheduledTime() != null) { - to.setScheduledTime( from.getScheduledTime() ); - } - if (from.getStartTime() 
!= null) { - to.setStartTime( from.getStartTime() ); - } - if (from.getUpdateTime() != null) { - to.setUpdateTime( from.getUpdateTime() ); - } - if (from.getEndTime() != null) { - to.setEndTime( from.getEndTime() ); - } - if (from.getStatus() != null) { - to.setStatus( toProto( from.getStatus() ) ); - } - if (from.getReasonForIncompletion() != null) { - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - } - to.setExecutionTime( from.getExecutionTime() ); - to.setQueueWaitTime( from.getQueueWaitTime() ); - if (from.getTaskDefName() != null) { - to.setTaskDefName( from.getTaskDefName() ); - } - if (from.getTaskType() != null) { - to.setTaskType( from.getTaskType() ); - } - if (from.getInput() != null) { - to.setInput( from.getInput() ); - } - if (from.getOutput() != null) { - to.setOutput( from.getOutput() ); - } - if (from.getTaskId() != null) { - to.setTaskId( from.getTaskId() ); - } - if (from.getExternalInputPayloadStoragePath() != null) { - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - } - if (from.getExternalOutputPayloadStoragePath() != null) { - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - } - to.setWorkflowPriority( from.getWorkflowPriority() ); - return to.build(); - } - - public TaskSummary fromProto(TaskSummaryPb.TaskSummary from) { - TaskSummary to = new TaskSummary(); - to.setWorkflowId( from.getWorkflowId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setEndTime( from.getEndTime() ); - to.setStatus( fromProto( from.getStatus() ) ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setExecutionTime( from.getExecutionTime() ); - to.setQueueWaitTime( from.getQueueWaitTime() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setTaskType( 
from.getTaskType() ); - to.setInput( from.getInput() ); - to.setOutput( from.getOutput() ); - to.setTaskId( from.getTaskId() ); - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - to.setWorkflowPriority( from.getWorkflowPriority() ); - return to; - } - - public WorkflowPb.Workflow toProto(Workflow from) { - WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); - if (from.getStatus() != null) { - to.setStatus( toProto( from.getStatus() ) ); - } - to.setEndTime( from.getEndTime() ); - if (from.getWorkflowId() != null) { - to.setWorkflowId( from.getWorkflowId() ); - } - if (from.getParentWorkflowId() != null) { - to.setParentWorkflowId( from.getParentWorkflowId() ); - } - if (from.getParentWorkflowTaskId() != null) { - to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); - } - for (Task elem : from.getTasks()) { - to.addTasks( toProto(elem) ); - } - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - if (from.getReRunFromWorkflowId() != null) { - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - } - if (from.getReasonForIncompletion() != null) { - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - } - if (from.getEvent() != null) { - to.setEvent( from.getEvent() ); - } - to.putAllTaskToDomain( from.getTaskToDomain() ); - to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); - if (from.getWorkflowDefinition() != null) { - to.setWorkflowDefinition( toProto( from.getWorkflowDefinition() ) ); - } - if (from.getExternalInputPayloadStoragePath() != null) { - to.setExternalInputPayloadStoragePath( 
from.getExternalInputPayloadStoragePath() ); - } - if (from.getExternalOutputPayloadStoragePath() != null) { - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - } - to.setPriority( from.getPriority() ); - for (Map.Entry pair : from.getVariables().entrySet()) { - to.putVariables( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setLastRetriedTime( from.getLastRetriedTime() ); - return to.build(); - } - - public Workflow fromProto(WorkflowPb.Workflow from) { - Workflow to = new Workflow(); - to.setStatus( fromProto( from.getStatus() ) ); - to.setEndTime( from.getEndTime() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setParentWorkflowId( from.getParentWorkflowId() ); - to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); - to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - to.setCorrelationId( from.getCorrelationId() ); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setEvent( from.getEvent() ); - to.setTaskToDomain( from.getTaskToDomainMap() ); - to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); - if (from.hasWorkflowDefinition()) { - to.setWorkflowDefinition( fromProto( from.getWorkflowDefinition() ) ); - } - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - to.setPriority( from.getPriority() ); - 
Map variablesMap = new HashMap(); - for (Map.Entry pair : from.getVariablesMap().entrySet()) { - variablesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setVariables(variablesMap); - to.setLastRetriedTime( from.getLastRetriedTime() ); - return to; - } - - public WorkflowPb.Workflow.WorkflowStatus toProto(Workflow.WorkflowStatus from) { - WorkflowPb.Workflow.WorkflowStatus to; - switch (from) { - case RUNNING: to = WorkflowPb.Workflow.WorkflowStatus.RUNNING; break; - case COMPLETED: to = WorkflowPb.Workflow.WorkflowStatus.COMPLETED; break; - case FAILED: to = WorkflowPb.Workflow.WorkflowStatus.FAILED; break; - case TIMED_OUT: to = WorkflowPb.Workflow.WorkflowStatus.TIMED_OUT; break; - case TERMINATED: to = WorkflowPb.Workflow.WorkflowStatus.TERMINATED; break; - case PAUSED: to = WorkflowPb.Workflow.WorkflowStatus.PAUSED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from) { - Workflow.WorkflowStatus to; - switch (from) { - case RUNNING: to = Workflow.WorkflowStatus.RUNNING; break; - case COMPLETED: to = Workflow.WorkflowStatus.COMPLETED; break; - case FAILED: to = Workflow.WorkflowStatus.FAILED; break; - case TIMED_OUT: to = Workflow.WorkflowStatus.TIMED_OUT; break; - case TERMINATED: to = Workflow.WorkflowStatus.TERMINATED; break; - case PAUSED: to = Workflow.WorkflowStatus.PAUSED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { - WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getDescription() != null) { - to.setDescription( from.getDescription() ); - } - to.setVersion( from.getVersion() ); - for (WorkflowTask elem : from.getTasks()) { - to.addTasks( toProto(elem) ); - 
} - to.addAllInputParameters( from.getInputParameters() ); - for (Map.Entry pair : from.getOutputParameters().entrySet()) { - to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getFailureWorkflow() != null) { - to.setFailureWorkflow( from.getFailureWorkflow() ); - } - to.setSchemaVersion( from.getSchemaVersion() ); - to.setRestartable( from.isRestartable() ); - to.setWorkflowStatusListenerEnabled( from.isWorkflowStatusListenerEnabled() ); - if (from.getOwnerEmail() != null) { - to.setOwnerEmail( from.getOwnerEmail() ); - } - if (from.getTimeoutPolicy() != null) { - to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); - } - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - for (Map.Entry pair : from.getVariables().entrySet()) { - to.putVariables( pair.getKey(), toProto( pair.getValue() ) ); - } - for (Map.Entry pair : from.getInputTemplate().entrySet()) { - to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { - WorkflowDef to = new WorkflowDef(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); - to.setVersion( from.getVersion() ); - to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - Map outputParametersMap = new HashMap(); - for (Map.Entry pair : from.getOutputParametersMap().entrySet()) { - outputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputParameters(outputParametersMap); - to.setFailureWorkflow( from.getFailureWorkflow() ); - to.setSchemaVersion( from.getSchemaVersion() ); - to.setRestartable( from.getRestartable() ); - to.setWorkflowStatusListenerEnabled( from.getWorkflowStatusListenerEnabled() ); - to.setOwnerEmail( from.getOwnerEmail() ); - to.setTimeoutPolicy( 
fromProto( from.getTimeoutPolicy() ) ); - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - Map variablesMap = new HashMap(); - for (Map.Entry pair : from.getVariablesMap().entrySet()) { - variablesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setVariables(variablesMap); - Map inputTemplateMap = new HashMap(); - for (Map.Entry pair : from.getInputTemplateMap().entrySet()) { - inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputTemplate(inputTemplateMap); - return to; - } - - public WorkflowDefPb.WorkflowDef.TimeoutPolicy toProto(WorkflowDef.TimeoutPolicy from) { - WorkflowDefPb.WorkflowDef.TimeoutPolicy to; - switch (from) { - case TIME_OUT_WF: to = WorkflowDefPb.WorkflowDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = WorkflowDefPb.WorkflowDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public WorkflowDef.TimeoutPolicy fromProto(WorkflowDefPb.WorkflowDef.TimeoutPolicy from) { - WorkflowDef.TimeoutPolicy to; - switch (from) { - case TIME_OUT_WF: to = WorkflowDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = WorkflowDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { - WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder(); - if (from.getWorkflowType() != null) { - to.setWorkflowType( from.getWorkflowType() ); - } - to.setVersion( from.getVersion() ); - if (from.getWorkflowId() != null) { - to.setWorkflowId( from.getWorkflowId() ); - } - if (from.getCorrelationId() != null) { - to.setCorrelationId( from.getCorrelationId() ); - } - if (from.getStartTime() != null) { - to.setStartTime( from.getStartTime() ); - } - if (from.getUpdateTime() != null) { - to.setUpdateTime( from.getUpdateTime() 
); - } - if (from.getEndTime() != null) { - to.setEndTime( from.getEndTime() ); - } - if (from.getStatus() != null) { - to.setStatus( toProto( from.getStatus() ) ); - } - if (from.getInput() != null) { - to.setInput( from.getInput() ); - } - if (from.getOutput() != null) { - to.setOutput( from.getOutput() ); - } - if (from.getReasonForIncompletion() != null) { - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - } - to.setExecutionTime( from.getExecutionTime() ); - if (from.getEvent() != null) { - to.setEvent( from.getEvent() ); - } - if (from.getFailedReferenceTaskNames() != null) { - to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); - } - if (from.getExternalInputPayloadStoragePath() != null) { - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - } - if (from.getExternalOutputPayloadStoragePath() != null) { - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - } - to.setPriority( from.getPriority() ); - return to.build(); - } - - public WorkflowSummary fromProto(WorkflowSummaryPb.WorkflowSummary from) { - WorkflowSummary to = new WorkflowSummary(); - to.setWorkflowType( from.getWorkflowType() ); - to.setVersion( from.getVersion() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setStartTime( from.getStartTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setEndTime( from.getEndTime() ); - to.setStatus( fromProto( from.getStatus() ) ); - to.setInput( from.getInput() ); - to.setOutput( from.getOutput() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setExecutionTime( from.getExecutionTime() ); - to.setEvent( from.getEvent() ); - to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); - to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); - to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); - 
to.setPriority( from.getPriority() ); - return to; - } - - public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { - WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); - if (from.getName() != null) { - to.setName( from.getName() ); - } - if (from.getTaskReferenceName() != null) { - to.setTaskReferenceName( from.getTaskReferenceName() ); - } - if (from.getDescription() != null) { - to.setDescription( from.getDescription() ); - } - for (Map.Entry pair : from.getInputParameters().entrySet()) { - to.putInputParameters( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getType() != null) { - to.setType( from.getType() ); - } - if (from.getDynamicTaskNameParam() != null) { - to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); - } - if (from.getCaseValueParam() != null) { - to.setCaseValueParam( from.getCaseValueParam() ); - } - if (from.getCaseExpression() != null) { - to.setCaseExpression( from.getCaseExpression() ); - } - if (from.getScriptExpression() != null) { - to.setScriptExpression( from.getScriptExpression() ); - } - for (Map.Entry> pair : from.getDecisionCases().entrySet()) { - to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); - } - if (from.getDynamicForkTasksParam() != null) { - to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); - } - if (from.getDynamicForkTasksInputParamName() != null) { - to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); - } - for (WorkflowTask elem : from.getDefaultCase()) { - to.addDefaultCase( toProto(elem) ); - } - for (List elem : from.getForkTasks()) { - to.addForkTasks( toProto(elem) ); - } - to.setStartDelay( from.getStartDelay() ); - if (from.getSubWorkflowParam() != null) { - to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); - } - to.addAllJoinOn( from.getJoinOn() ); - if (from.getSink() != null) { - to.setSink( from.getSink() ); - } - to.setOptional( from.isOptional() ); - if 
(from.getTaskDefinition() != null) { - to.setTaskDefinition( toProto( from.getTaskDefinition() ) ); - } - if (from.isRateLimited() != null) { - to.setRateLimited( from.isRateLimited() ); - } - to.addAllDefaultExclusiveJoinTask( from.getDefaultExclusiveJoinTask() ); - if (from.isAsyncComplete() != null) { - to.setAsyncComplete( from.isAsyncComplete() ); - } - if (from.getLoopCondition() != null) { - to.setLoopCondition( from.getLoopCondition() ); - } - for (WorkflowTask elem : from.getLoopOver()) { - to.addLoopOver( toProto(elem) ); - } - if (from.getRetryCount() != null) { - to.setRetryCount( from.getRetryCount() ); - } - if (from.getEvaluatorType() != null) { - to.setEvaluatorType( from.getEvaluatorType() ); - } - if (from.getExpression() != null) { - to.setExpression( from.getExpression() ); - } - return to.build(); - } - - public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { - WorkflowTask to = new WorkflowTask(); - to.setName( from.getName() ); - to.setTaskReferenceName( from.getTaskReferenceName() ); - to.setDescription( from.getDescription() ); - Map inputParametersMap = new HashMap(); - for (Map.Entry pair : from.getInputParametersMap().entrySet()) { - inputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputParameters(inputParametersMap); - to.setType( from.getType() ); - to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); - to.setCaseValueParam( from.getCaseValueParam() ); - to.setCaseExpression( from.getCaseExpression() ); - to.setScriptExpression( from.getScriptExpression() ); - Map> decisionCasesMap = new HashMap>(); - for (Map.Entry pair : from.getDecisionCasesMap().entrySet()) { - decisionCasesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setDecisionCases(decisionCasesMap); - to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); - to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); - to.setDefaultCase( 
from.getDefaultCaseList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setForkTasks( from.getForkTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setStartDelay( from.getStartDelay() ); - if (from.hasSubWorkflowParam()) { - to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); - } - to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setSink( from.getSink() ); - to.setOptional( from.getOptional() ); - if (from.hasTaskDefinition()) { - to.setTaskDefinition( fromProto( from.getTaskDefinition() ) ); - } - to.setRateLimited( from.getRateLimited() ); - to.setDefaultExclusiveJoinTask( from.getDefaultExclusiveJoinTaskList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setAsyncComplete( from.getAsyncComplete() ); - to.setLoopCondition( from.getLoopCondition() ); - to.setLoopOver( from.getLoopOverList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setRetryCount( from.getRetryCount() ); - to.setEvaluatorType( from.getEvaluatorType() ); - to.setExpression( from.getExpression() ); - return to; - } - - public abstract WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List in); - - public abstract List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList in); - - public abstract Value toProto(Object in); - - public abstract Object fromProto(Value in); - - public abstract Any toProto(Any in); - - public abstract Any fromProto(Any in); -} diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java deleted file mode 100644 index 1b7022181..000000000 --- a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc; - -import com.google.protobuf.Any; -import com.google.protobuf.ListValue; -import com.google.protobuf.NullValue; -import com.google.protobuf.Struct; -import com.google.protobuf.Value; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.proto.WorkflowTaskPb; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -/** - * ProtoMapper implements conversion code between the internal models - * used by Conductor (POJOs) and their corresponding equivalents in - * the exposed Protocol Buffers interface. - * - * The vast majority of the mapping logic is implemented in the autogenerated - * {@link AbstractProtoMapper} class. This class only implements the custom - * logic for objects that need to be special cased in the API. - */ -public final class ProtoMapper extends AbstractProtoMapper { - public static final ProtoMapper INSTANCE = new ProtoMapper(); - private static final int NO_RETRY_VALUE = -1; - - private ProtoMapper() {} - - /** - * Convert an {@link Object} instance into its equivalent {@link Value} - * ProtoBuf object. - * - * The {@link Value} ProtoBuf message is a variant type that can define any - * value representable as a native JSON type. Consequently, this method expects - * the given {@link Object} instance to be a Java object instance of JSON-native - * value, namely: null, {@link Boolean}, {@link Double}, {@link String}, - * {@link Map}, {@link List}. - * - * Any other values will cause an exception to be thrown. 
- * See {@link ProtoMapper#fromProto(Value)} for the reverse mapping. - * - * @param val a Java object that can be represented natively in JSON - * @return an instance of a {@link Value} ProtoBuf message - */ - @Override - public Value toProto(Object val) { - Value.Builder builder = Value.newBuilder(); - - if (val == null) { - builder.setNullValue(NullValue.NULL_VALUE); - } else if (val instanceof Boolean) { - builder.setBoolValue((Boolean) val); - } else if (val instanceof Double) { - builder.setNumberValue((Double) val); - } else if (val instanceof String) { - builder.setStringValue((String) val); - } else if (val instanceof Map) { - Map map = (Map) val; - Struct.Builder struct = Struct.newBuilder(); - for (Map.Entry pair : map.entrySet()) { - struct.putFields(pair.getKey(), toProto(pair.getValue())); - } - builder.setStructValue(struct.build()); - } else if (val instanceof List) { - ListValue.Builder list = ListValue.newBuilder(); - for (Object obj : (List)val) { - list.addValues(toProto(obj)); - } - builder.setListValue(list.build()); - } else { - throw new ClassCastException("cannot map to Value type: "+val); - } - return builder.build(); - } - - /** - * Convert a ProtoBuf {@link Value} message into its native Java object - * equivalent. - * - * See {@link ProtoMapper#toProto(Object)} for the reverse mapping and the - * possible values that can be returned from this method. 
- * - * @param any an instance of a ProtoBuf {@link Value} message - * @return a native Java object representing the value - */ - @Override - public Object fromProto(Value any) { - switch (any.getKindCase()) { - case NULL_VALUE: - return null; - case BOOL_VALUE: - return any.getBoolValue(); - case NUMBER_VALUE: - return any.getNumberValue(); - case STRING_VALUE: - return any.getStringValue(); - case STRUCT_VALUE: - Struct struct = any.getStructValue(); - Map map = new HashMap<>(); - for (Map.Entry pair : struct.getFieldsMap().entrySet()) { - map.put(pair.getKey(), fromProto(pair.getValue())); - } - return map; - case LIST_VALUE: - List list = new ArrayList<>(); - for (Value val : any.getListValue().getValuesList()) { - list.add(fromProto(val)); - } - return list; - default: - throw new ClassCastException("unset Value element: "+any); - } - } - - /** - * Convert a WorkflowTaskList message wrapper into a {@link List} instance - * with its contents. - * - * @param list an instance of a ProtoBuf message - * @return a list with the contents of the message - */ - @Override - public List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) { - return list.getTasksList().stream().map(this::fromProto).collect(Collectors.toList()); - } - - @Override public WorkflowTaskPb.WorkflowTask toProto(final WorkflowTask from) { - final WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(super.toProto(from)); - if (from.getRetryCount() == null) { - to.setRetryCount(NO_RETRY_VALUE); - } - return to.build(); - } - - @Override public WorkflowTask fromProto(final WorkflowTaskPb.WorkflowTask from) { - final WorkflowTask workflowTask = super.fromProto(from); - if (from.getRetryCount() == NO_RETRY_VALUE) { - workflowTask.setRetryCount(null); - } - return workflowTask; - } - - - - /** - * Convert a list of {@link WorkflowTask} instances into a ProtoBuf wrapper object. 
- * - * @param list a list of {@link WorkflowTask} instances - * @return a ProtoBuf message wrapping the contents of the list - */ - @Override - public WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List list) { - return WorkflowTaskPb.WorkflowTask.WorkflowTaskList.newBuilder() - .addAllTasks(list.stream().map(this::toProto)::iterator) - .build(); - } - - @Override - public Any toProto(Any in) { - return in; - } - - @Override - public Any fromProto(Any in) { - return in; - } -} diff --git a/grpc/src/main/proto/grpc/event_service.proto b/grpc/src/main/proto/grpc/event_service.proto deleted file mode 100644 index e7a61e9c5..000000000 --- a/grpc/src/main/proto/grpc/event_service.proto +++ /dev/null @@ -1,50 +0,0 @@ -syntax = "proto3"; -package conductor.grpc.events; - -import "model/eventhandler.proto"; - -option java_package = "com.netflix.conductor.grpc"; -option java_outer_classname = "EventServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/events"; - -service EventService { - // POST / - rpc AddEventHandler(AddEventHandlerRequest) returns (AddEventHandlerResponse); - - // PUT / - rpc UpdateEventHandler(UpdateEventHandlerRequest) returns (UpdateEventHandlerResponse); - - // DELETE /{name} - rpc RemoveEventHandler(RemoveEventHandlerRequest) returns (RemoveEventHandlerResponse); - - // GET / - rpc GetEventHandlers(GetEventHandlersRequest) returns (stream conductor.proto.EventHandler); - - // GET /{name} - rpc GetEventHandlersForEvent(GetEventHandlersForEventRequest) returns (stream conductor.proto.EventHandler); -} - -message AddEventHandlerRequest { - conductor.proto.EventHandler handler = 1; -} - -message AddEventHandlerResponse {} - -message UpdateEventHandlerRequest { - conductor.proto.EventHandler handler = 1; -} - -message UpdateEventHandlerResponse {} - -message RemoveEventHandlerRequest { - string name = 1; -} - -message RemoveEventHandlerResponse {} - -message GetEventHandlersRequest {} - -message 
GetEventHandlersForEventRequest { - string event = 1; - bool active_only = 2; -} diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto deleted file mode 100644 index 1716c6bbe..000000000 --- a/grpc/src/main/proto/grpc/metadata_service.proto +++ /dev/null @@ -1,80 +0,0 @@ -syntax = "proto3"; -package conductor.grpc.metadata; - -import "model/taskdef.proto"; -import "model/workflowdef.proto"; - -option java_package = "com.netflix.conductor.grpc"; -option java_outer_classname = "MetadataServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata"; - -service MetadataService { - // POST /workflow - rpc CreateWorkflow(CreateWorkflowRequest) returns (CreateWorkflowResponse); - - // PUT /workflow - rpc UpdateWorkflows(UpdateWorkflowsRequest) returns (UpdateWorkflowsResponse); - - // GET /workflow/{name} - rpc GetWorkflow(GetWorkflowRequest) returns (GetWorkflowResponse); - - // POST /taskdefs - rpc CreateTasks(CreateTasksRequest) returns (CreateTasksResponse); - - // PUT /taskdefs - rpc UpdateTask(UpdateTaskRequest) returns (UpdateTaskResponse); - - // GET /taskdefs/{tasktype} - rpc GetTask(GetTaskRequest) returns (GetTaskResponse); - - // DELETE /taskdefs/{tasktype} - rpc DeleteTask(DeleteTaskRequest) returns (DeleteTaskResponse); -} - -message CreateWorkflowRequest { - conductor.proto.WorkflowDef workflow = 1; -} - -message CreateWorkflowResponse {} - -message UpdateWorkflowsRequest { - repeated conductor.proto.WorkflowDef defs = 1; -} - -message UpdateWorkflowsResponse {} - -message GetWorkflowRequest { - string name = 1; - int32 version = 2; -} - -message GetWorkflowResponse { - conductor.proto.WorkflowDef workflow = 1; -} - -message CreateTasksRequest { - repeated conductor.proto.TaskDef defs = 1; -} - -message CreateTasksResponse {} - -message UpdateTaskRequest { - conductor.proto.TaskDef task = 1; -} - -message UpdateTaskResponse {} - - -message GetTaskRequest { - 
string task_type = 1; -} - -message GetTaskResponse { - conductor.proto.TaskDef task = 1; -} - -message DeleteTaskRequest { - string task_type = 1; -} - -message DeleteTaskResponse {} diff --git a/grpc/src/main/proto/grpc/search.proto b/grpc/src/main/proto/grpc/search.proto deleted file mode 100644 index e9ad0f069..000000000 --- a/grpc/src/main/proto/grpc/search.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; -package conductor.grpc.search; - -option java_package = "com.netflix.conductor.grpc"; -option java_outer_classname = "SearchPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search"; - -message Request { - int32 start = 1; - int32 size = 2; - string sort = 3; - string free_text = 4; - string query = 5; -} - diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto deleted file mode 100644 index b14dcc606..000000000 --- a/grpc/src/main/proto/grpc/task_service.proto +++ /dev/null @@ -1,133 +0,0 @@ -syntax = "proto3"; -package conductor.grpc.tasks; - -import "model/taskexeclog.proto"; -import "model/taskresult.proto"; -import "model/tasksummary.proto"; -import "model/task.proto"; -import "grpc/search.proto"; - -option java_package = "com.netflix.conductor.grpc"; -option java_outer_classname = "TaskServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks"; - -service TaskService { - // GET /poll/{tasktype} - rpc Poll(PollRequest) returns (PollResponse); - - // /poll/batch/{tasktype} - rpc BatchPoll(BatchPollRequest) returns (stream conductor.proto.Task); - - // POST / - rpc UpdateTask(UpdateTaskRequest) returns (UpdateTaskResponse); - - // POST /{taskId}/log - rpc AddLog(AddLogRequest) returns (AddLogResponse); - - // GET {taskId}/log - rpc GetTaskLogs(GetTaskLogsRequest) returns (GetTaskLogsResponse); - - // GET /{taskId} - rpc GetTask(GetTaskRequest) returns (GetTaskResponse); - - // GET /queue/sizes - rpc 
GetQueueSizesForTasks(QueueSizesRequest) returns (QueueSizesResponse); - - // GET /queue/all - rpc GetQueueInfo(QueueInfoRequest) returns (QueueInfoResponse); - - // GET /queue/all/verbose - rpc GetQueueAllInfo(QueueAllInfoRequest) returns (QueueAllInfoResponse); - - // GET /search - rpc Search(conductor.grpc.search.Request) returns (TaskSummarySearchResult); - - // GET /searchV2 - rpc SearchV2(conductor.grpc.search.Request) returns (TaskSearchResult); -} - -message PollRequest { - string task_type = 1; - string worker_id = 2; - string domain = 3; -} - -message PollResponse { - conductor.proto.Task task = 1; -} - -message BatchPollRequest { - string task_type = 1; - string worker_id = 2; - string domain = 3; - int32 count = 4; - int32 timeout = 5; -} - -message UpdateTaskRequest { - conductor.proto.TaskResult result = 1; -} - -message UpdateTaskResponse { - string task_id = 1; -} - -message AddLogRequest { - string task_id = 1; - string log = 2; -} - -message AddLogResponse {} - -message GetTaskLogsRequest { - string task_id = 1; -} - -message GetTaskLogsResponse { - repeated conductor.proto.TaskExecLog logs = 1; -} - -message GetTaskRequest { - string task_id = 1; -} - -message GetTaskResponse { - conductor.proto.Task task = 1; -} - -message QueueSizesRequest { - repeated string task_types = 1; -} - -message QueueSizesResponse { - map queue_for_task = 1; -} - -message QueueInfoRequest {} - -message QueueInfoResponse { - map queues = 1; -} - -message QueueAllInfoRequest {} - -message QueueAllInfoResponse { - message ShardInfo { - int64 size = 1; - int64 uacked = 2; - } - message QueueInfo { - map shards = 1; - } - map queues = 1; -} - -message TaskSummarySearchResult { - int64 total_hits = 1; - repeated conductor.proto.TaskSummary results = 2; -} - -message TaskSearchResult { - int64 total_hits = 1; - repeated conductor.proto.Task results = 2; -} diff --git a/grpc/src/main/proto/grpc/workflow_service.proto b/grpc/src/main/proto/grpc/workflow_service.proto deleted 
file mode 100644 index 6bdd9db4d..000000000 --- a/grpc/src/main/proto/grpc/workflow_service.proto +++ /dev/null @@ -1,177 +0,0 @@ -syntax = "proto3"; -package conductor.grpc.workflows; - -import "grpc/search.proto"; -import "model/workflow.proto"; -import "model/workflowsummary.proto"; -import "model/skiptaskrequest.proto"; -import "model/startworkflowrequest.proto"; -import "model/rerunworkflowrequest.proto"; - -option java_package = "com.netflix.conductor.grpc"; -option java_outer_classname = "WorkflowServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows"; - -service WorkflowService { - // POST / - rpc StartWorkflow(conductor.proto.StartWorkflowRequest) returns (StartWorkflowResponse); - - // GET /{name}/correlated/{correlationId} - rpc GetWorkflows(GetWorkflowsRequest) returns (GetWorkflowsResponse); - - // GET /{workflowId} - rpc GetWorkflowStatus(GetWorkflowStatusRequest) returns (conductor.proto.Workflow); - - // DELETE /{workflodId}/remove - rpc RemoveWorkflow(RemoveWorkflowRequest) returns (RemoveWorkflowResponse); - - // GET /running/{name} - rpc GetRunningWorkflows(GetRunningWorkflowsRequest) returns (GetRunningWorkflowsResponse); - - // PUT /decide/{workflowId} - rpc DecideWorkflow(DecideWorkflowRequest) returns (DecideWorkflowResponse); - - // PUT /{workflowId}/pause - rpc PauseWorkflow(PauseWorkflowRequest) returns (PauseWorkflowResponse); - - // PUT /{workflowId}/pause - rpc ResumeWorkflow(ResumeWorkflowRequest) returns (ResumeWorkflowResponse); - - // PUT /{workflowId}/skiptask/{taskReferenceName} - rpc SkipTaskFromWorkflow(SkipTaskRequest) returns (SkipTaskResponse); - - // POST /{workflowId}/rerun - rpc RerunWorkflow(conductor.proto.RerunWorkflowRequest) returns (RerunWorkflowResponse); - - // POST /{workflowId}/restart - rpc RestartWorkflow(RestartWorkflowRequest) returns (RestartWorkflowResponse); - - // POST /{workflowId}retry - rpc RetryWorkflow(RetryWorkflowRequest) returns 
(RetryWorkflowResponse); - - // POST /{workflowId}/resetcallbacks - rpc ResetWorkflowCallbacks(ResetWorkflowCallbacksRequest) returns (ResetWorkflowCallbacksResponse); - - // DELETE /{workflowId} - rpc TerminateWorkflow(TerminateWorkflowRequest) returns (TerminateWorkflowResponse); - - // GET /search - rpc Search(conductor.grpc.search.Request) returns (WorkflowSummarySearchResult); - rpc SearchByTasks(conductor.grpc.search.Request) returns (WorkflowSummarySearchResult); - - // GET /searchV2 - rpc SearchV2(conductor.grpc.search.Request) returns (WorkflowSearchResult); - rpc SearchByTasksV2(conductor.grpc.search.Request) returns (WorkflowSearchResult); -} - -message StartWorkflowResponse { - string workflow_id = 1; -} - -message GetWorkflowsRequest { - string name = 1; - repeated string correlation_id = 2; - bool include_closed = 3; - bool include_tasks = 4; -} - -message GetWorkflowsResponse { - message Workflows { - repeated conductor.proto.Workflow workflows = 1; - } - map workflows_by_id = 1; -} - -message GetWorkflowStatusRequest { - string workflow_id = 1; - bool include_tasks = 2; -} - -message GetWorkflowStatusResponse { - conductor.proto.Workflow workflow = 1; -} - -message RemoveWorkflowRequest { - string workflod_id = 1; - bool archive_workflow = 2; -} - -message RemoveWorkflowResponse {} - -message GetRunningWorkflowsRequest { - string name = 1; - int32 version = 2; - int64 start_time = 3; - int64 end_time = 4; -} - -message GetRunningWorkflowsResponse { - repeated string workflow_ids = 1; -} - -message DecideWorkflowRequest { - string workflow_id = 1; -} - -message DecideWorkflowResponse {} - -message PauseWorkflowRequest { - string workflow_id = 1; -} - -message PauseWorkflowResponse {} - -message ResumeWorkflowRequest { - string workflow_id = 1; -} - -message ResumeWorkflowResponse {} - -message SkipTaskRequest { - string workflow_id = 1; - string task_reference_name = 2; - conductor.proto.SkipTaskRequest request = 3; -} - -message SkipTaskResponse {} 
- -message RerunWorkflowResponse { - string workflow_id = 1; -} - -message RestartWorkflowRequest { - string workflow_id = 1; - bool use_latest_definitions = 2; -} - -message RestartWorkflowResponse {} - -message RetryWorkflowRequest { - string workflow_id = 1; - bool resume_subworkflow_tasks = 2; -} - -message RetryWorkflowResponse {} - -message ResetWorkflowCallbacksRequest { - string workflow_id = 1; -} - -message ResetWorkflowCallbacksResponse {} - -message TerminateWorkflowRequest { - string workflow_id = 1; - string reason = 2; -} - -message TerminateWorkflowResponse {} - -message WorkflowSummarySearchResult { - int64 total_hits = 1; - repeated conductor.proto.WorkflowSummary results = 2; -} - -message WorkflowSearchResult { - int64 total_hits = 1; - repeated conductor.proto.Workflow results = 2; -} diff --git a/grpc/src/main/proto/model/dynamicforkjointask.proto b/grpc/src/main/proto/model/dynamicforkjointask.proto deleted file mode 100644 index 12e66bb1e..000000000 --- a/grpc/src/main/proto/model/dynamicforkjointask.proto +++ /dev/null @@ -1,16 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "DynamicForkJoinTaskPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message DynamicForkJoinTask { - string task_name = 1; - string workflow_name = 2; - string reference_name = 3; - map input = 4; - string type = 5; -} diff --git a/grpc/src/main/proto/model/dynamicforkjointasklist.proto b/grpc/src/main/proto/model/dynamicforkjointasklist.proto deleted file mode 100644 index 3ac3f44d9..000000000 --- a/grpc/src/main/proto/model/dynamicforkjointasklist.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/dynamicforkjointask.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "DynamicForkJoinTaskListPb"; -option 
go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message DynamicForkJoinTaskList { - repeated DynamicForkJoinTask dynamic_tasks = 1; -} diff --git a/grpc/src/main/proto/model/eventexecution.proto b/grpc/src/main/proto/model/eventexecution.proto deleted file mode 100644 index e4aee81aa..000000000 --- a/grpc/src/main/proto/model/eventexecution.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/eventhandler.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "EventExecutionPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message EventExecution { - enum Status { - IN_PROGRESS = 0; - COMPLETED = 1; - FAILED = 2; - SKIPPED = 3; - } - string id = 1; - string message_id = 2; - string name = 3; - string event = 4; - int64 created = 5; - EventExecution.Status status = 6; - EventHandler.Action.Type action = 7; - map output = 8; -} diff --git a/grpc/src/main/proto/model/eventhandler.proto b/grpc/src/main/proto/model/eventhandler.proto deleted file mode 100644 index cfc623b53..000000000 --- a/grpc/src/main/proto/model/eventhandler.proto +++ /dev/null @@ -1,45 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; -import "google/protobuf/any.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "EventHandlerPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message EventHandler { - message StartWorkflow { - string name = 1; - int32 version = 2; - string correlation_id = 3; - map input = 4; - google.protobuf.Any input_message = 5; - map task_to_domain = 6; - } - message TaskDetails { - string workflow_id = 1; - string task_ref_name = 2; - map output = 3; - google.protobuf.Any output_message = 4; - string task_id = 5; - } - message Action { - enum Type { - 
START_WORKFLOW = 0; - COMPLETE_TASK = 1; - FAIL_TASK = 2; - } - EventHandler.Action.Type action = 1; - EventHandler.StartWorkflow start_workflow = 2; - EventHandler.TaskDetails complete_task = 3; - EventHandler.TaskDetails fail_task = 4; - bool expand_inline_json = 5; - } - string name = 1; - string event = 2; - string condition = 3; - repeated EventHandler.Action actions = 4; - bool active = 5; - string evaluator_type = 6; -} diff --git a/grpc/src/main/proto/model/polldata.proto b/grpc/src/main/proto/model/polldata.proto deleted file mode 100644 index 59169430c..000000000 --- a/grpc/src/main/proto/model/polldata.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "PollDataPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message PollData { - string queue_name = 1; - string domain = 2; - string worker_id = 3; - int64 last_poll_time = 4; -} diff --git a/grpc/src/main/proto/model/rerunworkflowrequest.proto b/grpc/src/main/proto/model/rerunworkflowrequest.proto deleted file mode 100644 index 280e8cfae..000000000 --- a/grpc/src/main/proto/model/rerunworkflowrequest.proto +++ /dev/null @@ -1,16 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "RerunWorkflowRequestPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message RerunWorkflowRequest { - string re_run_from_workflow_id = 1; - map workflow_input = 2; - string re_run_from_task_id = 3; - map task_input = 4; - string correlation_id = 5; -} diff --git a/grpc/src/main/proto/model/skiptaskrequest.proto b/grpc/src/main/proto/model/skiptaskrequest.proto deleted file mode 100644 index 323e5162f..000000000 --- a/grpc/src/main/proto/model/skiptaskrequest.proto +++ /dev/null @@ -1,16 +0,0 @@ 
-syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; -import "google/protobuf/any.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "SkipTaskRequestPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message SkipTaskRequest { - map task_input = 1; - map task_output = 2; - google.protobuf.Any task_input_message = 3; - google.protobuf.Any task_output_message = 4; -} diff --git a/grpc/src/main/proto/model/startworkflowrequest.proto b/grpc/src/main/proto/model/startworkflowrequest.proto deleted file mode 100644 index 4a71f28ed..000000000 --- a/grpc/src/main/proto/model/startworkflowrequest.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/workflowdef.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "StartWorkflowRequestPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message StartWorkflowRequest { - string name = 1; - int32 version = 2; - string correlation_id = 3; - map input = 4; - map task_to_domain = 5; - WorkflowDef workflow_def = 6; - string external_input_payload_storage_path = 7; - int32 priority = 8; -} diff --git a/grpc/src/main/proto/model/subworkflowparams.proto b/grpc/src/main/proto/model/subworkflowparams.proto deleted file mode 100644 index 4a52f45bc..000000000 --- a/grpc/src/main/proto/model/subworkflowparams.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "SubWorkflowParamsPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message SubWorkflowParams { - string name = 1; - int32 version = 2; - map task_to_domain = 3; - google.protobuf.Value workflow_definition = 
4; -} diff --git a/grpc/src/main/proto/model/task.proto b/grpc/src/main/proto/model/task.proto deleted file mode 100644 index 410aa0a06..000000000 --- a/grpc/src/main/proto/model/task.proto +++ /dev/null @@ -1,64 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/workflowtask.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/any.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "TaskPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message Task { - enum Status { - IN_PROGRESS = 0; - CANCELED = 1; - FAILED = 2; - FAILED_WITH_TERMINAL_ERROR = 3; - COMPLETED = 4; - COMPLETED_WITH_ERRORS = 5; - SCHEDULED = 6; - TIMED_OUT = 7; - SKIPPED = 8; - } - string task_type = 1; - Task.Status status = 2; - map input_data = 3; - string reference_task_name = 4; - int32 retry_count = 5; - int32 seq = 6; - string correlation_id = 7; - int32 poll_count = 8; - string task_def_name = 9; - int64 scheduled_time = 10; - int64 start_time = 11; - int64 end_time = 12; - int64 update_time = 13; - int32 start_delay_in_seconds = 14; - string retried_task_id = 15; - bool retried = 16; - bool executed = 17; - bool callback_from_worker = 18; - int64 response_timeout_seconds = 19; - string workflow_instance_id = 20; - string workflow_type = 21; - string task_id = 22; - string reason_for_incompletion = 23; - int64 callback_after_seconds = 24; - string worker_id = 25; - map output_data = 26; - WorkflowTask workflow_task = 27; - string domain = 28; - google.protobuf.Any input_message = 29; - google.protobuf.Any output_message = 30; - int32 rate_limit_per_frequency = 32; - int32 rate_limit_frequency_in_seconds = 33; - string external_input_payload_storage_path = 34; - string external_output_payload_storage_path = 35; - int32 workflow_priority = 36; - string execution_name_space = 37; - string isolation_group_id = 38; - int32 iteration = 40; - string sub_workflow_id = 41; - 
bool subworkflow_changed = 42; -} diff --git a/grpc/src/main/proto/model/taskdef.proto b/grpc/src/main/proto/model/taskdef.proto deleted file mode 100644 index dd15508d6..000000000 --- a/grpc/src/main/proto/model/taskdef.proto +++ /dev/null @@ -1,40 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "TaskDefPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message TaskDef { - enum RetryLogic { - FIXED = 0; - EXPONENTIAL_BACKOFF = 1; - LINEAR_BACKOFF = 2; - } - enum TimeoutPolicy { - RETRY = 0; - TIME_OUT_WF = 1; - ALERT_ONLY = 2; - } - string name = 1; - string description = 2; - int32 retry_count = 3; - int64 timeout_seconds = 4; - repeated string input_keys = 5; - repeated string output_keys = 6; - TaskDef.TimeoutPolicy timeout_policy = 7; - TaskDef.RetryLogic retry_logic = 8; - int32 retry_delay_seconds = 9; - int64 response_timeout_seconds = 10; - int32 concurrent_exec_limit = 11; - map input_template = 12; - int32 rate_limit_per_frequency = 14; - int32 rate_limit_frequency_in_seconds = 15; - string isolation_group_id = 16; - string execution_name_space = 17; - string owner_email = 18; - int32 poll_timeout_seconds = 19; - int32 backoff_scale_factor = 20; -} diff --git a/grpc/src/main/proto/model/taskexeclog.proto b/grpc/src/main/proto/model/taskexeclog.proto deleted file mode 100644 index f67b2e4b2..000000000 --- a/grpc/src/main/proto/model/taskexeclog.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "TaskExecLogPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message TaskExecLog { - string log = 1; - string task_id = 2; - int64 created_time = 3; -} diff --git a/grpc/src/main/proto/model/taskresult.proto 
b/grpc/src/main/proto/model/taskresult.proto deleted file mode 100644 index bb7f4a23e..000000000 --- a/grpc/src/main/proto/model/taskresult.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "google/protobuf/struct.proto"; -import "google/protobuf/any.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "TaskResultPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message TaskResult { - enum Status { - IN_PROGRESS = 0; - FAILED = 1; - FAILED_WITH_TERMINAL_ERROR = 2; - COMPLETED = 3; - } - string workflow_instance_id = 1; - string task_id = 2; - string reason_for_incompletion = 3; - int64 callback_after_seconds = 4; - string worker_id = 5; - TaskResult.Status status = 6; - map output_data = 7; - google.protobuf.Any output_message = 8; -} diff --git a/grpc/src/main/proto/model/tasksummary.proto b/grpc/src/main/proto/model/tasksummary.proto deleted file mode 100644 index 270a4f015..000000000 --- a/grpc/src/main/proto/model/tasksummary.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/task.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "TaskSummaryPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message TaskSummary { - string workflow_id = 1; - string workflow_type = 2; - string correlation_id = 3; - string scheduled_time = 4; - string start_time = 5; - string update_time = 6; - string end_time = 7; - Task.Status status = 8; - string reason_for_incompletion = 9; - int64 execution_time = 10; - int64 queue_wait_time = 11; - string task_def_name = 12; - string task_type = 13; - string input = 14; - string output = 15; - string task_id = 16; - string external_input_payload_storage_path = 17; - string external_output_payload_storage_path = 18; - int32 workflow_priority = 19; -} diff --git 
a/grpc/src/main/proto/model/workflow.proto b/grpc/src/main/proto/model/workflow.proto deleted file mode 100644 index c535389b1..000000000 --- a/grpc/src/main/proto/model/workflow.proto +++ /dev/null @@ -1,41 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/workflowdef.proto"; -import "model/task.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "WorkflowPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message Workflow { - enum WorkflowStatus { - RUNNING = 0; - COMPLETED = 1; - FAILED = 2; - TIMED_OUT = 3; - TERMINATED = 4; - PAUSED = 5; - } - Workflow.WorkflowStatus status = 1; - int64 end_time = 2; - string workflow_id = 3; - string parent_workflow_id = 4; - string parent_workflow_task_id = 5; - repeated Task tasks = 6; - map input = 8; - map output = 9; - string correlation_id = 12; - string re_run_from_workflow_id = 13; - string reason_for_incompletion = 14; - string event = 16; - map task_to_domain = 17; - repeated string failed_reference_task_names = 18; - WorkflowDef workflow_definition = 19; - string external_input_payload_storage_path = 20; - string external_output_payload_storage_path = 21; - int32 priority = 22; - map variables = 23; - int64 last_retried_time = 24; -} diff --git a/grpc/src/main/proto/model/workflowdef.proto b/grpc/src/main/proto/model/workflowdef.proto deleted file mode 100644 index ddf75e38a..000000000 --- a/grpc/src/main/proto/model/workflowdef.proto +++ /dev/null @@ -1,31 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/workflowtask.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "WorkflowDefPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message WorkflowDef { - enum TimeoutPolicy { - TIME_OUT_WF = 0; - ALERT_ONLY = 1; - } - string name = 
1; - string description = 2; - int32 version = 3; - repeated WorkflowTask tasks = 4; - repeated string input_parameters = 5; - map output_parameters = 6; - string failure_workflow = 7; - int32 schema_version = 8; - bool restartable = 9; - bool workflow_status_listener_enabled = 10; - string owner_email = 11; - WorkflowDef.TimeoutPolicy timeout_policy = 12; - int64 timeout_seconds = 13; - map variables = 14; - map input_template = 15; -} diff --git a/grpc/src/main/proto/model/workflowsummary.proto b/grpc/src/main/proto/model/workflowsummary.proto deleted file mode 100644 index 63adf2e9a..000000000 --- a/grpc/src/main/proto/model/workflowsummary.proto +++ /dev/null @@ -1,28 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/workflow.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = "WorkflowSummaryPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message WorkflowSummary { - string workflow_type = 1; - int32 version = 2; - string workflow_id = 3; - string correlation_id = 4; - string start_time = 5; - string update_time = 6; - string end_time = 7; - Workflow.WorkflowStatus status = 8; - string input = 9; - string output = 10; - string reason_for_incompletion = 11; - int64 execution_time = 12; - string event = 13; - string failed_reference_task_names = 14; - string external_input_payload_storage_path = 15; - string external_output_payload_storage_path = 16; - int32 priority = 17; -} diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto deleted file mode 100644 index 8855a714f..000000000 --- a/grpc/src/main/proto/model/workflowtask.proto +++ /dev/null @@ -1,44 +0,0 @@ -syntax = "proto3"; -package conductor.proto; - -import "model/taskdef.proto"; -import "model/subworkflowparams.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "com.netflix.conductor.proto"; -option java_outer_classname = 
"WorkflowTaskPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message WorkflowTask { - message WorkflowTaskList { - repeated WorkflowTask tasks = 1; - } - string name = 1; - string task_reference_name = 2; - string description = 3; - map input_parameters = 4; - string type = 5; - string dynamic_task_name_param = 6; - string case_value_param = 7; - string case_expression = 8; - string script_expression = 22; - map decision_cases = 9; - string dynamic_fork_tasks_param = 10; - string dynamic_fork_tasks_input_param_name = 11; - repeated WorkflowTask default_case = 12; - repeated WorkflowTask.WorkflowTaskList fork_tasks = 13; - int32 start_delay = 14; - SubWorkflowParams sub_workflow_param = 15; - repeated string join_on = 16; - string sink = 17; - bool optional = 18; - TaskDef task_definition = 19; - bool rate_limited = 20; - repeated string default_exclusive_join_task = 21; - bool async_complete = 23; - string loop_condition = 24; - repeated WorkflowTask loop_over = 25; - int32 retry_count = 26; - string evaluator_type = 27; - string expression = 28; -} diff --git a/grpc/src/test/java/com/netflix/conductor/grpc/TestProtoMapper.java b/grpc/src/test/java/com/netflix/conductor/grpc/TestProtoMapper.java deleted file mode 100644 index 31286609a..000000000 --- a/grpc/src/test/java/com/netflix/conductor/grpc/TestProtoMapper.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.grpc; - -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.proto.WorkflowTaskPb; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class TestProtoMapper { - private final ProtoMapper mapper = ProtoMapper.INSTANCE; - - @Test - public void workflowTaskToProto() { - final WorkflowTask taskWithDefaultRetryCount = new WorkflowTask(); - final WorkflowTask taskWith1RetryCount = new WorkflowTask(); - taskWith1RetryCount.setRetryCount(1); - final WorkflowTask taskWithNoRetryCount = new WorkflowTask(); - taskWithNoRetryCount.setRetryCount(0); - assertEquals(-1, mapper.toProto(taskWithDefaultRetryCount).getRetryCount()); - assertEquals(1, mapper.toProto(taskWith1RetryCount).getRetryCount()); - assertEquals(0, mapper.toProto(taskWithNoRetryCount).getRetryCount()); - } - - @Test - public void workflowTaskFromProto() { - final WorkflowTaskPb.WorkflowTask taskWithDefaultRetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().build(); - final WorkflowTaskPb.WorkflowTask taskWith1RetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().setRetryCount(1).build(); - final WorkflowTaskPb.WorkflowTask taskWithNoRetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().setRetryCount(-1).build(); - assertEquals(new Integer(0), mapper.fromProto(taskWithDefaultRetryCount).getRetryCount()); - assertEquals(1, mapper.fromProto(taskWith1RetryCount).getRetryCount().intValue()); - assertNull(mapper.fromProto(taskWithNoRetryCount).getRetryCount()); - } -} diff --git a/hardcoded-endpoints.html b/hardcoded-endpoints.html new file mode 
100644 index 000000000..a8fe9cd06 --- /dev/null +++ b/hardcoded-endpoints.html @@ -0,0 +1,115 @@ + + + + + + + + + + + + + + + + + + + + + MSANose Report + + + +

    + + +
    +
    +
    + View More on GitHub +

    Hard-Coded Endpoints (HCE)

    +
    +

    Hardcoded IP addresses and ports are used to communicate between services. By hard-coding the endpoints, the application becomes more brittle to change and reduces the application’s scalability.

    +
    +
    + +
    +
    + +
    +
    +
    Hard-coded Endpoints
    +
    + +
    + +
    +
    +
    +
    + Hardcoded Ports +
    +
    +
    + 0 +
    +
    +
    +
    + +
    +
    +
    +
    + Hardcoded IPs +
    +
    +
    + 0 +
    +
    +
    +
    + +
    +
    +
    + + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/http-task/build.gradle b/http-task/build.gradle deleted file mode 100644 index c525897a7..000000000 --- a/http-task/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - compileOnly 'org.springframework.boot:spring-boot-starter-web' - - implementation "javax.ws.rs:jsr311-api:${revJsr311Api}" - - testImplementation 'org.springframework.boot:spring-boot-starter-web' - testImplementation "org.testcontainers:mockserver:${revTestContainer}" - testImplementation "org.mock-server:mockserver-client-java:${revMockServerClient}" -} \ No newline at end of file diff --git a/http-task/dependencies.lock b/http-task/dependencies.lock deleted file mode 100644 index a63945ba8..000000000 --- a/http-task/dependencies.lock +++ /dev/null @@ -1,399 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "javax.ws.rs:jsr311-api": { - "locked": "1.1.1" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": 
{ - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - 
"jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.ws.rs:jsr311-api": { - "locked": "1.1.1" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "javax.ws.rs:jsr311-api": { - "locked": "1.1.1" - }, - "junit:junit": { - "locked": 
"4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.mock-server:mockserver-client-java": { - "locked": "5.12.0" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - }, - "org.testcontainers:mockserver": { - "locked": "1.15.3" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.11.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.ws.rs:jsr311-api": { - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.mock-server:mockserver-client-java": { - "locked": "5.12.0" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - }, - "org.testcontainers:mockserver": { - "locked": "1.15.3" - } - } -} \ No newline at end of file diff --git a/http-task/src/main/java/com/netflix/conductor/tasks/http/HttpTask.java b/http-task/src/main/java/com/netflix/conductor/tasks/http/HttpTask.java deleted file mode 100644 index 6c69554a7..000000000 --- a/http-task/src/main/java/com/netflix/conductor/tasks/http/HttpTask.java +++ /dev/null @@ -1,395 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.http; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.*; -import org.springframework.stereotype.Component; -import org.springframework.util.MultiValueMap; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.tasks.http.providers.RestTemplateProvider; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HTTP; - -/** Task that enables calling another HTTP endpoint as part of its execution */ -@Component(TASK_TYPE_HTTP) -public class HttpTask extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(HttpTask.class); - - public static final String REQUEST_PARAMETER_NAME = "http_request"; - - static final String MISSING_REQUEST = - "Missing HTTP request. Task input MUST have a '" - + REQUEST_PARAMETER_NAME - + "' key with HttpTask.Input as value. 
See documentation for HttpTask for required input parameters"; - - private final TypeReference> mapOfObj = - new TypeReference>() {}; - private final TypeReference> listOfObj = new TypeReference>() {}; - protected ObjectMapper objectMapper; - protected RestTemplateProvider restTemplateProvider; - private final String requestParameter; - - @Autowired - public HttpTask(RestTemplateProvider restTemplateProvider, ObjectMapper objectMapper) { - this(TASK_TYPE_HTTP, restTemplateProvider, objectMapper); - } - - public HttpTask( - String name, RestTemplateProvider restTemplateProvider, ObjectMapper objectMapper) { - super(name); - this.restTemplateProvider = restTemplateProvider; - this.objectMapper = objectMapper; - this.requestParameter = REQUEST_PARAMETER_NAME; - LOGGER.info("{} initialized...", getTaskType()); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - Object request = task.getInputData().get(requestParameter); - task.setWorkerId(Utils.getServerId()); - if (request == null) { - task.setReasonForIncompletion(MISSING_REQUEST); - task.setStatus(TaskModel.Status.FAILED); - return; - } - - Input input = objectMapper.convertValue(request, Input.class); - if (input.getUri() == null) { - String reason = - "Missing HTTP URI. 
See documentation for HttpTask for required input parameters"; - task.setReasonForIncompletion(reason); - task.setStatus(TaskModel.Status.FAILED); - return; - } - - if (input.getMethod() == null) { - String reason = "No HTTP method specified"; - task.setReasonForIncompletion(reason); - task.setStatus(TaskModel.Status.FAILED); - return; - } - - try { - HttpResponse response = httpCall(input); - LOGGER.debug( - "Response: {}, {}, task:{}", - response.statusCode, - response.body, - task.getTaskId()); - if (response.statusCode > 199 && response.statusCode < 300) { - if (isAsyncComplete(task)) { - task.setStatus(TaskModel.Status.IN_PROGRESS); - } else { - task.setStatus(TaskModel.Status.COMPLETED); - } - } else { - if (response.body != null) { - task.setReasonForIncompletion(response.body.toString()); - } else { - task.setReasonForIncompletion("No response from the remote service"); - } - task.setStatus(TaskModel.Status.FAILED); - } - //noinspection ConstantConditions - if (response != null) { - task.getOutputData().put("response", response.asMap()); - } - - } catch (Exception e) { - LOGGER.error( - "Failed to invoke {} task: {} - uri: {}, vipAddress: {} in workflow: {}", - getTaskType(), - task.getTaskId(), - input.getUri(), - input.getVipAddress(), - task.getWorkflowInstanceId(), - e); - task.setStatus(TaskModel.Status.FAILED); - task.setReasonForIncompletion( - "Failed to invoke " + getTaskType() + " task due to: " + e); - task.getOutputData().put("response", e.toString()); - } - } - - /** - * @param input HTTP Request - * @return Response of the http call - * @throws Exception If there was an error making http call Note: protected access is so that - * tasks extended from this task can re-use this to make http calls - */ - protected HttpResponse httpCall(Input input) throws Exception { - RestTemplate restTemplate = restTemplateProvider.getRestTemplate(input); - - HttpHeaders headers = new HttpHeaders(); - 
headers.setContentType(MediaType.valueOf(input.getContentType())); - headers.setAccept(Collections.singletonList(MediaType.valueOf(input.getAccept()))); - - input.headers.forEach( - (key, value) -> { - if (value != null) { - headers.add(key, value.toString()); - } - }); - - HttpEntity request = new HttpEntity<>(input.getBody(), headers); - - HttpResponse response = new HttpResponse(); - try { - ResponseEntity responseEntity = - restTemplate.exchange(input.getUri(), input.getMethod(), request, String.class); - if (responseEntity.getStatusCode().is2xxSuccessful() && responseEntity.hasBody()) { - response.body = extractBody(responseEntity.getBody()); - } - - response.statusCode = responseEntity.getStatusCodeValue(); - response.reasonPhrase = responseEntity.getStatusCode().getReasonPhrase(); - response.headers = responseEntity.getHeaders(); - return response; - } catch (RestClientException ex) { - LOGGER.error( - String.format( - "Got unexpected http response - uri: %s, vipAddress: %s", - input.getUri(), input.getVipAddress()), - ex); - String reason = ex.getLocalizedMessage(); - LOGGER.error(reason, ex); - throw new Exception(reason); - } - } - - private Object extractBody(String responseBody) { - try { - JsonNode node = objectMapper.readTree(responseBody); - if (node.isArray()) { - return objectMapper.convertValue(node, listOfObj); - } else if (node.isObject()) { - return objectMapper.convertValue(node, mapOfObj); - } else if (node.isNumber()) { - return objectMapper.convertValue(node, Double.class); - } else { - return node.asText(); - } - } catch (IOException jpe) { - LOGGER.error("Error extracting response body", jpe); - return responseBody; - } - } - - @Override - public boolean execute(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - return false; - } - - @Override - public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - task.setStatus(TaskModel.Status.CANCELED); - } - - @Override - public boolean 
isAsync() { - return true; - } - - public static class HttpResponse { - - public Object body; - public MultiValueMap headers; - public int statusCode; - public String reasonPhrase; - - @Override - public String toString() { - return "HttpResponse [body=" - + body - + ", headers=" - + headers - + ", statusCode=" - + statusCode - + ", reasonPhrase=" - + reasonPhrase - + "]"; - } - - public Map asMap() { - Map map = new HashMap<>(); - map.put("body", body); - map.put("headers", headers); - map.put("statusCode", statusCode); - map.put("reasonPhrase", reasonPhrase); - return map; - } - } - - public static class Input { - - private HttpMethod method; // PUT, POST, GET, DELETE, OPTIONS, HEAD - private String vipAddress; - private String appName; - private Map headers = new HashMap<>(); - private String uri; - private Object body; - private String accept = MediaType.APPLICATION_JSON_VALUE; - private String contentType = MediaType.APPLICATION_JSON_VALUE; - private Integer connectionTimeOut; - private Integer readTimeOut; - - /** - * @return the method - */ - public HttpMethod getMethod() { - return method; - } - - /** - * @param method the method to set - */ - public void setMethod(String method) { - this.method = HttpMethod.valueOf(method); - } - - /** - * @return the headers - */ - public Map getHeaders() { - return headers; - } - - /** - * @param headers the headers to set - */ - public void setHeaders(Map headers) { - this.headers = headers; - } - - /** - * @return the body - */ - public Object getBody() { - return body; - } - - /** - * @param body the body to set - */ - public void setBody(Object body) { - this.body = body; - } - - /** - * @return the uri - */ - public String getUri() { - return uri; - } - - /** - * @param uri the uri to set - */ - public void setUri(String uri) { - this.uri = uri; - } - - /** - * @return the vipAddress - */ - public String getVipAddress() { - return vipAddress; - } - - /** - * @param vipAddress the vipAddress to set - */ - public void 
setVipAddress(String vipAddress) { - this.vipAddress = vipAddress; - } - - /** - * @return the accept - */ - public String getAccept() { - return accept; - } - - /** - * @param accept the accept to set - */ - public void setAccept(String accept) { - this.accept = accept; - } - - /** - * @return the MIME content type to use for the request - */ - public String getContentType() { - return contentType; - } - - /** - * @param contentType the MIME content type to set - */ - public void setContentType(String contentType) { - this.contentType = contentType; - } - - public String getAppName() { - return appName; - } - - public void setAppName(String appName) { - this.appName = appName; - } - - /** - * @return the connectionTimeOut - */ - public Integer getConnectionTimeOut() { - return connectionTimeOut; - } - - /** - * @return the readTimeOut - */ - public Integer getReadTimeOut() { - return readTimeOut; - } - - public void setConnectionTimeOut(Integer connectionTimeOut) { - this.connectionTimeOut = connectionTimeOut; - } - - public void setReadTimeOut(Integer readTimeOut) { - this.readTimeOut = readTimeOut; - } - } -} diff --git a/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProvider.java b/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProvider.java deleted file mode 100644 index d460e36f6..000000000 --- a/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProvider.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.http.providers; - -import java.time.Duration; -import java.util.Optional; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.web.client.RestTemplateBuilder; -import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; -import org.springframework.lang.NonNull; -import org.springframework.stereotype.Component; -import org.springframework.web.client.RestTemplate; - -import com.netflix.conductor.tasks.http.HttpTask; - -/** - * Provider for a customized RestTemplateBuilder. This class provides a default {@link - * RestTemplateBuilder} which can be configured or extended as needed. 
- */ -@Component -public class DefaultRestTemplateProvider implements RestTemplateProvider { - - private final ThreadLocal threadLocalRestTemplate; - - private final int defaultReadTimeout; - private final int defaultConnectTimeout; - - @Autowired - public DefaultRestTemplateProvider( - @Value("${conductor.tasks.http.readTimeout:150ms}") Duration readTimeout, - @Value("${conductor.tasks.http.connectTimeout:100ms}") Duration connectTimeout) { - this.threadLocalRestTemplate = ThreadLocal.withInitial(RestTemplate::new); - this.defaultReadTimeout = (int) readTimeout.toMillis(); - this.defaultConnectTimeout = (int) connectTimeout.toMillis(); - } - - @Override - public @NonNull RestTemplate getRestTemplate(@NonNull HttpTask.Input input) { - RestTemplate restTemplate = threadLocalRestTemplate.get(); - HttpComponentsClientHttpRequestFactory requestFactory = - new HttpComponentsClientHttpRequestFactory(); - requestFactory.setConnectTimeout( - Optional.ofNullable(input.getConnectionTimeOut()).orElse(defaultConnectTimeout)); - requestFactory.setReadTimeout( - Optional.ofNullable(input.getReadTimeOut()).orElse(defaultReadTimeout)); - restTemplate.setRequestFactory(requestFactory); - return restTemplate; - } -} diff --git a/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/RestTemplateProvider.java b/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/RestTemplateProvider.java deleted file mode 100644 index 968904d2e..000000000 --- a/http-task/src/main/java/com/netflix/conductor/tasks/http/providers/RestTemplateProvider.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.http.providers; - -import org.springframework.lang.NonNull; -import org.springframework.web.client.RestTemplate; - -import com.netflix.conductor.tasks.http.HttpTask; - -@FunctionalInterface -public interface RestTemplateProvider { - - RestTemplate getRestTemplate(@NonNull HttpTask.Input input); -} diff --git a/http-task/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/http-task/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index 17c071f9a..000000000 --- a/http-task/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.tasks.http.readTimeout", - "type": "java.lang.Integer", - "description": "The read timeout of the underlying HttpClient used by the HTTP task." - }, - { - "name": "conductor.tasks.http.connectTimeout", - "type": "java.lang.Integer", - "description": "The connection timeout of the underlying HttpClient used by the HTTP task." - } - ] -} diff --git a/http-task/src/test/java/com/netflix/conductor/tasks/http/HttpTaskTest.java b/http-task/src/test/java/com/netflix/conductor/tasks/http/HttpTaskTest.java deleted file mode 100644 index f82662556..000000000 --- a/http-task/src/test/java/com/netflix/conductor/tasks/http/HttpTaskTest.java +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.http; - -import java.time.Duration; -import java.time.Instant; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.mockserver.client.MockServerClient; -import org.mockserver.model.HttpRequest; -import org.mockserver.model.HttpResponse; -import org.mockserver.model.MediaType; -import org.testcontainers.containers.MockServerContainer; -import org.testcontainers.utility.DockerImageName; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; -import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.core.utils.ParametersUtils; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.tasks.http.providers.DefaultRestTemplateProvider; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.*; -import static org.mockito.Mockito.mock; - 
-@SuppressWarnings("unchecked") -public class HttpTaskTest { - - private static final String ERROR_RESPONSE = "Something went wrong!"; - private static final String TEXT_RESPONSE = "Text Response"; - private static final double NUM_RESPONSE = 42.42d; - - private HttpTask httpTask; - private WorkflowExecutor workflowExecutor; - private final WorkflowModel workflow = new WorkflowModel(); - - private static final ObjectMapper objectMapper = new ObjectMapper(); - private static String JSON_RESPONSE; - - @ClassRule - public static MockServerContainer mockServer = - new MockServerContainer( - DockerImageName.parse("mockserver/mockserver").withTag("mockserver-5.12.0")); - - @BeforeClass - public static void init() throws Exception { - Map map = new HashMap<>(); - map.put("key", "value1"); - map.put("num", 42); - map.put("SomeKey", null); - JSON_RESPONSE = objectMapper.writeValueAsString(map); - - final TypeReference> mapOfObj = new TypeReference<>() {}; - MockServerClient client = - new MockServerClient(mockServer.getHost(), mockServer.getServerPort()); - client.when(HttpRequest.request().withPath("/post").withMethod("POST")) - .respond( - request -> { - Map reqBody = - objectMapper.readValue(request.getBody().toString(), mapOfObj); - Set keys = reqBody.keySet(); - Map respBody = new HashMap<>(); - keys.forEach(k -> respBody.put(k, k)); - return HttpResponse.response() - .withContentType(MediaType.APPLICATION_JSON) - .withBody(objectMapper.writeValueAsString(respBody)); - }); - client.when(HttpRequest.request().withPath("/post2").withMethod("POST")) - .respond(HttpResponse.response().withStatusCode(204)); - client.when(HttpRequest.request().withPath("/failure").withMethod("GET")) - .respond( - HttpResponse.response() - .withStatusCode(500) - .withContentType(MediaType.TEXT_PLAIN) - .withBody(ERROR_RESPONSE)); - client.when(HttpRequest.request().withPath("/text").withMethod("GET")) - .respond(HttpResponse.response().withBody(TEXT_RESPONSE)); - 
client.when(HttpRequest.request().withPath("/numeric").withMethod("GET")) - .respond(HttpResponse.response().withBody(String.valueOf(NUM_RESPONSE))); - client.when(HttpRequest.request().withPath("/json").withMethod("GET")) - .respond( - HttpResponse.response() - .withContentType(MediaType.APPLICATION_JSON) - .withBody(JSON_RESPONSE)); - } - - @Before - public void setup() { - workflowExecutor = mock(WorkflowExecutor.class); - DefaultRestTemplateProvider defaultRestTemplateProvider = - new DefaultRestTemplateProvider(Duration.ofMillis(150), Duration.ofMillis(100)); - httpTask = new HttpTask(defaultRestTemplateProvider, objectMapper); - } - - @Test - public void testPost() { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/post"); - Map body = new HashMap<>(); - body.put("input_key1", "value1"); - body.put("input_key2", 45.3d); - body.put("someKey", null); - input.setBody(body); - input.setMethod("POST"); - input.setReadTimeOut(1000); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals(task.getReasonForIncompletion(), TaskModel.Status.COMPLETED, task.getStatus()); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertTrue("response is: " + response, response instanceof Map); - Map map = (Map) response; - Set inputKeys = body.keySet(); - Set responseKeys = map.keySet(); - inputKeys.containsAll(responseKeys); - responseKeys.containsAll(inputKeys); - } - - @Test - public void testPostNoContent() { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri( - "http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/post2"); - Map body = new HashMap<>(); - body.put("input_key1", "value1"); - 
body.put("input_key2", 45.3d); - input.setBody(body); - input.setMethod("POST"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals(task.getReasonForIncompletion(), TaskModel.Status.COMPLETED, task.getStatus()); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertNull("response is: " + response, response); - } - - @Test - public void testFailure() { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri( - "http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/failure"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals( - "Task output: " + task.getOutputData(), TaskModel.Status.FAILED, task.getStatus()); - assertTrue(task.getReasonForIncompletion().contains(ERROR_RESPONSE)); - - task.setStatus(TaskModel.Status.SCHEDULED); - task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); - httpTask.start(workflow, task, workflowExecutor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); - } - - @Test - public void testPostAsyncComplete() { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/post"); - Map body = new HashMap<>(); - body.put("input_key1", "value1"); - body.put("input_key2", 45.3d); - input.setBody(body); - input.setMethod("POST"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - task.getInputData().put("asyncComplete", true); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals( - task.getReasonForIncompletion(), TaskModel.Status.IN_PROGRESS, 
task.getStatus()); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus()); - assertTrue("response is: " + response, response instanceof Map); - Map map = (Map) response; - Set inputKeys = body.keySet(); - Set responseKeys = map.keySet(); - inputKeys.containsAll(responseKeys); - responseKeys.containsAll(inputKeys); - } - - @Test - public void testTextGET() { - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/text"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(TEXT_RESPONSE, response); - } - - @Test - public void testNumberGET() { - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri( - "http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/numeric"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertEquals(NUM_RESPONSE, response); - assertTrue(response instanceof Number); - } - - @Test - public void testJsonGET() throws JsonProcessingException { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/json"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - Map 
hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(TaskModel.Status.COMPLETED, task.getStatus()); - assertTrue(response instanceof Map); - Map map = (Map) response; - assertEquals(JSON_RESPONSE, objectMapper.writeValueAsString(map)); - } - - @Test - public void testExecute() { - - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/json"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setScheduledTime(0); - - boolean executed = httpTask.execute(workflow, task, workflowExecutor); - assertFalse(executed); - } - - @Test - public void testHTTPGetConnectionTimeOut() { - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - Instant start = Instant.now(); - input.setConnectionTimeOut(110); - input.setMethod("GET"); - input.setUri("http://10.255.14.15"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setScheduledTime(0); - httpTask.start(workflow, task, workflowExecutor); - Instant end = Instant.now(); - long diff = end.toEpochMilli() - start.toEpochMilli(); - assertEquals(task.getStatus(), TaskModel.Status.FAILED); - assertTrue(diff >= 110L); - } - - @Test - public void testHTTPGETReadTimeOut() { - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setReadTimeOut(-1); - input.setMethod("GET"); - input.setUri("http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/json"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - task.setStatus(TaskModel.Status.SCHEDULED); - task.setScheduledTime(0); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals(task.getStatus(), TaskModel.Status.FAILED); - } - - @Test - public void 
testOptional() { - TaskModel task = new TaskModel(); - HttpTask.Input input = new HttpTask.Input(); - input.setUri( - "http://" + mockServer.getHost() + ":" + mockServer.getServerPort() + "/failure"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, workflowExecutor); - assertEquals( - "Task output: " + task.getOutputData(), TaskModel.Status.FAILED, task.getStatus()); - assertTrue(task.getReasonForIncompletion().contains(ERROR_RESPONSE)); - assertFalse(task.getStatus().isSuccessful()); - - task.setStatus(TaskModel.Status.SCHEDULED); - task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); - task.setReferenceTaskName("t1"); - httpTask.start(workflow, task, workflowExecutor); - assertEquals(TaskModel.Status.FAILED, task.getStatus()); - assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); - assertFalse(task.getStatus().isSuccessful()); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setOptional(true); - workflowTask.setName("HTTP"); - workflowTask.setWorkflowTaskType(TaskType.USER_DEFINED); - workflowTask.setTaskReferenceName("t1"); - - WorkflowDef def = new WorkflowDef(); - def.getTasks().add(workflowTask); - - WorkflowModel workflow = new WorkflowModel(); - workflow.setWorkflowDefinition(def); - workflow.getTasks().add(task); - - MetadataDAO metadataDAO = mock(MetadataDAO.class); - ExternalPayloadStorageUtils externalPayloadStorageUtils = - mock(ExternalPayloadStorageUtils.class); - ParametersUtils parametersUtils = mock(ParametersUtils.class); - SystemTaskRegistry systemTaskRegistry = mock(SystemTaskRegistry.class); - - new DeciderService( - new IDGenerator(), - parametersUtils, - metadataDAO, - externalPayloadStorageUtils, - systemTaskRegistry, - Collections.emptyMap(), - Duration.ofMinutes(60)) - .decide(workflow); - } -} diff --git a/http-task/src/test/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProviderTest.java 
b/http-task/src/test/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProviderTest.java deleted file mode 100644 index 816f5d6f9..000000000 --- a/http-task/src/test/java/com/netflix/conductor/tasks/http/providers/DefaultRestTemplateProviderTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.http.providers; - -import java.time.Duration; - -import org.junit.Test; -import org.springframework.web.client.RestTemplate; - -import com.netflix.conductor.tasks.http.HttpTask; - -import static org.junit.Assert.*; - -public class DefaultRestTemplateProviderTest { - - @Test - public void differentObjectsForDifferentThreads() throws InterruptedException { - DefaultRestTemplateProvider defaultRestTemplateProvider = - new DefaultRestTemplateProvider(Duration.ofMillis(150), Duration.ofMillis(100)); - final RestTemplate restTemplate = - defaultRestTemplateProvider.getRestTemplate(new HttpTask.Input()); - final StringBuilder result = new StringBuilder(); - Thread t1 = - new Thread( - () -> { - RestTemplate restTemplate1 = - defaultRestTemplateProvider.getRestTemplate( - new HttpTask.Input()); - if (restTemplate1 != restTemplate) { - result.append("different"); - } - }); - t1.start(); - t1.join(); - assertEquals(result.toString(), "different"); - } - - @Test - public void sameObjectForSameThread() { - DefaultRestTemplateProvider defaultRestTemplateProvider = - new DefaultRestTemplateProvider(Duration.ofMillis(150), Duration.ofMillis(100)); - RestTemplate client1 = defaultRestTemplateProvider.getRestTemplate(new HttpTask.Input()); - RestTemplate client2 = defaultRestTemplateProvider.getRestTemplate(new HttpTask.Input()); - assertSame(client1, client2); - assertNotNull(client1); - } -} diff --git a/inappropriate-service-intimacy.html b/inappropriate-service-intimacy.html new file mode 100644 index 000000000..2faeb96b7 --- /dev/null +++ b/inappropriate-service-intimacy.html @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + 
+ + + + + + + MSANose Report + + + +

    + + +
    +
    +
    + View More on GitHub +

    Inappropriate Service Intimacy (ISI)

    +
    +

    One module requesting private data from a separate module also breaks the microservice definition. Each microservice should have control over its private data.

    +
    +
    + +
    +
    + + +
    +
    +
    + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/index.html b/index.html new file mode 100644 index 000000000..f0ddc7d90 --- /dev/null +++ b/index.html @@ -0,0 +1,190 @@ + + + + + + + + + + + + + + + + + + + + + 𝜇Sensor + + + + + + +
    +
    +
    +

    Summary

    +
    +
    + +
    +
    +
    +
    +
    +
    +

    + API GATEWAY +

    +
    +
    + + +
    +
    +
    +
    + +
    +
    +
    +

    + Shared Persistency +

    +
    +
    + 26 + +
    +
    +
    +
    +
    +
    +
    +

    + Shared Library +

    +
    +
    + 33 + +
    +
    +
    +
    +
    +
    +
    +

    + ESB +

    +
    +
    + 1 + +
    +
    +
    +
    +
    +
    +
    +

    + Unversioned API +

    +
    +
    + 6193 + +
    +
    +
    +
    +
    +
    +
    +

    + Hardcoded Enpoints +

    +
    +
    + 0 + +
    +
    +
    +
    +
    +
    +
    +

    + Cyclic Dependency +

    +
    +
    + 0 + +
    +
    +
    +
    +
    +
    +
    +

    + ISI +

    +
    +
    + 0 + +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/java-sdk/README.md b/java-sdk/README.md deleted file mode 100644 index 111470ec5..000000000 --- a/java-sdk/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# SDK for Conductor -Conductor SDK allows developers to create, test and execute workflows using code. - -There are three main features of the SDK: - -1. [Create and run workflows using code](workflow_sdk.md) -2. [Create and run strongly typed workers](worker_sdk.md) -3. [Unit Testing framework for workflows and workers](testing_framework.md) - - - diff --git a/java-sdk/build.gradle b/java-sdk/build.gradle deleted file mode 100644 index f9055ce37..000000000 --- a/java-sdk/build.gradle +++ /dev/null @@ -1,31 +0,0 @@ -apply plugin: 'groovy' - -dependencies { - - implementation project(':conductor-common') - implementation project(':conductor-client') - - implementation "com.fasterxml.jackson.core:jackson-databind" - implementation "com.google.guava:guava:${revGuava}" - implementation "cglib:cglib:3.3.0" - implementation "com.sun.jersey:jersey-client:${revJersey}" - - testImplementation "org.springframework:spring-web" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" - - testImplementation "com.fasterxml.jackson.core:jackson-core" - testImplementation "org.apache.commons:commons-lang3" - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "javax.ws.rs:javax.ws.rs-api:${revJAXRS}" - testImplementation "org.glassfish.jersey.core:jersey-common:${revJerseyCommon}" -} - -test { - testLogging { - exceptionFormat = 'full' - } -} -sourceSets.main.java.srcDirs += ['example/java', 'example/resources'] - diff --git a/java-sdk/dependencies.lock b/java-sdk/dependencies.lock deleted file mode 100644 index 1d910ebba..000000000 --- a/java-sdk/dependencies.lock +++ /dev/null @@ -1,421 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": 
{ - "locked": "2.6.7" - } - }, - "compileClasspath": { - "cglib:cglib": { - "locked": "3.3.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "runtimeClasspath": { - "cglib:cglib": { - "locked": "3.3.0" - }, - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - 
"com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "project": true - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.7" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - 
"com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.7.36" - } - }, - "testCompileClasspath": { - "cglib:cglib": { - "locked": "3.3.0" - }, - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.sun.jersey:jersey-client": { - "locked": "1.19.4" - }, - "javax.ws.rs:javax.ws.rs-api": { - "locked": "2.1.1" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.glassfish.jersey.core:jersey-common": { - "locked": "2.22.2" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework:spring-web": { - "locked": "5.3.19" - } - }, - "testRuntimeClasspath": { - 
"cglib:cglib": { - "locked": "3.3.0" - }, - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "project": true - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.10.10" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.122.0" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.7" - }, - "javax.ws.rs:javax.ws.rs-api": { - "locked": "2.1.1" - }, - 
"junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.glassfish.jersey.core:jersey-common": { - "locked": "2.22.2" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.7.36" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - 
"org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework:spring-web": { - "locked": "5.3.19" - } - } -} \ No newline at end of file diff --git a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Order.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Order.java deleted file mode 100644 index 2258c2918..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Order.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -import java.math.BigDecimal; - -public class Order { - - public enum ShippingMethod { - GROUND, - NEXT_DAY_AIR, - SAME_DAY - } - - private String orderNumber; - - private String sku; - - private int quantity; - - private BigDecimal unitPrice; - - private String zipCode; - - private String countryCode; - - private ShippingMethod shippingMethod; - - public Order(String orderNumber, String sku, int quantity, BigDecimal unitPrice) { - this.orderNumber = orderNumber; - this.sku = sku; - this.quantity = quantity; - this.unitPrice = unitPrice; - } - - public Order() {} - - public String getOrderNumber() { - return orderNumber; - } - - public void setOrderNumber(String orderNumber) { - this.orderNumber = orderNumber; - } - - public String getSku() { - return sku; - } - - public void setSku(String sku) { - this.sku = sku; - } - - public int getQuantity() { - return quantity; - } - - public void setQuantity(int quantity) { - this.quantity = quantity; - } - - public BigDecimal getUnitPrice() { - return unitPrice; - } - - public void setUnitPrice(BigDecimal unitPrice) { - this.unitPrice = unitPrice; - } - - public String getZipCode() { - return zipCode; - } - - public void setZipCode(String zipCode) { - this.zipCode = zipCode; - } - - public String getCountryCode() { - return countryCode; - } - - public void setCountryCode(String countryCode) { - this.countryCode = countryCode; - } - - public ShippingMethod getShippingMethod() { - return shippingMethod; - } - - public void setShippingMethod(ShippingMethod shippingMethod) { - this.shippingMethod = shippingMethod; - } -} diff --git 
a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Shipment.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Shipment.java deleted file mode 100644 index f5f32d6a4..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/Shipment.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -public class Shipment { - - private String userId; - - private String orderNo; - - public Shipment(String userId, String orderNo) { - this.userId = userId; - this.orderNo = orderNo; - } - - public Shipment() {} - - public String getUserId() { - return userId; - } - - public void setUserId(String userId) { - this.userId = userId; - } - - public String getOrderNo() { - return orderNo; - } - - public void setOrderNo(String orderNo) { - this.orderNo = orderNo; - } -} diff --git a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentState.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentState.java deleted file mode 100644 index c0abd12df..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentState.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -public class ShipmentState { - - private boolean paymentCompleted; - - private boolean emailSent; - - private boolean shipped; - - private String trackingNumber; - - public boolean isPaymentCompleted() { - return paymentCompleted; - } - - public void setPaymentCompleted(boolean paymentCompleted) { - this.paymentCompleted = paymentCompleted; - } - - public boolean isEmailSent() { - return emailSent; - } - - public void setEmailSent(boolean emailSent) { - this.emailSent = emailSent; - } - - public boolean isShipped() { - return shipped; - } - - public void setShipped(boolean shipped) { - this.shipped = shipped; - } - - public String getTrackingNumber() { - return trackingNumber; - } - - public void setTrackingNumber(String trackingNumber) { - this.trackingNumber = trackingNumber; - } -} diff --git a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkers.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkers.java deleted file mode 100644 index 3cfd41e1d..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkers.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -import java.math.BigDecimal; -import java.util.*; - -import com.netflix.conductor.sdk.workflow.def.tasks.DynamicForkInput; -import com.netflix.conductor.sdk.workflow.def.tasks.SubWorkflow; -import com.netflix.conductor.sdk.workflow.def.tasks.Task; -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.task.OutputParam; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; - -public class ShipmentWorkers { - - @WorkerTask("generateDynamicFork") - public DynamicForkInput generateDynamicFork( - @InputParam("orderDetails") List orderDetails, - @InputParam("userDetails") User userDetails) { - DynamicForkInput input = new DynamicForkInput(); - List> tasks = new ArrayList<>(); - Map inputs = new HashMap<>(); - - for (int i = 0; i < orderDetails.size(); i++) { - Order detail = orderDetails.get(i); - String referenceName = "order_flow_sub_" + i; - tasks.add( - new SubWorkflow(referenceName, "order_flow", null) - .input("orderDetail", detail) - .input("userDetails", userDetails)); - inputs.put(referenceName, new HashMap<>()); - } - input.setInputs(inputs); - input.setTasks(tasks); - return input; - } - - @WorkerTask("get_order_details") - public List getOrderDetails(@InputParam("orderNo") String orderNo) { - int lineItemCount = new Random().nextInt(10); - List orderDetails = new ArrayList<>(); - for (int i = 0; i < lineItemCount; i++) { - Order orderDetail = new Order(orderNo, "sku_" + i, 2, BigDecimal.valueOf(20.5)); - orderDetail.setOrderNumber(UUID.randomUUID().toString()); - orderDetail.setCountryCode(i % 2 == 0 ? 
"US" : "CA"); - if (i % 3 == 0) { - orderDetail.setCountryCode("UK"); - } - - if (orderDetail.getCountryCode().equals("US")) - orderDetail.setShippingMethod(Order.ShippingMethod.SAME_DAY); - else if (orderDetail.getCountryCode().equals("CA")) - orderDetail.setShippingMethod(Order.ShippingMethod.NEXT_DAY_AIR); - else orderDetail.setShippingMethod(Order.ShippingMethod.GROUND); - - orderDetails.add(orderDetail); - } - return orderDetails; - } - - @WorkerTask("get_user_details") - public User getUserDetails(@InputParam("userId") String userId) { - User user = - new User( - "User Name", - userId + "@example.com", - "1234 forline street", - "mountain view", - "95030", - "US", - "Paypal", - "biling_001"); - - return user; - } - - @WorkerTask("calculate_tax_and_total") - public @OutputParam("total_amount") BigDecimal calculateTax( - @InputParam("orderDetail") Order orderDetails) { - BigDecimal preTaxAmount = - orderDetails.getUnitPrice().multiply(new BigDecimal(orderDetails.getQuantity())); - BigDecimal tax = BigDecimal.valueOf(0.2).multiply(preTaxAmount); - if (!"US".equals(orderDetails.getCountryCode())) { - tax = BigDecimal.ZERO; - } - return preTaxAmount.add(tax); - } - - @WorkerTask("ground_shipping_label") - public @OutputParam("reference_number") String prepareGroundShipping( - @InputParam("name") String name, - @InputParam("address") String address, - @InputParam("orderNo") String orderNo) { - - return "Ground_" + orderNo; - } - - @WorkerTask("air_shipping_label") - public @OutputParam("reference_number") String prepareAirShipping( - @InputParam("name") String name, - @InputParam("address") String address, - @InputParam("orderNo") String orderNo) { - - return "Air_" + orderNo; - } - - @WorkerTask("same_day_shipping_label") - public @OutputParam("reference_number") String prepareSameDayShipping( - @InputParam("name") String name, - @InputParam("address") String address, - @InputParam("orderNo") String orderNo) { - - return "SameDay_" + orderNo; - } - - 
@WorkerTask("charge_payment") - public @OutputParam("reference") String chargePayment( - @InputParam("amount") BigDecimal amount, - @InputParam("billingId") String billingId, - @InputParam("billingType") String billingType) { - - return UUID.randomUUID().toString(); - } - - @WorkerTask("send_email") - public void sendEmail( - @InputParam("name") String name, - @InputParam("email") String email, - @InputParam("orderNo") String orderNo) {} -} diff --git a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkflow.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkflow.java deleted file mode 100644 index c22ba662a..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/ShipmentWorkflow.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; -import com.netflix.conductor.sdk.workflow.def.WorkflowBuilder; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; - -public class ShipmentWorkflow { - - private final WorkflowExecutor executor; - - public ShipmentWorkflow(WorkflowExecutor executor) { - this.executor = executor; - this.executor.initWorkers(ShipmentWorkflow.class.getPackageName()); - } - - public ConductorWorkflow createOrderFlow() { - WorkflowBuilder builder = new WorkflowBuilder<>(executor); - builder.name("order_flow") - .version(1) - .ownerEmail("user@example.com") - .timeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF, 60) // 1 day max - .description("Workflow to track shipment") - .add( - new SimpleTask("calculate_tax_and_total", "calculate_tax_and_total") - .input("orderDetail", ConductorWorkflow.input.get("orderDetail"))) - .add( - new SimpleTask("charge_payment", "charge_payment") - .input( - "billingId", - ConductorWorkflow.input - .map("userDetails") - .get("billingId"), - "billingType", - ConductorWorkflow.input - .map("userDetails") - .get("billingType"), - "amount", "${calculate_tax_and_total.output.total_amount}")) - .add( - new Switch("shipping_label", "${workflow.input.orderDetail.shippingMethod}") - .switchCase( - Order.ShippingMethod.GROUND.toString(), - new 
SimpleTask( - "ground_shipping_label", - "ground_shipping_label") - .input( - "name", - ConductorWorkflow.input - .map("userDetails") - .get("name"), - "address", - ConductorWorkflow.input - .map("userDetails") - .get("addressLine"), - "orderNo", - ConductorWorkflow.input - .map("orderDetail") - .get("orderNumber"))) - .switchCase( - Order.ShippingMethod.NEXT_DAY_AIR.toString(), - new SimpleTask("air_shipping_label", "air_shipping_label") - .input( - "name", - ConductorWorkflow.input - .map("userDetails") - .get("name"), - "address", - ConductorWorkflow.input - .map("userDetails") - .get("addressLine"), - "orderNo", - ConductorWorkflow.input - .map("orderDetail") - .get("orderNumber"))) - .switchCase( - Order.ShippingMethod.SAME_DAY.toString(), - new SimpleTask( - "same_day_shipping_label", - "same_day_shipping_label") - .input( - "name", - ConductorWorkflow.input - .map("userDetails") - .get("name"), - "address", - ConductorWorkflow.input - .map("userDetails") - .get("addressLine"), - "orderNo", - ConductorWorkflow.input - .map("orderDetail") - .get("orderNumber"))) - .defaultCase( - new Terminate( - "unsupported_shipping_type", - Workflow.WorkflowStatus.FAILED, - "Unsupported Shipping Method"))) - .add( - new SimpleTask("send_email", "send_email") - .input( - "name", - ConductorWorkflow.input - .map("userDetails") - .get("name"), - "email", - ConductorWorkflow.input - .map("userDetails") - .get("email"), - "orderNo", - ConductorWorkflow.input - .map("orderDetail") - .get("orderNumber"))); - ConductorWorkflow conductorWorkflow = builder.build(); - conductorWorkflow.registerWorkflow(true, true); - return conductorWorkflow; - } - - public ConductorWorkflow createShipmentWorkflow() { - - WorkflowBuilder builder = new WorkflowBuilder<>(executor); - - SimpleTask getOrderDetails = - new SimpleTask("get_order_details", "get_order_details") - .input("orderNo", ConductorWorkflow.input.get("orderNo")); - - SimpleTask getUserDetails = - new SimpleTask("get_user_details", 
"get_user_details") - .input("userId", ConductorWorkflow.input.get("userId")); - - ConductorWorkflow conductorWorkflow = - builder.name("shipment_workflow") - .version(1) - .ownerEmail("user@example.com") - .variables(new ShipmentState()) - .timeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF, 60) // 30 days - .description("Workflow to track shipment") - .add( - new ForkJoin( - "get_in_parallel", - new Task[] {getOrderDetails}, - new Task[] {getUserDetails})) - - // For all the line items in the order, run in parallel: - // (calculate tax, charge payment, set state, prepare shipment, send - // shipment, set state) - .add( - new DynamicFork( - "process_order", - new SimpleTask("generateDynamicFork", "generateDynamicFork") - .input( - "orderDetails", - getOrderDetails.taskOutput.get("result")) - .input("userDetails", getUserDetails.taskOutput))) - - // Update the workflow state with shipped = true - .add(new SetVariable("update_state").input("shipped", true)) - .build(); - - conductorWorkflow.registerWorkflow(true, true); - - return conductorWorkflow; - } - - public static void main(String[] args) { - - String conductorServerURL = - "http://localhost:8080/api/"; // Change this to your Conductor server - WorkflowExecutor executor = new WorkflowExecutor(conductorServerURL); - - // Create the new shipment workflow - ShipmentWorkflow shipmentWorkflow = new ShipmentWorkflow(executor); - - // Create two workflows - - // 1. Order flow that ships an individual order - // 2. Shipment Workflow that tracks multiple orders in a shipment - shipmentWorkflow.createOrderFlow(); - ConductorWorkflow workflow = shipmentWorkflow.createShipmentWorkflow(); - - // Execute the workflow and wait for it to complete - try { - Shipment workflowInput = new Shipment("userA", "order123"); - - // Execute returns a completable future. - CompletableFuture executionFuture = workflow.execute(workflowInput); - - // Wait for a maximum of a minute for the workflow to complete. 
- Workflow run = executionFuture.get(1, TimeUnit.MINUTES); - - System.out.println("Workflow Id: " + run); - System.out.println("Workflow Status: " + run.getStatus()); - System.out.println("Workflow Output: " + run.getOutput()); - - } catch (Exception e) { - e.printStackTrace(); - } finally { - System.exit(0); - } - - System.out.println("Done"); - } -} diff --git a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/User.java b/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/User.java deleted file mode 100644 index e5a5233d2..000000000 --- a/java-sdk/example/java/com/netflix/conductor/sdk/example/shipment/User.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.example.shipment; - -public class User { - - private String name; - - private String email; - - private String addressLine; - - private String city; - - private String zipCode; - - private String countryCode; - - private String billingType; - - private String billingId; - - public User( - String name, - String email, - String addressLine, - String city, - String zipCode, - String countryCode, - String billingType, - String billingId) { - this.name = name; - this.email = email; - this.addressLine = addressLine; - this.city = city; - this.zipCode = zipCode; - this.countryCode = countryCode; - this.billingType = billingType; - this.billingId = billingId; - } - - public User() {} - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - public String getAddressLine() { - return addressLine; - } - - public void setAddressLine(String addressLine) { - this.addressLine = addressLine; - } - - public String getCity() { - return city; - } - - public void setCity(String city) { - this.city = city; - } - - public String getZipCode() { - return zipCode; - } - - public void setZipCode(String zipCode) { - this.zipCode = zipCode; - } - - public String getCountryCode() { - return countryCode; - } - - public void setCountryCode(String countryCode) { - this.countryCode = countryCode; - } - - public String getBillingType() { - return billingType; - } - - public void setBillingType(String billingType) { - this.billingType = billingType; - } - - public String 
getBillingId() { - return billingId; - } - - public void setBillingId(String billingId) { - this.billingId = billingId; - } -} diff --git a/java-sdk/example/resources/script.js b/java-sdk/example/resources/script.js deleted file mode 100644 index af6d42c43..000000000 --- a/java-sdk/example/resources/script.js +++ /dev/null @@ -1,11 +0,0 @@ -function e() { - if ($.value > 1){ - return { - "key": "value", - "key2": 42 - }; - } else { - return {}; - } -} -e(); \ No newline at end of file diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/healthcheck/HealthCheckClient.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/healthcheck/HealthCheckClient.java deleted file mode 100644 index e8a83b30a..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/healthcheck/HealthCheckClient.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.healthcheck; - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.net.URL; - -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; - -public class HealthCheckClient { - - private final String healthCheckURL; - - private final ObjectMapper objectMapper; - - public HealthCheckClient(String healthCheckURL) { - this.healthCheckURL = healthCheckURL; - this.objectMapper = - new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - } - - public boolean isServerRunning() { - try { - - BufferedReader in = - new BufferedReader(new InputStreamReader(new URL(healthCheckURL).openStream())); - StringBuilder response = new StringBuilder(); - String inputLine; - while ((inputLine = in.readLine()) != null) { - response.append(inputLine); - } - in.close(); - HealthCheckResults healthCheckResults = - objectMapper.readValue(response.toString(), HealthCheckResults.class); - return healthCheckResults.healthy; - } catch (Throwable t) { - return false; - } - } - - private static final class HealthCheckResults { - - private boolean healthy; - - public boolean isHealthy() { - return healthy; - } - - public void setHealthy(boolean healthy) { - this.healthy = healthy; - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/LocalServerRunner.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/LocalServerRunner.java deleted file mode 100644 index f406fa051..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/LocalServerRunner.java +++ /dev/null @@ -1,205 +0,0 
@@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.testing; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.sdk.healthcheck.HealthCheckClient; - -import com.google.common.util.concurrent.Uninterruptibles; - -public class LocalServerRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(LocalServerRunner.class); - - private final HealthCheckClient healthCheck; - - private Process serverProcess; - - private final ScheduledExecutorService healthCheckExecutor = - Executors.newSingleThreadScheduledExecutor(); - - private final CountDownLatch serverProcessLatch = new CountDownLatch(1); - - private final int port; - - private final String conductorVersion; - - private final String serverURL; - - private static Map serverInstances = new HashMap<>(); - - public LocalServerRunner(int port, String conductorVersion) { - this.port = port; - this.conductorVersion = conductorVersion; - this.serverURL = "http://localhost:" + port + "/"; - healthCheck = new HealthCheckClient(serverURL + "health"); - } - - public String getServerAPIUrl() { - return this.serverURL + "api/"; - } - /** - * Starts the local server. 
Downloads the latest conductor build from the maven repo If you want - * to start the server from a specific download location, set `repositoryURL` system property - * with the link to the actual downloadable server boot jar file. - * - *

    System Properties that can be set conductorVersion: when specified, uses this - * version of conductor to run tests (and downloads from maven repo) repositoryURL: full url - * where the server boot jar can be downloaded from. This can be a public repo or internal - * repository, allowing full control over the location and version of the conductor server - */ - public void startLocalServer() { - synchronized (serverInstances) { - if (serverInstances.get(port) != null) { - throw new IllegalStateException( - "Another server has already been started at port " + port); - } - serverInstances.put(port, this); - } - - try { - String downloadURL = - "https://repo1.maven.org/maven2/com/netflix/conductor/conductor-server/" - + conductorVersion - + "/conductor-server-" - + conductorVersion - + "-boot.jar"; - - String repositoryURL = - Optional.ofNullable(System.getProperty("repositoryURL")).orElse(downloadURL); - - LOGGER.info( - "Running conductor with version {} from repo url {}", - conductorVersion, - repositoryURL); - - Runtime.getRuntime().addShutdownHook(new Thread(this::shutdown)); - installAndStartServer(repositoryURL, port); - healthCheckExecutor.scheduleAtFixedRate( - () -> { - try { - if (serverProcessLatch.getCount() > 0) { - boolean isRunning = healthCheck.isServerRunning(); - if (isRunning) { - serverProcessLatch.countDown(); - } - } - } catch (Exception e) { - LOGGER.warn( - "Caught an exception while polling for server running status {}", - e.getMessage()); - } - }, - 100, - 100, - TimeUnit.MILLISECONDS); - Uninterruptibles.awaitUninterruptibly(serverProcessLatch, 1, TimeUnit.MINUTES); - - if (serverProcessLatch.getCount() > 0) { - throw new RuntimeException("Server not healthy"); - } - healthCheckExecutor.shutdownNow(); - - } catch (IOException e) { - throw new Error(e); - } - } - - public void shutdown() { - if (serverProcess != null) { - serverProcess.destroyForcibly(); - serverInstances.remove(port); - } - } - - private synchronized void 
installAndStartServer(String repositoryURL, int localServerPort) - throws IOException { - if (serverProcess != null) { - return; - } - - String configFile = - LocalServerRunner.class.getResource("/test-server.properties").getFile(); - String tempDir = System.getProperty("java.io.tmpdir"); - Path serverFile = Paths.get(tempDir, "conductor-server.jar"); - if (!Files.exists(serverFile)) { - Files.copy(new URL(repositoryURL).openStream(), serverFile); - } - - String command = - "java -Dserver.port=" - + localServerPort - + " -DCONDUCTOR_CONFIG_FILE=" - + configFile - + " -jar " - + serverFile; - LOGGER.info("Running command {}", command); - - serverProcess = Runtime.getRuntime().exec(command); - BufferedReader error = - new BufferedReader(new InputStreamReader(serverProcess.getErrorStream())); - BufferedReader op = - new BufferedReader(new InputStreamReader(serverProcess.getInputStream())); - - // This captures the stream and copies to a visible log for tracking errors asynchronously - // using a separate thread - Executors.newSingleThreadScheduledExecutor() - .execute( - () -> { - String line = null; - while (true) { - try { - if ((line = error.readLine()) == null) break; - } catch (IOException e) { - LOGGER.error("Exception reading input stream:", e); - } - // copy to standard error - LOGGER.error("Server error stream - {}", line); - } - }); - - // This captures the stream and copies to a visible log for tracking errors asynchronously - // using a separate thread - Executors.newSingleThreadScheduledExecutor() - .execute( - () -> { - String line = null; - while (true) { - try { - if ((line = op.readLine()) == null) break; - } catch (IOException e) { - LOGGER.error("Exception reading input stream:", e); - } - // copy to standard out - LOGGER.trace("Server input stream - {}", line); - } - }); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/WorkflowTestRunner.java 
b/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/WorkflowTestRunner.java deleted file mode 100644 index 758445c03..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/testing/WorkflowTestRunner.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.testing; - -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.executor.task.AnnotatedWorkerExecutor; - -public class WorkflowTestRunner { - - private LocalServerRunner localServerRunner; - - private final AnnotatedWorkerExecutor annotatedWorkerExecutor; - - private final WorkflowExecutor workflowExecutor; - - public WorkflowTestRunner(String serverApiUrl) { - - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI(serverApiUrl); - this.annotatedWorkerExecutor = new AnnotatedWorkerExecutor(taskClient); - - this.workflowExecutor = new WorkflowExecutor(serverApiUrl); - } - - public WorkflowTestRunner(int port, String conductorVersion) { - - localServerRunner = new LocalServerRunner(port, conductorVersion); - - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI(localServerRunner.getServerAPIUrl()); - this.annotatedWorkerExecutor = new AnnotatedWorkerExecutor(taskClient); - - this.workflowExecutor = new WorkflowExecutor(localServerRunner.getServerAPIUrl()); - } - - public WorkflowExecutor getWorkflowExecutor() { - return workflowExecutor; - } - - public void init(String basePackages) { - if (localServerRunner != null) { - localServerRunner.startLocalServer(); - } - annotatedWorkerExecutor.initWorkers(basePackages); - } - - public void shutdown() { - localServerRunner.shutdown(); - annotatedWorkerExecutor.shutdown(); - workflowExecutor.shutdown(); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ConductorWorkflow.java 
b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ConductorWorkflow.java deleted file mode 100644 index 009aa517a..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ConductorWorkflow.java +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -import java.util.*; -import java.util.concurrent.CompletableFuture; - -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.tasks.Task; -import com.netflix.conductor.sdk.workflow.def.tasks.TaskRegistry; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.utils.InputOutputGetter; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * @param Type of the workflow input - */ -public class ConductorWorkflow { - - public static final InputOutputGetter input = - new InputOutputGetter("workflow", InputOutputGetter.Field.input); - - public static final InputOutputGetter output = - new InputOutputGetter("workflow", InputOutputGetter.Field.output); - - private String name; - - private String description; - - private int version; - - private String failureWorkflow; - - private String ownerEmail; - - private WorkflowDef.TimeoutPolicy timeoutPolicy; - - private Map workflowOutput; - - private long timeoutSeconds; - - private boolean restartable = true; - - private T defaultInput; - - private Map variables; - - private List tasks = new 
ArrayList<>(); - - private final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private final WorkflowExecutor workflowExecutor; - - public ConductorWorkflow(WorkflowExecutor workflowExecutor) { - this.workflowOutput = new HashMap<>(); - this.workflowExecutor = workflowExecutor; - this.restartable = true; - } - - public void setName(String name) { - this.name = name; - } - - public void setVersion(int version) { - this.version = version; - } - - public void setDescription(String description) { - this.description = description; - } - - public void setFailureWorkflow(String failureWorkflow) { - this.failureWorkflow = failureWorkflow; - } - - public void add(Task task) { - this.tasks.add(task); - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - - public int getVersion() { - return version; - } - - public String getFailureWorkflow() { - return failureWorkflow; - } - - public String getOwnerEmail() { - return ownerEmail; - } - - public void setOwnerEmail(String ownerEmail) { - this.ownerEmail = ownerEmail; - } - - public WorkflowDef.TimeoutPolicy getTimeoutPolicy() { - return timeoutPolicy; - } - - public void setTimeoutPolicy(WorkflowDef.TimeoutPolicy timeoutPolicy) { - this.timeoutPolicy = timeoutPolicy; - } - - public long getTimeoutSeconds() { - return timeoutSeconds; - } - - public void setTimeoutSeconds(long timeoutSeconds) { - this.timeoutSeconds = timeoutSeconds; - } - - public boolean isRestartable() { - return restartable; - } - - public void setRestartable(boolean restartable) { - this.restartable = restartable; - } - - public T getDefaultInput() { - return defaultInput; - } - - public void setDefaultInput(T defaultInput) { - this.defaultInput = defaultInput; - } - - public Map getWorkflowOutput() { - return workflowOutput; - } - - public void setWorkflowOutput(Map workflowOutput) { - this.workflowOutput = workflowOutput; - } - - public Object getVariables() { - 
return variables; - } - - public void setVariables(Map variables) { - this.variables = variables; - } - - /** - * Execute a dynamic workflow without creating a definition in metadata store. - * - *


    - * Note: Use this with caution - as this does not promote re-usability of the workflows - * - * @param input Workflow Input - The input object is converted a JSON doc as an input to the - * workflow - * @return - */ - public CompletableFuture executeDynamic(T input) { - return workflowExecutor.executeWorkflow(this, input); - } - - /** - * Executes the workflow using registered metadata definitions - * - * @see #registerWorkflow() - * @param input - * @return - */ - public CompletableFuture execute(T input) { - return workflowExecutor.executeWorkflow(this.getName(), this.getVersion(), input); - } - - /** - * Registers a new workflow in the server. - * - * @return true if the workflow is successfully registered. False if the workflow cannot be - * registered and the workflow definition already exists on the server with given name + - * version The call will throw a runtime exception if any of the tasks are missing - * definitions on the server. - */ - public boolean registerWorkflow() { - return registerWorkflow(false, false); - } - - /** - * @param overwrite set to true if the workflow should be overwritten if the definition already - * exists with the given name and version. Use with caution - * @return true if success, false otherwise. - */ - public boolean registerWorkflow(boolean overwrite) { - return registerWorkflow(overwrite, false); - } - - /** - * @param overwrite set to true if the workflow should be overwritten if the definition already - * exists with the given name and version. Use with caution - * @param registerTasks if set to true, missing task definitions are registered with the default - * configuration. - * @return true if success, false otherwise. - */ - public boolean registerWorkflow(boolean overwrite, boolean registerTasks) { - WorkflowDef workflowDef = toWorkflowDef(); - List missing = getMissingTasks(workflowDef); - if (!missing.isEmpty()) { - if (!registerTasks) { - throw new RuntimeException( - "Workflow cannot be registered. 
The following tasks do not have definitions. " - + "Please register these tasks before creating the workflow. Missing Tasks = " - + missing); - } else { - String ownerEmail = this.ownerEmail; - missing.stream().forEach(taskName -> registerTaskDef(taskName, ownerEmail)); - } - } - return workflowExecutor.registerWorkflow(workflowDef, overwrite); - } - - /** - * @return Convert to the WorkflowDef model used by the Metadata APIs - */ - public WorkflowDef toWorkflowDef() { - - WorkflowDef def = new WorkflowDef(); - def.setName(name); - def.setDescription(description); - def.setVersion(version); - def.setFailureWorkflow(failureWorkflow); - def.setOwnerEmail(ownerEmail); - def.setTimeoutPolicy(timeoutPolicy); - def.setTimeoutSeconds(timeoutSeconds); - def.setRestartable(restartable); - def.setOutputParameters(workflowOutput); - def.setVariables(variables); - def.setInputTemplate(objectMapper.convertValue(defaultInput, Map.class)); - - for (Task task : tasks) { - def.getTasks().addAll(task.getWorkflowDefTasks()); - } - return def; - } - - /** - * Generate ConductorWorkflow based on the workflow metadata definition - * - * @param def - * @return - */ - public static ConductorWorkflow fromWorkflowDef(WorkflowDef def) { - ConductorWorkflow workflow = new ConductorWorkflow<>(null); - fromWorkflowDef(workflow, def); - return workflow; - } - - public ConductorWorkflow from(String workflowName, Integer workflowVersion) { - WorkflowDef def = - workflowExecutor.getMetadataClient().getWorkflowDef(workflowName, workflowVersion); - fromWorkflowDef(this, def); - return this; - } - - private static void fromWorkflowDef(ConductorWorkflow workflow, WorkflowDef def) { - workflow.setName(def.getName()); - workflow.setVersion(def.getVersion()); - workflow.setFailureWorkflow(def.getFailureWorkflow()); - workflow.setRestartable(def.isRestartable()); - workflow.setVariables(def.getVariables()); - workflow.setDefaultInput((T) def.getInputTemplate()); - - 
workflow.setWorkflowOutput(def.getOutputParameters()); - workflow.setOwnerEmail(def.getOwnerEmail()); - workflow.setDescription(def.getDescription()); - workflow.setTimeoutSeconds(def.getTimeoutSeconds()); - workflow.setTimeoutPolicy(def.getTimeoutPolicy()); - - List workflowTasks = def.getTasks(); - for (WorkflowTask workflowTask : workflowTasks) { - Task task = TaskRegistry.getTask(workflowTask); - workflow.tasks.add(task); - } - } - - private List getMissingTasks(WorkflowDef workflowDef) { - List missing = new ArrayList<>(); - workflowDef.collectTasks().stream() - .filter(workflowTask -> workflowTask.getType().equals(TaskType.TASK_TYPE_SIMPLE)) - .map(WorkflowTask::getName) - .distinct() - .parallel() - .forEach( - taskName -> { - try { - TaskDef taskDef = - workflowExecutor.getMetadataClient().getTaskDef(taskName); - } catch (ConductorClientException cce) { - if (cce.getStatus() == 404) { - missing.add(taskName); - } else { - throw cce; - } - } - }); - return missing; - } - - private void registerTaskDef(String taskName, String ownerEmail) { - TaskDef taskDef = new TaskDef(); - taskDef.setName(taskName); - taskDef.setOwnerEmail(ownerEmail); - workflowExecutor.getMetadataClient().registerTaskDefs(Arrays.asList(taskDef)); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ConductorWorkflow workflow = (ConductorWorkflow) o; - return version == workflow.version && Objects.equals(name, workflow.name); - } - - @Override - public int hashCode() { - return Objects.hash(name, version); - } - - @Override - public String toString() { - try { - return objectMapper.writeValueAsString(toWorkflowDef()); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ValidationError.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ValidationError.java deleted file mode 
100644 index 761835955..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ValidationError.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -public class ValidationError extends RuntimeException { - - public ValidationError(String message) { - super(message); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/WorkflowBuilder.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/WorkflowBuilder.java deleted file mode 100644 index caf33d8cf..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/WorkflowBuilder.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -import java.util.*; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.utils.InputOutputGetter; -import com.netflix.conductor.sdk.workflow.utils.MapBuilder; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * @param Input type for the workflow - */ -public class WorkflowBuilder { - - private String name; - - private String description; - - private int version; - - private String failureWorkflow; - - private String ownerEmail; - - private WorkflowDef.TimeoutPolicy timeoutPolicy; - - private long timeoutSeconds; - - private boolean restartable = true; - - private T defaultInput; - - private Map output = new HashMap<>(); - - private Map state; - - protected List> tasks = new ArrayList<>(); - - private WorkflowExecutor workflowExecutor; - - public final InputOutputGetter input = - new InputOutputGetter("workflow", InputOutputGetter.Field.input); - - private final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - public WorkflowBuilder(WorkflowExecutor workflowExecutor) { - this.workflowExecutor = workflowExecutor; - this.tasks = new ArrayList<>(); - } - - public WorkflowBuilder name(String name) { - this.name = name; - return this; - } - - public WorkflowBuilder version(int version) { - this.version = version; - return this; - } - - 
public WorkflowBuilder description(String description) { - this.description = description; - return this; - } - - public WorkflowBuilder failureWorkflow(String failureWorkflow) { - this.failureWorkflow = failureWorkflow; - return this; - } - - public WorkflowBuilder ownerEmail(String ownerEmail) { - this.ownerEmail = ownerEmail; - return this; - } - - public WorkflowBuilder timeoutPolicy( - WorkflowDef.TimeoutPolicy timeoutPolicy, long timeoutSeconds) { - this.timeoutPolicy = timeoutPolicy; - this.timeoutSeconds = timeoutSeconds; - return this; - } - - public WorkflowBuilder add(Task... tasks) { - Collections.addAll(this.tasks, tasks); - return this; - } - - public WorkflowBuilder defaultInput(T defaultInput) { - this.defaultInput = defaultInput; - return this; - } - - public WorkflowBuilder restartable(boolean restartable) { - this.restartable = restartable; - return this; - } - - public WorkflowBuilder variables(Object variables) { - try { - this.state = objectMapper.convertValue(variables, Map.class); - } catch (Exception e) { - throw new IllegalArgumentException( - "Workflow Variables cannot be converted to Map. 
Supplied: " - + variables.getClass().getName()); - } - return this; - } - - public WorkflowBuilder output(String key, boolean value) { - output.put(key, value); - return this; - } - - public WorkflowBuilder output(String key, String value) { - output.put(key, value); - return this; - } - - public WorkflowBuilder output(String key, Number value) { - output.put(key, value); - return this; - } - - public WorkflowBuilder output(String key, Object value) { - output.put(key, value); - return this; - } - - public WorkflowBuilder output(MapBuilder mapBuilder) { - output.putAll(mapBuilder.build()); - return this; - } - - public ConductorWorkflow build() throws ValidationError { - - validate(); - - ConductorWorkflow workflow = new ConductorWorkflow(workflowExecutor); - if (description != null) { - workflow.setDescription(description); - } - - workflow.setName(name); - workflow.setVersion(version); - workflow.setDescription(description); - workflow.setFailureWorkflow(failureWorkflow); - workflow.setOwnerEmail(ownerEmail); - workflow.setTimeoutPolicy(timeoutPolicy); - workflow.setTimeoutSeconds(timeoutSeconds); - workflow.setRestartable(restartable); - workflow.setDefaultInput(defaultInput); - workflow.setWorkflowOutput(output); - workflow.setVariables(state); - - for (Task task : tasks) { - workflow.add(task); - } - - return workflow; - } - - /** - * Validate: 1. There are no tasks with duplicate reference names 2. Each of the task is - * consistent with its definition 3. 
- */ - private void validate() throws ValidationError { - - List allTasks = new ArrayList<>(); - for (Task task : tasks) { - List workflowDefTasks = task.getWorkflowDefTasks(); - for (WorkflowTask workflowDefTask : workflowDefTasks) { - allTasks.addAll(workflowDefTask.collectTasks()); - } - } - - Map taskMap = new HashMap<>(); - Set duplicateTasks = new HashSet<>(); - for (WorkflowTask task : allTasks) { - if (taskMap.containsKey(task.getTaskReferenceName())) { - duplicateTasks.add(task.getTaskReferenceName()); - } else { - taskMap.put(task.getTaskReferenceName(), task); - } - } - if (!duplicateTasks.isEmpty()) { - throw new ValidationError( - "Task Reference Names MUST be unique across all the tasks in the workkflow. " - + "Please update/change reference names to be unique for the following tasks: " - + duplicateTasks); - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DoWhile.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DoWhile.java deleted file mode 100644 index 20634179e..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DoWhile.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -public class DoWhile extends Task { - - private String loopCondition; - - private List> loopTasks = new ArrayList<>(); - - /** - * Execute tasks in a loop determined by the condition set using condition parameter. The loop - * will continue till the condition is true - * - * @param taskReferenceName - * @param condition Javascript that evaluates to a boolean value - * @param tasks - */ - public DoWhile(String taskReferenceName, String condition, Task... tasks) { - super(taskReferenceName, TaskType.DO_WHILE); - Collections.addAll(this.loopTasks, tasks); - this.loopCondition = condition; - } - - /** - * Similar to a for loop, run tasks for N times - * - * @param taskReferenceName - * @param loopCount - * @param tasks - */ - public DoWhile(String taskReferenceName, int loopCount, Task... tasks) { - super(taskReferenceName, TaskType.DO_WHILE); - Collections.addAll(this.loopTasks, tasks); - this.loopCondition = getForLoopCondition(loopCount); - } - - DoWhile(WorkflowTask workflowTask) { - super(workflowTask); - this.loopCondition = workflowTask.getLoopCondition(); - for (WorkflowTask task : workflowTask.getLoopOver()) { - Task loopTask = TaskRegistry.getTask(task); - this.loopTasks.add(loopTask); - } - } - - public DoWhile loopOver(Task... 
tasks) { - for (Task task : tasks) { - this.loopTasks.add(task); - } - return this; - } - - private String getForLoopCondition(int loopCount) { - return "if ( $." - + getTaskReferenceName() - + "['iteration'] < " - + loopCount - + ") { true; } else { false; }"; - } - - public String getLoopCondition() { - return loopCondition; - } - - public List getLoopTasks() { - return loopTasks; - } - - @Override - public void updateWorkflowTask(WorkflowTask workflowTask) { - workflowTask.setLoopCondition(loopCondition); - - List loopWorkflowTasks = new ArrayList<>(); - for (Task task : this.loopTasks) { - loopWorkflowTasks.addAll(task.getWorkflowDefTasks()); - } - workflowTask.setLoopOver(loopWorkflowTasks); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Dynamic.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Dynamic.java deleted file mode 100644 index ae1bb0777..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Dynamic.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.google.common.base.Strings; - -/** Wait task */ -public class Dynamic extends Task { - - public static final String TASK_NAME_INPUT_PARAM = "taskToExecute"; - - public Dynamic(String taskReferenceName, String dynamicTaskNameValue) { - super(taskReferenceName, TaskType.DYNAMIC); - if (Strings.isNullOrEmpty(dynamicTaskNameValue)) { - throw new AssertionError("Null/Empty dynamicTaskNameValue"); - } - super.input(TASK_NAME_INPUT_PARAM, dynamicTaskNameValue); - } - - Dynamic(WorkflowTask workflowTask) { - super(workflowTask); - } - - @Override - public void updateWorkflowTask(WorkflowTask task) { - task.setDynamicTaskNameParam(TASK_NAME_INPUT_PARAM); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicFork.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicFork.java deleted file mode 100644 index f5d2c11ab..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicFork.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.ArrayList; -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -public class DynamicFork extends Task { - - public static final String FORK_TASK_PARAM = "forkedTasks"; - - public static final String FORK_TASK_INPUT_PARAM = "forkedTasksInputs"; - - private String forkTasksParameter; - - private String forkTasksInputsParameter; - - private Join join; - - private SimpleTask forkPrepareTask; - - /** - * Dynamic fork task that executes a set of tasks in parallel which are determined at run time. - * Use cases: Based on the input, you want to fork N number of processes in parallel to be - * executed. The number N is not pre-determined at the definition time and so a regular ForkJoin - * cannot be used. - * - * @param taskReferenceName - */ - public DynamicFork( - String taskReferenceName, String forkTasksParameter, String forkTasksInputsParameter) { - super(taskReferenceName, TaskType.FORK_JOIN_DYNAMIC); - this.join = new Join(taskReferenceName + "_join"); - this.forkTasksParameter = forkTasksParameter; - this.forkTasksInputsParameter = forkTasksInputsParameter; - super.input(FORK_TASK_PARAM, forkTasksParameter); - super.input(FORK_TASK_INPUT_PARAM, forkTasksInputsParameter); - } - - /** - * Dynamic fork task that executes a set of tasks in parallel which are determined at run time. - * Use cases: Based on the input, you want to fork N number of processes in parallel to be - * executed. 
The number N is not pre-determined at the definition time and so a regular ForkJoin - * cannot be used. - * - * @param taskReferenceName - * @param forkPrepareTask A Task that produces the output as {@link DynamicForkInput} to specify - * which tasks to fork. - */ - public DynamicFork(String taskReferenceName, SimpleTask forkPrepareTask) { - super(taskReferenceName, TaskType.FORK_JOIN_DYNAMIC); - this.forkPrepareTask = forkPrepareTask; - this.join = new Join(taskReferenceName + "_join"); - this.forkTasksParameter = forkPrepareTask.taskOutput.get(FORK_TASK_PARAM); - this.forkTasksInputsParameter = forkPrepareTask.taskOutput.get(FORK_TASK_INPUT_PARAM); - super.input(FORK_TASK_PARAM, forkTasksParameter); - super.input(FORK_TASK_INPUT_PARAM, forkTasksInputsParameter); - } - - DynamicFork(WorkflowTask workflowTask) { - super(workflowTask); - String nameOfParamForForkTask = workflowTask.getDynamicForkTasksParam(); - String nameOfParamForForkTaskInput = workflowTask.getDynamicForkTasksInputParamName(); - this.forkTasksParameter = - (String) workflowTask.getInputParameters().get(nameOfParamForForkTask); - this.forkTasksInputsParameter = - (String) workflowTask.getInputParameters().get(nameOfParamForForkTaskInput); - } - - public Join getJoin() { - return join; - } - - public String getForkTasksParameter() { - return forkTasksParameter; - } - - public String getForkTasksInputsParameter() { - return forkTasksInputsParameter; - } - - @Override - public void updateWorkflowTask(WorkflowTask task) { - task.setDynamicForkTasksParam("forkedTasks"); - task.setDynamicForkTasksInputParamName("forkedTasksInputs"); - } - - @Override - protected List getChildrenTasks() { - List tasks = new ArrayList<>(); - tasks.addAll(join.getWorkflowDefTasks()); - return tasks; - } - - @Override - protected List getParentTasks() { - if (forkPrepareTask != null) { - return List.of(forkPrepareTask.toWorkflowTask()); - } - return List.of(); - } -} diff --git 
a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicForkInput.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicForkInput.java deleted file mode 100644 index 7133e0ac3..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicForkInput.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.List; -import java.util.Map; - -public class DynamicForkInput { - - /** List of tasks to execute in parallel */ - private List> tasks; - - /** - * Input to the tasks. Key is the reference name of the task and value is an Object that is sent - * as input to the task - */ - private Map inputs; - - public DynamicForkInput(List> tasks, Map inputs) { - this.tasks = tasks; - this.inputs = inputs; - } - - public DynamicForkInput() {} - - public List> getTasks() { - return tasks; - } - - public void setTasks(List> tasks) { - this.tasks = tasks; - } - - public Map getInputs() { - return inputs; - } - - public void setInputs(Map inputs) { - this.inputs = inputs; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Event.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Event.java deleted file mode 100644 index 26a55fab7..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Event.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.google.common.base.Strings; - -/** Task to publish Events to external queuing systems like SQS, NATS, AMQP etc. */ -public class Event extends Task { - - private static final String SINK_PARAMETER = "sink"; - - /** - * @param taskReferenceName Unique reference name within the workflow - * @param eventSink qualified name of the event sink where the message is published. Using the - * format sink_type:location e.g. sqs:sqs_queue_name, amqp_queue:queue_name, - * amqp_exchange:queue_name, nats:queue_name - */ - public Event(String taskReferenceName, String eventSink) { - super(taskReferenceName, TaskType.EVENT); - if (Strings.isNullOrEmpty(eventSink)) { - throw new AssertionError("Null/Empty eventSink"); - } - super.input(SINK_PARAMETER, eventSink); - } - - Event(WorkflowTask workflowTask) { - super(workflowTask); - } - - public String getSink() { - return (String) getInput().get(SINK_PARAMETER); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/ForkJoin.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/ForkJoin.java deleted file mode 100644 index 69af0ddcb..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/ForkJoin.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -/** ForkJoin task */ -public class ForkJoin extends Task { - - private Join join; - - private Task[][] forkedTasks; - - /** - * execute task specified in the forkedTasks parameter in parallel. - * - *

    forkedTask is a two-dimensional list that executes the outermost list in parallel and list - * within that is executed sequentially. - * - *

    e.g. [[task1, task2],[task3, task4],[task5]] are executed as: - * - *

    -     *                    ---------------
    -     *                    |     fork    |
    -     *                    ---------------
    -     *                    |       |     |
    -     *                    |       |     |
    -     *                  task1  task3  task5
    -     *                  task2  task4    |
    -     *                    |      |      |
    -     *                 ---------------------
    -     *                 |       join        |
    -     *                 ---------------------
    -     * 
    - * - *

    This method automatically adds a join that waits for all the *last* tasks in the fork - * (e.g. task2, task4 and task5 in the above example) to be completed.* - * - *

    Use join method @see {@link ForkJoin#joinOn(String...)} to override this behavior (note: - * not a common scenario) - * - * @param taskReferenceName unique task reference name - * @param forkedTasks List of tasks to be executed in parallel - */ - public ForkJoin(String taskReferenceName, Task[]... forkedTasks) { - super(taskReferenceName, TaskType.FORK_JOIN); - this.forkedTasks = forkedTasks; - } - - ForkJoin(WorkflowTask workflowTask) { - super(workflowTask); - int size = workflowTask.getForkTasks().size(); - this.forkedTasks = new Task[size][]; - int i = 0; - for (List forkTasks : workflowTask.getForkTasks()) { - Task[] tasks = new Task[forkTasks.size()]; - for (int j = 0; j < forkTasks.size(); j++) { - WorkflowTask forkWorkflowTask = forkTasks.get(j); - Task task = TaskRegistry.getTask(forkWorkflowTask); - tasks[j] = task; - } - this.forkedTasks[i++] = tasks; - } - } - - public ForkJoin joinOn(String... joinOn) { - this.join = new Join(getTaskReferenceName() + "_join", joinOn); - return this; - } - - @Override - protected List getChildrenTasks() { - WorkflowTask fork = toWorkflowTask(); - - WorkflowTask joinWorkflowTask = null; - if (this.join != null) { - List joinTasks = this.join.getWorkflowDefTasks(); - joinWorkflowTask = joinTasks.get(0); - } else { - joinWorkflowTask = new WorkflowTask(); - joinWorkflowTask.setWorkflowTaskType(TaskType.JOIN); - joinWorkflowTask.setTaskReferenceName(getTaskReferenceName() + "_join"); - joinWorkflowTask.setName(joinWorkflowTask.getTaskReferenceName()); - joinWorkflowTask.setJoinOn(fork.getJoinOn()); - } - return Arrays.asList(joinWorkflowTask); - } - - @Override - public void updateWorkflowTask(WorkflowTask fork) { - List joinOnTaskRefNames = new ArrayList<>(); - List> forkTasks = new ArrayList<>(); - - for (Task[] forkedTaskList : forkedTasks) { - List forkedWorkflowTasks = new ArrayList<>(); - for (Task baseWorkflowTask : forkedTaskList) { - forkedWorkflowTasks.addAll(baseWorkflowTask.getWorkflowDefTasks()); - } - 
forkTasks.add(forkedWorkflowTasks); - joinOnTaskRefNames.add( - forkedWorkflowTasks.get(forkedWorkflowTasks.size() - 1).getTaskReferenceName()); - } - if (this.join != null) { - fork.setJoinOn(List.of(this.join.getJoinOn())); - } else { - fork.setJoinOn(joinOnTaskRefNames); - } - - fork.setForkTasks(forkTasks); - } - - public Task[][] getForkedTasks() { - return forkedTasks; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Http.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Http.java deleted file mode 100644 index b1685d395..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Http.java +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** Wait task */ -public class Http extends Task { - - private static final Logger LOGGER = LoggerFactory.getLogger(Http.class); - - private static final String INPUT_PARAM = "http_request"; - - private ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private Input httpRequest; - - public Http(String taskReferenceName) { - super(taskReferenceName, TaskType.HTTP); - this.httpRequest = new Input(); - this.httpRequest.method = Input.HttpMethod.GET; - super.input(INPUT_PARAM, httpRequest); - } - - Http(WorkflowTask workflowTask) { - super(workflowTask); - - Object inputRequest = workflowTask.getInputParameters().get(INPUT_PARAM); - if (inputRequest != null) { - try { - this.httpRequest = objectMapper.convertValue(inputRequest, Input.class); - } catch (Exception e) { - LOGGER.error("Error while trying to convert input request " + e.getMessage(), e); - } - } - } - - public Http input(Input httpRequest) { - this.httpRequest = httpRequest; - return this; - } - - public Http url(String url) { - this.httpRequest.setUri(url); - return this; - } - - public Http method(Input.HttpMethod method) { - this.httpRequest.setMethod(method); - return this; - } - - public Http headers(Map headers) { - this.httpRequest.setHeaders(headers); - 
return this; - } - - public Http body(Object body) { - this.httpRequest.setBody(body); - return this; - } - - public Http readTimeout(int readTimeout) { - this.httpRequest.setReadTimeOut(readTimeout); - return this; - } - - public Input getHttpRequest() { - return httpRequest; - } - - @Override - protected void updateWorkflowTask(WorkflowTask workflowTask) { - workflowTask.getInputParameters().put(INPUT_PARAM, httpRequest); - } - - public static class Input { - public enum HttpMethod { - PUT, - POST, - GET, - DELETE, - OPTIONS, - HEAD - } - - private HttpMethod method; // PUT, POST, GET, DELETE, OPTIONS, HEAD - private String vipAddress; - private String appName; - private Map headers = new HashMap<>(); - private String uri; - private Object body; - private String accept = "application/json"; - private String contentType = "application/json"; - private Integer connectionTimeOut; - private Integer readTimeOut; - - /** - * @return the method - */ - public HttpMethod getMethod() { - return method; - } - - /** - * @param method the method to set - */ - public void setMethod(HttpMethod method) { - this.method = method; - } - - /** - * @return the headers - */ - public Map getHeaders() { - return headers; - } - - /** - * @param headers the headers to set - */ - public void setHeaders(Map headers) { - this.headers = headers; - } - - /** - * @return the body - */ - public Object getBody() { - return body; - } - - /** - * @param body the body to set - */ - public void setBody(Object body) { - this.body = body; - } - - /** - * @return the uri - */ - public String getUri() { - return uri; - } - - /** - * @param uri the uri to set - */ - public void setUri(String uri) { - this.uri = uri; - } - - /** - * @return the vipAddress - */ - public String getVipAddress() { - return vipAddress; - } - - /** - * @param vipAddress the vipAddress to set - */ - public void setVipAddress(String vipAddress) { - this.vipAddress = vipAddress; - } - - /** - * @return the accept - */ - public 
String getAccept() { - return accept; - } - - /** - * @param accept the accept to set - */ - public void setAccept(String accept) { - this.accept = accept; - } - - /** - * @return the MIME content type to use for the request - */ - public String getContentType() { - return contentType; - } - - /** - * @param contentType the MIME content type to set - */ - public void setContentType(String contentType) { - this.contentType = contentType; - } - - public String getAppName() { - return appName; - } - - public void setAppName(String appName) { - this.appName = appName; - } - - /** - * @return the connectionTimeOut - */ - public Integer getConnectionTimeOut() { - return connectionTimeOut; - } - - /** - * @return the readTimeOut - */ - public Integer getReadTimeOut() { - return readTimeOut; - } - - public void setConnectionTimeOut(Integer connectionTimeOut) { - this.connectionTimeOut = connectionTimeOut; - } - - public void setReadTimeOut(Integer readTimeOut) { - this.readTimeOut = readTimeOut; - } - - @Override - public String toString() { - return "Input{" - + "method=" - + method - + ", vipAddress='" - + vipAddress - + '\'' - + ", appName='" - + appName - + '\'' - + ", headers=" - + headers - + ", uri='" - + uri - + '\'' - + ", body=" - + body - + ", accept='" - + accept - + '\'' - + ", contentType='" - + contentType - + '\'' - + ", connectionTimeOut=" - + connectionTimeOut - + ", readTimeOut=" - + readTimeOut - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Input input = (Input) o; - return method == input.method - && Objects.equals(vipAddress, input.vipAddress) - && Objects.equals(appName, input.appName) - && Objects.equals(headers, input.headers) - && Objects.equals(uri, input.uri) - && Objects.equals(body, input.body) - && Objects.equals(accept, input.accept) - && Objects.equals(contentType, input.contentType) - && Objects.equals(connectionTimeOut, 
input.connectionTimeOut) - && Objects.equals(readTimeOut, input.readTimeOut); - } - - @Override - public int hashCode() { - return Objects.hash( - method, - vipAddress, - appName, - headers, - uri, - body, - accept, - contentType, - connectionTimeOut, - readTimeOut); - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/JQ.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/JQ.java deleted file mode 100644 index d1eff0580..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/JQ.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import com.google.common.base.Strings; - -/** - * JQ Transformation task See https://stedolan.github.io/jq/ for how to form the queries to parse - * JSON payloads - */ -public class JQ extends Task { - - private static final String QUERY_EXPRESSION_PARAMETER = "queryExpression"; - - public JQ(String taskReferenceName, String queryExpression) { - super(taskReferenceName, TaskType.JSON_JQ_TRANSFORM); - if (Strings.isNullOrEmpty(queryExpression)) { - throw new AssertionError("Null/Empty queryExpression"); - } - super.input(QUERY_EXPRESSION_PARAMETER, queryExpression); - } - - JQ(WorkflowTask workflowTask) { - super(workflowTask); - } - - public String getQueryExpression() { - return (String) getInput().get(QUERY_EXPRESSION_PARAMETER); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Javascript.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Javascript.java deleted file mode 100644 index 6468faa8a..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Javascript.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; - -import javax.script.Bindings; -import javax.script.ScriptEngine; -import javax.script.ScriptEngineManager; -import javax.script.ScriptException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.ValidationError; - -import com.google.common.base.Strings; - -/** - * JQ Transformation task See https://stedolan.github.io/jq/ for how to form the queries to parse - * JSON payloads - */ -public class Javascript extends Task { - - private static final Logger LOGGER = LoggerFactory.getLogger(Javascript.class); - - private static final String EXPRESSION_PARAMETER = "expression"; - - private static final String EVALUATOR_TYPE_PARAMETER = "evaluatorType"; - - private static final String ENGINE = "nashorn"; - - /** - * Javascript tasks are executed on the Conductor server without having to write worker code - * - *

    Use {@link Javascript#validate()} method to validate the javascript to ensure the script - * is valid. - * - * @param taskReferenceName - * @param script script to execute - */ - public Javascript(String taskReferenceName, String script) { - super(taskReferenceName, TaskType.INLINE); - if (Strings.isNullOrEmpty(script)) { - throw new AssertionError("Null/Empty script"); - } - super.input(EVALUATOR_TYPE_PARAMETER, "javascript"); - super.input(EXPRESSION_PARAMETER, script); - } - - /** - * Javascript tasks are executed on the Conductor server without having to write worker code - * - *

    Use {@link Javascript#validate()} method to validate the javascript to ensure the script - * is valid. - * - * @param taskReferenceName - * @param stream stream to load the script file from - */ - public Javascript(String taskReferenceName, InputStream stream) { - super(taskReferenceName, TaskType.INLINE); - if (stream == null) { - throw new AssertionError("Stream is empty"); - } - super.input(EVALUATOR_TYPE_PARAMETER, "javascript"); - try { - String script = new String(stream.readAllBytes()); - super.input(EXPRESSION_PARAMETER, script); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - Javascript(WorkflowTask workflowTask) { - super(workflowTask); - } - - public String getExpression() { - return (String) getInput().get(EXPRESSION_PARAMETER); - } - - /** - * Validates the script. - * - * @return - */ - public Javascript validate() { - ScriptEngine scriptEngine = new ScriptEngineManager().getEngineByName(ENGINE); - if (scriptEngine == null) { - LOGGER.error("missing " + ENGINE + " engine. Ensure you are running supported JVM"); - return this; - } - - try { - - Bindings bindings = scriptEngine.createBindings(); - bindings.put("$", new HashMap<>()); - scriptEngine.eval(getExpression(), bindings); - - } catch (ScriptException e) { - String message = e.getMessage(); - throw new ValidationError(message); - } - return this; - } - - /** - * Helper method to unit test your javascript. The method is not used for creating or executing - * workflow but is meant for testing only. - * - * @param input Input that against which the script will be executed - * @return Output of the script - */ - public Object test(Map input) { - - ScriptEngine scriptEngine = new ScriptEngineManager().getEngineByName(ENGINE); - if (scriptEngine == null) { - LOGGER.error("missing " + ENGINE + " engine. 
Ensure you are running supported JVM"); - return this; - } - - try { - - Bindings bindings = scriptEngine.createBindings(); - bindings.put("$", input); - return scriptEngine.eval(getExpression(), bindings); - - } catch (ScriptException e) { - String message = e.getMessage(); - throw new ValidationError(message); - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Join.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Join.java deleted file mode 100644 index f60777aa2..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Join.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.Arrays; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -public class Join extends Task { - - private String[] joinOn; - - /** - * @param taskReferenceName - * @param joinOn List of task reference names to join on - */ - public Join(String taskReferenceName, String... joinOn) { - super(taskReferenceName, TaskType.JOIN); - this.joinOn = joinOn; - } - - Join(WorkflowTask workflowTask) { - super(workflowTask); - this.joinOn = workflowTask.getJoinOn().toArray(new String[0]); - } - - @Override - protected void updateWorkflowTask(WorkflowTask workflowTask) { - workflowTask.setJoinOn(Arrays.asList(joinOn)); - } - - public String[] getJoinOn() { - return joinOn; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SetVariable.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SetVariable.java deleted file mode 100644 index d4863dec2..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SetVariable.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.WorkflowBuilder; - -public class SetVariable extends Task { - /** - * Sets the value of the variable in workflow. Used for workflow state management. Workflow - * state is a Map that is initialized using @see {@link WorkflowBuilder#variables(Object)} - * - * @param taskReferenceName Use input methods to set the variable values - */ - public SetVariable(String taskReferenceName) { - super(taskReferenceName, TaskType.SET_VARIABLE); - } - - SetVariable(WorkflowTask workflowTask) { - super(workflowTask); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SimpleTask.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SimpleTask.java deleted file mode 100644 index 369aa60f5..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SimpleTask.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -/** Workflow task executed by a worker */ -public class SimpleTask extends Task { - - private TaskDef taskDef; - - public SimpleTask(String taskDefName, String taskReferenceName) { - super(taskReferenceName, TaskType.SIMPLE); - super.name(taskDefName); - } - - SimpleTask(WorkflowTask workflowTask) { - super(workflowTask); - this.taskDef = workflowTask.getTaskDefinition(); - } - - public TaskDef getTaskDef() { - return taskDef; - } - - public SimpleTask setTaskDef(TaskDef taskDef) { - this.taskDef = taskDef; - return this; - } - - @Override - protected void updateWorkflowTask(WorkflowTask workflowTask) { - workflowTask.setTaskDefinition(taskDef); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SubWorkflow.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SubWorkflow.java deleted file mode 100644 index 2e54be19f..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SubWorkflow.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; - -public class SubWorkflow extends Task { - - private ConductorWorkflow conductorWorkflow; - - private String workflowName; - - private Integer workflowVersion; - - /** - * Start a workflow as a sub-workflow - * - * @param taskReferenceName - * @param workflowName - * @param workflowVersion - */ - public SubWorkflow(String taskReferenceName, String workflowName, Integer workflowVersion) { - super(taskReferenceName, TaskType.SUB_WORKFLOW); - this.workflowName = workflowName; - this.workflowVersion = workflowVersion; - } - - /** - * Start a workflow as a sub-workflow - * - * @param taskReferenceName - * @param conductorWorkflow - */ - public SubWorkflow(String taskReferenceName, ConductorWorkflow conductorWorkflow) { - super(taskReferenceName, TaskType.SUB_WORKFLOW); - this.conductorWorkflow = conductorWorkflow; - } - - SubWorkflow(WorkflowTask workflowTask) { - super(workflowTask); - SubWorkflowParams subworkflowParam = workflowTask.getSubWorkflowParam(); - this.workflowName = subworkflowParam.getName(); - this.workflowVersion = subworkflowParam.getVersion(); - if (subworkflowParam.getWorkflowDef() != null) { - this.conductorWorkflow = - ConductorWorkflow.fromWorkflowDef(subworkflowParam.getWorkflowDef()); - } - } - - public ConductorWorkflow getConductorWorkflow() { - return conductorWorkflow; - } - - public String 
getWorkflowName() { - return workflowName; - } - - public int getWorkflowVersion() { - return workflowVersion; - } - - @Override - protected void updateWorkflowTask(WorkflowTask workflowTask) { - SubWorkflowParams subWorkflowParam = new SubWorkflowParams(); - - if (conductorWorkflow != null) { - subWorkflowParam.setWorkflowDef(conductorWorkflow.toWorkflowDef()); - } else { - subWorkflowParam.setName(workflowName); - subWorkflowParam.setVersion(workflowVersion); - } - workflowTask.setSubWorkflowParam(subWorkflowParam); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Switch.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Switch.java deleted file mode 100644 index f0f34b2db..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Switch.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.*; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -/** Switch Task */ -public class Switch extends Task { - - public static final String VALUE_PARAM_NAME = "value-param"; - - public static final String JAVASCRIPT_NAME = "javascript"; - - private String caseExpression; - - private boolean useJavascript; - - private List> defaultTasks = new ArrayList<>(); - - private Map>> branches = new HashMap<>(); - - /** - * Switch case (similar to if...then...else or switch in java language) - * - * @param taskReferenceName - * @param caseExpression An expression that outputs a string value to be used as case branches. - * Case expression can be a support value parameter e.g. ${workflow.input.key} or - * ${task.output.key} or a Javascript statement. 
- * @param useJavascript set to true if the caseExpression is a javascript statement - */ - public Switch(String taskReferenceName, String caseExpression, boolean useJavascript) { - super(taskReferenceName, TaskType.SWITCH); - this.caseExpression = caseExpression; - this.useJavascript = useJavascript; - } - - /** - * Switch case (similar to if...then...else or switch in java language) - * - * @param taskReferenceName - * @param caseExpression - */ - public Switch(String taskReferenceName, String caseExpression) { - super(taskReferenceName, TaskType.SWITCH); - this.caseExpression = caseExpression; - this.useJavascript = false; - } - - Switch(WorkflowTask workflowTask) { - super(workflowTask); - Map> decisions = workflowTask.getDecisionCases(); - - decisions.entrySet().stream() - .forEach( - branch -> { - String branchName = branch.getKey(); - List branchWorkflowTasks = branch.getValue(); - List> branchTasks = new ArrayList<>(); - for (WorkflowTask branchWorkflowTask : branchWorkflowTasks) { - branchTasks.add(TaskRegistry.getTask(branchWorkflowTask)); - } - this.branches.put(branchName, branchTasks); - }); - - List defaultCases = workflowTask.getDefaultCase(); - for (WorkflowTask defaultCase : defaultCases) { - this.defaultTasks.add(TaskRegistry.getTask(defaultCase)); - } - } - - public Switch defaultCase(Task... tasks) { - defaultTasks = Arrays.asList(tasks); - return this; - } - - public Switch defaultCase(List> defaultTasks) { - this.defaultTasks = defaultTasks; - return this; - } - - public Switch decisionCases(Map>> branches) { - this.branches = branches; - return this; - } - - public Switch defaultCase(String... workerTasks) { - for (String workerTask : workerTasks) { - this.defaultTasks.add(new SimpleTask(workerTask, workerTask)); - } - return this; - } - - public Switch switchCase(String caseValue, Task... tasks) { - branches.put(caseValue, Arrays.asList(tasks)); - return this; - } - - public Switch switchCase(String caseValue, String... 
workerTasks) { - List> tasks = new ArrayList<>(workerTasks.length); - int i = 0; - for (String workerTask : workerTasks) { - tasks.add(new SimpleTask(workerTask, workerTask)); - } - branches.put(caseValue, tasks); - return this; - } - - public List> getDefaultTasks() { - return defaultTasks; - } - - public Map>> getBranches() { - return branches; - } - - @Override - public void updateWorkflowTask(WorkflowTask workflowTask) { - - if (useJavascript) { - workflowTask.setEvaluatorType(JAVASCRIPT_NAME); - workflowTask.setExpression(caseExpression); - - } else { - workflowTask.setEvaluatorType(VALUE_PARAM_NAME); - workflowTask.getInputParameters().put("switchCaseValue", caseExpression); - workflowTask.setExpression("switchCaseValue"); - } - - Map> decisionCases = new HashMap<>(); - branches.entrySet() - .forEach( - entry -> { - String decisionCase = entry.getKey(); - List> decisionTasks = entry.getValue(); - List decionTaskDefs = - new ArrayList<>(decisionTasks.size()); - for (Task decisionTask : decisionTasks) { - decionTaskDefs.addAll(decisionTask.getWorkflowDefTasks()); - } - decisionCases.put(decisionCase, decionTaskDefs); - }); - - workflowTask.setDecisionCases(decisionCases); - List defaultCaseTaskDefs = new ArrayList<>(defaultTasks.size()); - for (Task defaultTask : defaultTasks) { - defaultCaseTaskDefs.addAll(defaultTask.getWorkflowDefTasks()); - } - workflowTask.setDefaultCase(defaultCaseTaskDefs); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Task.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Task.java deleted file mode 100644 index eff1f8bdd..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Task.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.*; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.utils.InputOutputGetter; -import com.netflix.conductor.sdk.workflow.utils.MapBuilder; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Strings; - -/** Workflow Task */ -public abstract class Task { - - private String name; - - private String description; - - private String taskReferenceName; - - private boolean optional; - - private int startDelay; - - private TaskType type; - - private Map input = new HashMap<>(); - - protected final ObjectMapper om = new ObjectMapperProvider().getObjectMapper(); - - public final InputOutputGetter taskInput; - - public final InputOutputGetter taskOutput; - - public Task(String taskReferenceName, TaskType type) { - if (Strings.isNullOrEmpty(taskReferenceName)) { - throw new AssertionError("taskReferenceName cannot be null"); - } - if (type == null) { - throw new AssertionError("type cannot be null"); - } - - this.name = taskReferenceName; - this.taskReferenceName = taskReferenceName; - this.type = type; - this.taskInput = new InputOutputGetter(taskReferenceName, InputOutputGetter.Field.input); - this.taskOutput = new InputOutputGetter(taskReferenceName, InputOutputGetter.Field.output); - } - - Task(WorkflowTask workflowTask) { - this(workflowTask.getTaskReferenceName(), TaskType.valueOf(workflowTask.getType())); - this.input = 
workflowTask.getInputParameters(); - this.description = workflowTask.getDescription(); - this.name = workflowTask.getName(); - } - - public T name(String name) { - this.name = name; - return (T) this; - } - - public T description(String description) { - this.description = description; - return (T) this; - } - - public T input(String key, boolean value) { - input.put(key, value); - return (T) this; - } - - public T input(String key, Object value) { - input.put(key, value); - return (T) this; - } - - public T input(String key, char value) { - input.put(key, value); - return (T) this; - } - - public T input(String key, InputOutputGetter value) { - input.put(key, value.getParent()); - return (T) this; - } - - public T input(InputOutputGetter value) { - return input("input", value); - } - - public T input(String key, String value) { - input.put(key, value); - return (T) this; - } - - public T input(String key, Number value) { - input.put(key, value); - return (T) this; - } - - public T input(String key, Map value) { - input.put(key, value); - return (T) this; - } - - public T input(Map map) { - input.putAll(map); - return (T) this; - } - - public T input(MapBuilder builder) { - input.putAll(builder.build()); - return (T) this; - } - - public T input(Object... 
keyValues) { - if (keyValues.length == 1) { - Object kv = keyValues[0]; - Map objectMap = om.convertValue(kv, Map.class); - input.putAll(objectMap); - return (T) this; - } - if (keyValues.length % 2 == 1) { - throw new IllegalArgumentException("Not all keys have value specified"); - } - for (int i = 0; i < keyValues.length; ) { - String key = keyValues[i].toString(); - Object value = keyValues[i + 1]; - input.put(key, value); - i += 2; - } - return (T) this; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getTaskReferenceName() { - return taskReferenceName; - } - - public void setTaskReferenceName(String taskReferenceName) { - this.taskReferenceName = taskReferenceName; - } - - public boolean isOptional() { - return optional; - } - - public void setOptional(boolean optional) { - this.optional = optional; - } - - public int getStartDelay() { - return startDelay; - } - - public void setStartDelay(int startDelay) { - this.startDelay = startDelay; - } - - public TaskType getType() { - return type; - } - - public String getDescription() { - return description; - } - - public Map getInput() { - return input; - } - - public final List getWorkflowDefTasks() { - List workflowTasks = new ArrayList<>(); - workflowTasks.addAll(getParentTasks()); - workflowTasks.add(toWorkflowTask()); - workflowTasks.addAll(getChildrenTasks()); - return workflowTasks; - } - - protected final WorkflowTask toWorkflowTask() { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName(name); - workflowTask.setTaskReferenceName(taskReferenceName); - workflowTask.setWorkflowTaskType(type); - workflowTask.setDescription(description); - workflowTask.setInputParameters(input); - workflowTask.setStartDelay(startDelay); - workflowTask.setOptional(optional); - - // Let the sub-classes enrich the workflow task before returning back - updateWorkflowTask(workflowTask); - - return workflowTask; - } - - /** - * 
Override this method when the sub-class should update the default WorkflowTask generated - * using {@link #toWorkflowTask()} - * - * @param workflowTask - */ - protected void updateWorkflowTask(WorkflowTask workflowTask) {} - - /** - * Override this method when sub-classes will generate multiple workflow tasks. Used by tasks - * which have children tasks such as do_while, fork, etc. - * - * @return - */ - protected List getChildrenTasks() { - return List.of(); - } - - protected List getParentTasks() { - return List.of(); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/TaskRegistry.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/TaskRegistry.java deleted file mode 100644 index f0f964330..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/TaskRegistry.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -public class TaskRegistry { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskRegistry.class); - - private static Map> taskTypeMap = new HashMap<>(); - - public static void register(String taskType, Class taskImplementation) { - taskTypeMap.put(taskType, taskImplementation); - } - - public static Task getTask(WorkflowTask workflowTask) { - Class clazz = taskTypeMap.get(workflowTask.getType()); - if (clazz == null) { - throw new UnsupportedOperationException( - "No support to convert " + workflowTask.getType()); - } - Task task = null; - try { - task = clazz.getDeclaredConstructor(WorkflowTask.class).newInstance(workflowTask); - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - return task; - } - return task; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Terminate.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Terminate.java deleted file mode 100644 index 01ca245fa..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Terminate.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.util.HashMap; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -public class Terminate extends Task { - - private static final String TERMINATION_STATUS_PARAMETER = "terminationStatus"; - - private static final String TERMINATION_WORKFLOW_OUTPUT = "workflowOutput"; - - private static final String TERMINATION_REASON_PARAMETER = "terminationReason"; - - /** - * Terminate the workflow and mark it as FAILED - * - * @param taskReferenceName - * @param reason - */ - public Terminate(String taskReferenceName, String reason) { - this(taskReferenceName, Workflow.WorkflowStatus.FAILED, reason, new HashMap<>()); - } - - /** - * Terminate the workflow with a specific terminate status - * - * @param taskReferenceName - * @param terminationStatus - * @param reason - */ - public Terminate( - String taskReferenceName, Workflow.WorkflowStatus terminationStatus, String reason) { - this(taskReferenceName, terminationStatus, reason, new HashMap<>()); - } - - public Terminate( - String taskReferenceName, - Workflow.WorkflowStatus terminationStatus, - String reason, - Object workflowOutput) { - super(taskReferenceName, TaskType.TERMINATE); - - input(TERMINATION_STATUS_PARAMETER, terminationStatus.name()); - input(TERMINATION_WORKFLOW_OUTPUT, workflowOutput); - input(TERMINATION_REASON_PARAMETER, reason); - } - - Terminate(WorkflowTask workflowTask) { - super(workflowTask); - } -} diff --git 
a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Wait.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Wait.java deleted file mode 100644 index 1d15592b4..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Wait.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def.tasks; - -import java.time.Duration; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; - -import javax.swing.*; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -/** Wait task */ -public class Wait extends Task { - - public static final String DURATION_INPUT = "duration"; - public static final String UNTIL_INPUT = "until"; - - public static final DateTimeFormatter dateTimeFormatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm z"); - - /** - * Wait until and external signal completes the task. The external signal can be either an API - * call (POST /api/task) to update the task status or an event coming from a supported external - * queue integration like SQS, Kafka, NATS, AMQP etc. - * - *


    - * see - * https://netflix.github.io/conductor/reference-docs/wait-task for more details - * - * @param taskReferenceName - */ - public Wait(String taskReferenceName) { - super(taskReferenceName, TaskType.WAIT); - } - - public Wait(String taskReferenceName, Duration waitFor) { - super(taskReferenceName, TaskType.WAIT); - long seconds = waitFor.getSeconds(); - input(DURATION_INPUT, seconds + "s"); - } - - public Wait(String taskReferenceName, ZonedDateTime waitUntil) { - super(taskReferenceName, TaskType.WAIT); - String formattedDateTime = waitUntil.format(dateTimeFormatter); - input(UNTIL_INPUT, formattedDateTime); - } - - Wait(WorkflowTask workflowTask) { - super(workflowTask); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/WorkflowExecutor.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/WorkflowExecutor.java deleted file mode 100644 index d371194a5..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/WorkflowExecutor.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.executor; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.client.http.jersey.JerseyRequestHandler; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.task.AnnotatedWorkerExecutor; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.api.client.filter.ClientFilter; - -public class WorkflowExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutor.class); - - private final TypeReference> listOfTaskDefs = new TypeReference<>() {}; - - private Map> runningWorkflowFutures = - new ConcurrentHashMap<>(); - - private ObjectMapper objectMapper = new 
ObjectMapperProvider().getObjectMapper(); - - private TaskClient taskClient; - - private WorkflowClient workflowClient; - - private MetadataClient metadataClient; - - private final AnnotatedWorkerExecutor annotatedWorkerExecutor; - - private ScheduledExecutorService scheduledWorkflowMonitor = - Executors.newSingleThreadScheduledExecutor(); - - static { - initTaskImplementations(); - } - - public static void initTaskImplementations() { - TaskRegistry.register(TaskType.DO_WHILE.name(), DoWhile.class); - TaskRegistry.register(TaskType.DYNAMIC.name(), Dynamic.class); - TaskRegistry.register(TaskType.FORK_JOIN_DYNAMIC.name(), DynamicFork.class); - TaskRegistry.register(TaskType.FORK_JOIN.name(), ForkJoin.class); - TaskRegistry.register(TaskType.HTTP.name(), Http.class); - TaskRegistry.register(TaskType.INLINE.name(), Javascript.class); - TaskRegistry.register(TaskType.JOIN.name(), Join.class); - TaskRegistry.register(TaskType.JSON_JQ_TRANSFORM.name(), JQ.class); - TaskRegistry.register(TaskType.SET_VARIABLE.name(), SetVariable.class); - TaskRegistry.register(TaskType.SIMPLE.name(), SimpleTask.class); - TaskRegistry.register(TaskType.SUB_WORKFLOW.name(), SubWorkflow.class); - TaskRegistry.register(TaskType.SWITCH.name(), Switch.class); - TaskRegistry.register(TaskType.TERMINATE.name(), Terminate.class); - TaskRegistry.register(TaskType.WAIT.name(), Wait.class); - TaskRegistry.register(TaskType.EVENT.name(), Event.class); - } - - public WorkflowExecutor(String apiServerURL) { - this(apiServerURL, 100); - } - - public WorkflowExecutor( - String apiServerURL, int pollingInterval, ClientFilter... 
clientFilter) { - - taskClient = - new TaskClient( - new JerseyRequestHandler( - new DefaultClientConfig(), - null, - new ObjectMapperProvider().getObjectMapper(), - clientFilter)); - taskClient.setRootURI(apiServerURL); - - workflowClient = - new WorkflowClient( - new JerseyRequestHandler( - new DefaultClientConfig(), - null, - new ObjectMapperProvider().getObjectMapper(), - clientFilter)); - workflowClient.setRootURI(apiServerURL); - - metadataClient = - new MetadataClient( - new JerseyRequestHandler( - new DefaultClientConfig(), - null, - new ObjectMapperProvider().getObjectMapper(), - clientFilter)); - metadataClient.setRootURI(apiServerURL); - - annotatedWorkerExecutor = new AnnotatedWorkerExecutor(taskClient, pollingInterval); - scheduledWorkflowMonitor.scheduleAtFixedRate( - () -> { - for (Map.Entry> entry : - runningWorkflowFutures.entrySet()) { - String workflowId = entry.getKey(); - CompletableFuture future = entry.getValue(); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - if (workflow.getStatus().isTerminal()) { - future.complete(workflow); - } - } - }, - 100, - 100, - TimeUnit.MILLISECONDS); - } - - public WorkflowExecutor( - TaskClient taskClient, - WorkflowClient workflowClient, - MetadataClient metadataClient, - int pollingInterval) { - - this.taskClient = taskClient; - this.workflowClient = workflowClient; - this.metadataClient = metadataClient; - annotatedWorkerExecutor = new AnnotatedWorkerExecutor(taskClient, pollingInterval); - scheduledWorkflowMonitor.scheduleAtFixedRate( - () -> { - for (Map.Entry> entry : - runningWorkflowFutures.entrySet()) { - String workflowId = entry.getKey(); - CompletableFuture future = entry.getValue(); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - if (workflow.getStatus().isTerminal()) { - future.complete(workflow); - } - } - }, - 100, - 100, - TimeUnit.MILLISECONDS); - } - - public void initWorkers(String packagesToScan) { - 
annotatedWorkerExecutor.initWorkers(packagesToScan); - } - - public CompletableFuture executeWorkflow(String name, Integer version, Object input) { - CompletableFuture future = new CompletableFuture<>(); - Map inputMap = objectMapper.convertValue(input, Map.class); - - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setInput(inputMap); - request.setName(name); - request.setVersion(version); - - String workflowId = workflowClient.startWorkflow(request); - runningWorkflowFutures.put(workflowId, future); - return future; - } - - public CompletableFuture executeWorkflow( - ConductorWorkflow conductorWorkflow, Object input) { - - CompletableFuture future = new CompletableFuture<>(); - - Map inputMap = objectMapper.convertValue(input, Map.class); - - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setInput(inputMap); - request.setName(conductorWorkflow.getName()); - request.setVersion(conductorWorkflow.getVersion()); - request.setWorkflowDef(conductorWorkflow.toWorkflowDef()); - - String workflowId = workflowClient.startWorkflow(request); - runningWorkflowFutures.put(workflowId, future); - - return future; - } - - public void loadTaskDefs(String resourcePath) throws IOException { - InputStream resource = WorkflowExecutor.class.getResourceAsStream(resourcePath); - if (resource != null) { - List taskDefs = objectMapper.readValue(resource, listOfTaskDefs); - loadMetadata(taskDefs); - } - } - - public void loadWorkflowDefs(String resourcePath) throws IOException { - InputStream resource = WorkflowExecutor.class.getResourceAsStream(resourcePath); - if (resource != null) { - WorkflowDef workflowDef = objectMapper.readValue(resource, WorkflowDef.class); - loadMetadata(workflowDef); - } - } - - public void loadMetadata(WorkflowDef workflowDef) { - metadataClient.registerWorkflowDef(workflowDef); - } - - public void loadMetadata(List taskDefs) { - metadataClient.registerTaskDefs(taskDefs); - } - - public void shutdown() { - 
scheduledWorkflowMonitor.shutdown(); - annotatedWorkerExecutor.shutdown(); - } - - public boolean registerWorkflow(WorkflowDef workflowDef, boolean overwrite) { - try { - if (overwrite) { - metadataClient.updateWorkflowDefs(Arrays.asList(workflowDef)); - } else { - metadataClient.registerWorkflowDef(workflowDef); - } - return true; - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - return false; - } - } - - public MetadataClient getMetadataClient() { - return metadataClient; - } - - public TaskClient getTaskClient() { - return taskClient; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorker.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorker.java deleted file mode 100644 index ba0802e9e..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorker.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.executor.task; - -import java.lang.annotation.Annotation; -import java.lang.reflect.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.tasks.DynamicFork; -import com.netflix.conductor.sdk.workflow.def.tasks.DynamicForkInput; -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.task.OutputParam; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; - -public class AnnotatedWorker implements Worker { - - private String name; - - private Method workerMethod; - - private Object obj; - - private ObjectMapper om = new ObjectMapperProvider().getObjectMapper(); - - private int pollingInterval = 100; - - public AnnotatedWorker(String name, Method workerMethod, Object obj) { - this.name = name; - this.workerMethod = workerMethod; - this.obj = obj; - om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - } - - @Override - public String getTaskDefName() { - return name; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - try { - Object[] parameters = getInvocationParameters(task); - Object invocationResult = workerMethod.invoke(obj, parameters); - 
result = setValue(invocationResult, task); - } catch (Exception e) { - throw new RuntimeException(e); - } - return result; - } - - private Object[] getInvocationParameters(Task task) { - - Class[] parameterTypes = workerMethod.getParameterTypes(); - Parameter[] parameters = workerMethod.getParameters(); - - if (parameterTypes.length == 1 && parameterTypes[0].equals(Task.class)) { - return new Object[] {task}; - } else if (parameterTypes.length == 1 && parameterTypes[0].equals(Map.class)) { - return new Object[] {task.getInputData()}; - } - - Annotation[][] parameterAnnotations = workerMethod.getParameterAnnotations(); - Object[] values = new Object[parameterTypes.length]; - for (int i = 0; i < parameterTypes.length; i++) { - Annotation[] paramAnnotation = parameterAnnotations[i]; - if (paramAnnotation != null && paramAnnotation.length > 0) { - for (Annotation ann : paramAnnotation) { - if (ann.annotationType().equals(InputParam.class)) { - InputParam ip = (InputParam) ann; - String name = ip.value(); - Object value = task.getInputData().get(name); - if (List.class.isAssignableFrom(parameterTypes[i])) { - Type type = parameters[i].getParameterizedType(); - if (type instanceof ParameterizedType) { - ParameterizedType parameterizedType = (ParameterizedType) type; - Class typeOfParameter = - (Class) parameterizedType.getActualTypeArguments()[0]; - List list = om.convertValue(value, List.class); - List parameterizedList = new ArrayList<>(); - for (Object item : list) { - parameterizedList.add(om.convertValue(item, typeOfParameter)); - } - values[i] = parameterizedList; - } - } else { - values[i] = om.convertValue(value, parameterTypes[i]); - } - } - } - } else { - Object input = om.convertValue(task.getInputData(), parameterTypes[i]); - values[i] = input; - } - } - return values; - } - - private TaskResult setValue(Object invocationResult, Task task) { - - if (invocationResult == null) { - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - - 
OutputParam opAnnotation = - workerMethod.getAnnotatedReturnType().getAnnotation(OutputParam.class); - if (opAnnotation != null) { - - String name = opAnnotation.value(); - task.getOutputData().put(name, invocationResult); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - - } else if (invocationResult instanceof TaskResult) { - - return (TaskResult) invocationResult; - - } else if (invocationResult instanceof Map) { - Map resultAsMap = (Map) invocationResult; - task.getOutputData().putAll(resultAsMap); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } else if (invocationResult instanceof String - || invocationResult instanceof Number - || invocationResult instanceof Boolean) { - task.getOutputData().put("result", invocationResult); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } else if (invocationResult instanceof List) { - - List resultAsList = om.convertValue(invocationResult, List.class); - task.getOutputData().put("result", resultAsList); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - - } else if (invocationResult instanceof DynamicForkInput) { - DynamicForkInput forkInput = (DynamicForkInput) invocationResult; - List> tasks = forkInput.getTasks(); - List workflowTasks = new ArrayList<>(); - for (com.netflix.conductor.sdk.workflow.def.tasks.Task sdkTask : tasks) { - workflowTasks.addAll(sdkTask.getWorkflowDefTasks()); - } - task.getOutputData().put(DynamicFork.FORK_TASK_PARAM, workflowTasks); - task.getOutputData().put(DynamicFork.FORK_TASK_INPUT_PARAM, forkInput.getInputs()); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - - } else { - Map resultAsMap = om.convertValue(invocationResult, Map.class); - task.getOutputData().putAll(resultAsMap); - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - } - - public void setPollingInterval(int pollingInterval) { - this.pollingInterval = pollingInterval; - } - - 
@Override - public int getPollingInterval() { - return pollingInterval; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorkerExecutor.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorkerExecutor.java deleted file mode 100644 index 5480ecc15..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/AnnotatedWorkerExecutor.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.executor.task; - -import java.lang.reflect.Method; -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.client.automator.TaskRunnerConfigurer; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; - -import com.google.common.reflect.ClassPath; - -public class AnnotatedWorkerExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(AnnotatedWorkerExecutor.class); - - private TaskClient taskClient; - - private TaskRunnerConfigurer taskRunner; - - private Map workerExecutors = new HashMap<>(); - - private Map workerClassObjs = new HashMap<>(); - - private static Set scannedPackages = new HashSet<>(); - - private int pollingInteralInMS = 100; - - public AnnotatedWorkerExecutor(TaskClient taskClient) { - this.taskClient = taskClient; - } - - public AnnotatedWorkerExecutor(TaskClient taskClient, int pollingInteralInMS) { - this.taskClient = taskClient; - this.pollingInteralInMS = pollingInteralInMS; - } - - /** - * Finds any worker implementation and starts polling for tasks - * - * @param basePackage list of packages - comma separated - to scan for annotated worker - * implementation - */ - public synchronized void initWorkers(String basePackage) { - scanWorkers(basePackage); - startPolling(); - } - - /** Shuts down the workers */ - public void shutdown() { - if (taskRunner != null) { - taskRunner.shutdown(); - } - } - - private void scanWorkers(String basePackage) { - try { - if (scannedPackages.contains(basePackage)) 
{ - // skip - LOGGER.info("Package {} already scanned and will skip", basePackage); - return; - } - // Add here so to avoid infinite recursion where a class in the package contains the - // code to init workers - scannedPackages.add(basePackage); - List packagesToScan = new ArrayList<>(); - if (basePackage != null) { - String[] packages = basePackage.split(","); - Collections.addAll(packagesToScan, packages); - } - - LOGGER.info("packages to scan {}", packagesToScan); - - long s = System.currentTimeMillis(); - ClassPath.from(AnnotatedWorkerExecutor.class.getClassLoader()) - .getAllClasses() - .forEach( - classMeta -> { - String name = classMeta.getName(); - if (!includePackage(packagesToScan, name)) { - return; - } - try { - Class clazz = classMeta.load(); - Object obj = clazz.getConstructor().newInstance(); - scanClass(clazz, obj); - } catch (Throwable t) { - // trace because many classes won't have a default no-args - // constructor and will fail - LOGGER.trace( - "Caught exception while loading and scanning class {}", - t.getMessage()); - } - }); - LOGGER.info( - "Took {} ms to scan all the classes, loading {} tasks", - (System.currentTimeMillis() - s), - workerExecutors.size()); - - } catch (Exception e) { - LOGGER.error("Error while scanning for workers: ", e); - } - } - - private boolean includePackage(List packagesToScan, String name) { - for (String scanPkg : packagesToScan) { - if (name.startsWith(scanPkg)) return true; - } - return false; - } - - private void scanClass(Class clazz, Object obj) { - for (Method method : clazz.getMethods()) { - WorkerTask annotation = method.getAnnotation(WorkerTask.class); - if (annotation == null) { - continue; - } - String name = annotation.value(); - workerExecutors.put(name, method); - workerClassObjs.put(name, obj); - LOGGER.info("Adding worker for task {}, method {}", name, method); - } - } - - private void startPolling() { - List executors = new ArrayList<>(); - workerExecutors.forEach( - (taskName, method) -> { - 
Object obj = workerClassObjs.get(taskName); - AnnotatedWorker executor = new AnnotatedWorker(taskName, method, obj); - executor.setPollingInterval(pollingInteralInMS); - executors.add(executor); - }); - - if (executors.isEmpty()) { - return; - } - - taskRunner = - new TaskRunnerConfigurer.Builder(taskClient, executors) - .withThreadCount(executors.size()) - .build(); - - taskRunner.init(); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/DynamicForkWorker.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/DynamicForkWorker.java deleted file mode 100644 index 17b8309de..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/task/DynamicForkWorker.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.executor.task; - -import java.lang.reflect.Method; -import java.util.Map; -import java.util.function.Function; - -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.sdk.workflow.def.tasks.DynamicFork; -import com.netflix.conductor.sdk.workflow.def.tasks.DynamicForkInput; -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.utils.ObjectMapperProvider; - -import com.fasterxml.jackson.databind.ObjectMapper; - -public class DynamicForkWorker implements Worker { - - private final int pollingInterval; - - private final Function workerMethod; - - private final String name; - - private ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - public DynamicForkWorker( - String name, Function workerMethod, int pollingInterval) { - this.name = name; - this.workerMethod = workerMethod; - this.pollingInterval = pollingInterval; - } - - @Override - public String getTaskDefName() { - return name; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - try { - - Object parameter = getInvocationParameters(this.workerMethod, task); - DynamicForkInput output = this.workerMethod.apply(parameter); - result.getOutputData().put(DynamicFork.FORK_TASK_PARAM, output.getTasks()); - result.getOutputData().put(DynamicFork.FORK_TASK_INPUT_PARAM, output.getInputs()); - 
result.setStatus(TaskResult.Status.COMPLETED); - - } catch (Exception e) { - throw new RuntimeException(e); - } - return result; - } - - @Override - public int getPollingInterval() { - return pollingInterval; - } - - private Object getInvocationParameters(Function function, Task task) { - InputParam annotation = null; - Class parameterType = null; - for (Method method : function.getClass().getDeclaredMethods()) { - if (method.getReturnType().equals(DynamicForkInput.class)) { - annotation = method.getParameters()[0].getAnnotation(InputParam.class); - parameterType = method.getParameters()[0].getType(); - } - } - - if (parameterType.equals(Task.class)) { - return task; - } else if (parameterType.equals(Map.class)) { - return task.getInputData(); - } - if (annotation != null) { - String name = annotation.value(); - Object value = task.getInputData().get(name); - return objectMapper.convertValue(value, parameterType); - } - return objectMapper.convertValue(task.getInputData(), parameterType); - } - - public static void main(String[] args) { - Function fn = - new Function() { - @Override - public DynamicForkInput apply(@InputParam("a") TaskDef s) { - return null; - } - }; - - for (Method method : fn.getClass().getDeclaredMethods()) { - if (method.getReturnType().equals(DynamicForkInput.class)) { - System.out.println( - "\n\n-->method: " - + method - + ", input: " - + method.getParameters()[0].getType()); - System.out.println("I take input as " + method.getParameters()[0].getType()); - InputParam annotation = method.getParameters()[0].getAnnotation(InputParam.class); - System.out.println("I have annotation " + annotation); - } - } - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/InputParam.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/InputParam.java deleted file mode 100644 index d27fa8b7c..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/InputParam.java +++ /dev/null @@ -1,26 +0,0 
@@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.task; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.PARAMETER) -public @interface InputParam { - String value(); - - boolean required() default false; -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/OutputParam.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/OutputParam.java deleted file mode 100644 index a8c6ec378..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/OutputParam.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.task; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE_USE) -public @interface OutputParam { - String value(); -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/WorkerTask.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/WorkerTask.java deleted file mode 100644 index 9e8848051..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/task/WorkerTask.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.task; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** Identifies a simple worker task. */ -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) -public @interface WorkerTask { - String value(); -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/InputOutputGetter.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/InputOutputGetter.java deleted file mode 100644 index 41095b1a2..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/InputOutputGetter.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.utils; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -public class InputOutputGetter { - - public enum Field { - input, - output - } - - public static final class Map { - private final String parent; - - public Map(String parent) { - this.parent = parent; - } - - public String get(String key) { - return parent + "." + key + "}"; - } - - public Map map(String key) { - return new Map(parent + "." + key); - } - - public List list(String key) { - return new List(parent + "." + key); - } - - @Override - public String toString() { - return parent + "}"; - } - } - - public static final class List { - - private final String parent; - - public List(String parent) { - this.parent = parent; - } - - public List list(String key) { - return new List(parent + "." + key); - } - - public Map map(String key) { - return new Map(parent + "." + key); - } - - public String get(String key, int index) { - return parent + "." + key + "[" + index + "]}"; - } - - public String get(int index) { - return parent + "[" + index + "]}"; - } - - @Override - public String toString() { - return parent + "}"; - } - } - - private final String name; - - private final Field field; - - public InputOutputGetter(String name, Field field) { - this.name = name; - this.field = field; - } - - public String get(String key) { - return "${" + name + "." + field + "." + key + "}"; - } - - public String getParent() { - return "${" + name + "." + field + "}"; - } - - @JsonIgnore - public Map map(String key) { - return new Map("${" + name + "." + field + "." 
+ key); - } - - @JsonIgnore - public List list(String key) { - return new List("${" + name + "." + field + "." + key); - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/MapBuilder.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/MapBuilder.java deleted file mode 100644 index 2552d12b5..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/MapBuilder.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.utils; - -import java.util.HashMap; -import java.util.Map; - -public class MapBuilder { - private Map map = new HashMap<>(); - - public MapBuilder add(String key, String value) { - map.put(key, value); - return this; - } - - public MapBuilder add(String key, Number value) { - map.put(key, value); - return this; - } - - public MapBuilder add(String key, MapBuilder value) { - map.put(key, value.build()); - return this; - } - - public Map build() { - return map; - } -} diff --git a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/ObjectMapperProvider.java b/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/ObjectMapperProvider.java deleted file mode 100644 index 4230cac71..000000000 --- a/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/utils/ObjectMapperProvider.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.utils; - -import com.netflix.conductor.common.jackson.JsonProtoModule; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; - -public class ObjectMapperProvider { - - public ObjectMapper getObjectMapper() { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); - - objectMapper.setDefaultPropertyInclusion( - JsonInclude.Value.construct( - JsonInclude.Include.NON_NULL, JsonInclude.Include.NON_EMPTY)); - objectMapper.setSerializationInclusion(JsonInclude.Include.ALWAYS); - // objectMapper.setSerializationInclusion(JsonInclude.Include.); - - objectMapper.registerModule(new JsonProtoModule()); - return objectMapper; - } -} diff --git a/java-sdk/src/main/resources/test-server.properties b/java-sdk/src/main/resources/test-server.properties deleted file mode 100644 index 900b554d3..000000000 --- a/java-sdk/src/main/resources/test-server.properties +++ /dev/null @@ -1,5 +0,0 @@ -conductor.db.type=memory -conductor.indexing.enabled=false -conductor.workflow-repair-service.enabled=false -loadSample=true -conductor.system-task-workers.enabled=false \ No newline at end of file diff --git 
a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/TaskConversionsTests.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/TaskConversionsTests.java deleted file mode 100644 index 61cef6a81..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/TaskConversionsTests.java +++ /dev/null @@ -1,533 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -import java.time.Duration; -import java.time.ZonedDateTime; -import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; - -import static org.junit.jupiter.api.Assertions.*; - -public class TaskConversionsTests { - - static { - WorkflowExecutor.initTaskImplementations(); - } - - @Test - public void testSimpleTaskConversion() { - SimpleTask simpleTask = new SimpleTask("task_name", "task_ref_name"); - - Map map = new HashMap<>(); - map.put("key11", "value11"); - map.put("key12", 100); - - simpleTask.input("key1", "value"); - simpleTask.input("key2", 42); - simpleTask.input("key3", true); - simpleTask.input("key4", map); - - WorkflowTask workflowTask = simpleTask.getWorkflowDefTasks().get(0); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof SimpleTask); - SimpleTask simpleTaskFromWorkflowTask = (SimpleTask) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(simpleTask.getName(), fromWorkflowTask.getName()); - assertEquals(simpleTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(simpleTask.getTaskDef(), simpleTaskFromWorkflowTask.getTaskDef()); - assertEquals(simpleTask.getType(), simpleTaskFromWorkflowTask.getType()); 
- assertEquals(simpleTask.getStartDelay(), simpleTaskFromWorkflowTask.getStartDelay()); - assertEquals(simpleTask.getInput(), simpleTaskFromWorkflowTask.getInput()); - } - - @Test - public void testDynamicTaskCoversion() { - Dynamic dynamicTask = new Dynamic("task_name", "task_ref_name"); - - WorkflowTask workflowTask = dynamicTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters().get(Dynamic.TASK_NAME_INPUT_PARAM)); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof Dynamic); - Dynamic taskFromWorkflowTask = (Dynamic) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(dynamicTask.getName(), fromWorkflowTask.getName()); - assertEquals(dynamicTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(dynamicTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(dynamicTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(dynamicTask.getInput(), taskFromWorkflowTask.getInput()); - } - - @Test - public void testForkTaskConversion() { - SimpleTask task1 = new SimpleTask("task1", "task1"); - SimpleTask task2 = new SimpleTask("task2", "task2"); - SimpleTask task3 = new SimpleTask("task3", "task3"); - - ForkJoin forkTask = - new ForkJoin("task_ref_name", new Task[] {task1}, new Task[] {task2, task3}); - - WorkflowTask workflowTask = forkTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getForkTasks()); - assertFalse(workflowTask.getForkTasks().isEmpty()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof ForkJoin); - ForkJoin taskFromWorkflowTask = (ForkJoin) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(forkTask.getName(), fromWorkflowTask.getName()); - assertEquals(forkTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(forkTask.getType(), taskFromWorkflowTask.getType()); - 
assertEquals(forkTask.getInput(), taskFromWorkflowTask.getInput()); - - assertEquals( - forkTask.getForkedTasks().length, taskFromWorkflowTask.getForkedTasks().length); - for (int i = 0; i < forkTask.getForkedTasks().length; i++) { - assertEquals( - forkTask.getForkedTasks()[i].length, - taskFromWorkflowTask.getForkedTasks()[i].length); - for (int j = 0; j < forkTask.getForkedTasks()[i].length; j++) { - assertEquals( - forkTask.getForkedTasks()[i][j].getTaskReferenceName(), - taskFromWorkflowTask.getForkedTasks()[i][j].getTaskReferenceName()); - - assertEquals( - forkTask.getForkedTasks()[i][j].getName(), - taskFromWorkflowTask.getForkedTasks()[i][j].getName()); - - assertEquals( - forkTask.getForkedTasks()[i][j].getType(), - taskFromWorkflowTask.getForkedTasks()[i][j].getType()); - } - } - } - - @Test - public void testDynamicForkTaskCoversion() { - DynamicFork dynamicTask = new DynamicFork("task_ref_name", "forkTasks", "forkTaskInputs"); - - WorkflowTask workflowTask = dynamicTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof DynamicFork); - DynamicFork taskFromWorkflowTask = (DynamicFork) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(dynamicTask.getName(), fromWorkflowTask.getName()); - assertEquals(dynamicTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(dynamicTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(dynamicTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(dynamicTask.getInput(), taskFromWorkflowTask.getInput()); - assertEquals( - dynamicTask.getForkTasksParameter(), taskFromWorkflowTask.getForkTasksParameter()); - assertEquals( - dynamicTask.getForkTasksInputsParameter(), - taskFromWorkflowTask.getForkTasksInputsParameter()); - } - - @Test - public void testDoWhileConversion() { - SimpleTask task1 = new 
SimpleTask("task_name", "task_ref_name"); - SimpleTask task2 = new SimpleTask("task_name", "task_ref_name"); - - DoWhile doWhileTask = new DoWhile("task_ref_name", 2, task1, task2); - - WorkflowTask workflowTask = doWhileTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof DoWhile); - DoWhile taskFromWorkflowTask = (DoWhile) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(doWhileTask.getName(), fromWorkflowTask.getName()); - assertEquals(doWhileTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(doWhileTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(doWhileTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(doWhileTask.getInput(), taskFromWorkflowTask.getInput()); - - assertEquals(doWhileTask.getLoopCondition(), taskFromWorkflowTask.getLoopCondition()); - assertEquals( - doWhileTask.getLoopTasks().stream() - .map(task -> task.getTaskReferenceName()) - .sorted() - .collect(Collectors.toSet()), - taskFromWorkflowTask.getLoopTasks().stream() - .map(task -> task.getTaskReferenceName()) - .sorted() - .collect(Collectors.toSet())); - } - - @Test - public void testJoin() { - - Join joinTask = new Join("task_ref_name", "task1", "task2"); - - WorkflowTask workflowTask = joinTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - assertNotNull(workflowTask.getJoinOn()); - assertTrue(!workflowTask.getJoinOn().isEmpty()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Join, - "task is not of type Join, but of type " + fromWorkflowTask.getClass().getName()); - Join taskFromWorkflowTask = (Join) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(joinTask.getName(), fromWorkflowTask.getName()); - 
assertEquals(joinTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(joinTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(joinTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(joinTask.getInput(), taskFromWorkflowTask.getInput()); - - assertEquals(joinTask.getJoinOn().length, taskFromWorkflowTask.getJoinOn().length); - assertEquals( - Arrays.asList(joinTask.getJoinOn()).stream().sorted().collect(Collectors.toSet()), - Arrays.asList(taskFromWorkflowTask.getJoinOn()).stream() - .sorted() - .collect(Collectors.toSet())); - } - - @Test - public void testEvent() { - - Event eventTask = new Event("task_ref_name", "sqs:queue11"); - - WorkflowTask workflowTask = eventTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Event, - "task is not of type Event, but of type " + fromWorkflowTask.getClass().getName()); - Event taskFromWorkflowTask = (Event) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(eventTask.getName(), fromWorkflowTask.getName()); - assertEquals(eventTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(eventTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(eventTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(eventTask.getInput(), taskFromWorkflowTask.getInput()); - assertEquals(eventTask.getSink(), taskFromWorkflowTask.getSink()); - } - - @Test - public void testSetVariableConversion() { - - SetVariable setVariableTask = new SetVariable("task_ref_name"); - - WorkflowTask workflowTask = setVariableTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof SetVariable, - "task is not of type SetVariable, but of type " - 
+ fromWorkflowTask.getClass().getName()); - SetVariable taskFromWorkflowTask = (SetVariable) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(setVariableTask.getName(), fromWorkflowTask.getName()); - assertEquals( - setVariableTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(setVariableTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(setVariableTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(setVariableTask.getInput(), taskFromWorkflowTask.getInput()); - } - - @Test - public void testSubWorkflowConversion() { - - SubWorkflow subWorkflowTask = new SubWorkflow("task_ref_name", "sub_flow", 2); - - WorkflowTask workflowTask = subWorkflowTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof SubWorkflow, - "task is not of type SubWorkflow, but of type " - + fromWorkflowTask.getClass().getName()); - SubWorkflow taskFromWorkflowTask = (SubWorkflow) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(subWorkflowTask.getName(), fromWorkflowTask.getName()); - assertEquals( - subWorkflowTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(subWorkflowTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(subWorkflowTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(subWorkflowTask.getInput(), taskFromWorkflowTask.getInput()); - assertEquals(subWorkflowTask.getWorkflowName(), taskFromWorkflowTask.getWorkflowName()); - assertEquals( - subWorkflowTask.getWorkflowVersion(), taskFromWorkflowTask.getWorkflowVersion()); - } - - @Test - public void testSwitchConversion() { - - SimpleTask task1 = new SimpleTask("task_name", "task_ref_name1"); - SimpleTask task2 = new SimpleTask("task_name", "task_ref_name2"); - SimpleTask task3 = new SimpleTask("task_name", 
"task_ref_name3"); - - Switch decision = new Switch("switch", "${workflow.input.zip"); - decision.switchCase("caseA", task1); - decision.switchCase("caseB", task2, task3); - - decision.defaultCase( - new Terminate("terminate", Workflow.WorkflowStatus.FAILED, "", new HashMap<>())); - - WorkflowTask workflowTask = decision.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Switch, - "task is not of type Switch, but of type " + fromWorkflowTask.getClass().getName()); - Switch taskFromWorkflowTask = (Switch) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(decision.getName(), fromWorkflowTask.getName()); - assertEquals(decision.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(decision.getType(), taskFromWorkflowTask.getType()); - assertEquals(decision.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(decision.getInput(), taskFromWorkflowTask.getInput()); - // TODO: ADD CASES FOR DEFAULT CASE - assertEquals(decision.getBranches().keySet(), taskFromWorkflowTask.getBranches().keySet()); - assertEquals( - decision.getBranches().values().stream() - .map( - tasks -> - tasks.stream() - .map(Task::getTaskReferenceName) - .collect(Collectors.toSet())) - .collect(Collectors.toSet()), - taskFromWorkflowTask.getBranches().values().stream() - .map( - tasks -> - tasks.stream() - .map(Task::getTaskReferenceName) - .collect(Collectors.toSet())) - .collect(Collectors.toSet())); - assertEquals(decision.getBranches().size(), taskFromWorkflowTask.getBranches().size()); - } - - @Test - public void testTerminateConversion() { - - Terminate terminateTask = - new Terminate("terminate", Workflow.WorkflowStatus.FAILED, "", new HashMap<>()); - - WorkflowTask workflowTask = terminateTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task 
fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Terminate, - "task is not of type Terminate, but of type " - + fromWorkflowTask.getClass().getName()); - Terminate taskFromWorkflowTask = (Terminate) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(terminateTask.getName(), fromWorkflowTask.getName()); - assertEquals(terminateTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(terminateTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(terminateTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(terminateTask.getInput(), taskFromWorkflowTask.getInput()); - } - - @Test - public void testWaitConversion() { - - Wait waitTask = new Wait("terminate"); - - WorkflowTask workflowTask = waitTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Wait, - "task is not of type Wait, but of type " + fromWorkflowTask.getClass().getName()); - Wait taskFromWorkflowTask = (Wait) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(waitTask.getName(), fromWorkflowTask.getName()); - assertEquals(waitTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(waitTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(waitTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(waitTask.getInput(), taskFromWorkflowTask.getInput()); - - // Wait for 10 seconds - waitTask = new Wait("wait_for_10_seconds", Duration.of(10, ChronoUnit.SECONDS)); - workflowTask = waitTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - assertEquals("10s", workflowTask.getInputParameters().get(Wait.DURATION_INPUT)); - - // Wait for 10 minutes - waitTask = new Wait("wait_for_10_seconds", Duration.of(10, ChronoUnit.MINUTES)); - 
workflowTask = waitTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - assertEquals("600s", workflowTask.getInputParameters().get(Wait.DURATION_INPUT)); - - // Wait till next week some time - ZonedDateTime nextWeek = ZonedDateTime.now().plusDays(7); - String formattedDateTime = Wait.dateTimeFormatter.format(nextWeek); - waitTask = new Wait("wait_till_next_week", nextWeek); - workflowTask = waitTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - assertEquals(formattedDateTime, workflowTask.getInputParameters().get(Wait.UNTIL_INPUT)); - } - - @Test - public void testHttpConverter() { - - Http httpTask = new Http("terminate"); - Http.Input input = new Http.Input(); - input.setUri("http://example.com"); - input.setMethod(Http.Input.HttpMethod.POST); - input.setBody("Hello World"); - input.setReadTimeOut(100); - Map headers = new HashMap<>(); - headers.put("X-AUTHORIZATION", "my_api_key"); - input.setHeaders(headers); - - httpTask.input(input); - - WorkflowTask workflowTask = httpTask.getWorkflowDefTasks().get(0); - assertNotNull(workflowTask.getInputParameters()); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Http, - "task is not of type Http, but of type " + fromWorkflowTask.getClass().getName()); - Http taskFromWorkflowTask = (Http) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(httpTask.getName(), fromWorkflowTask.getName()); - assertEquals(httpTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(httpTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(httpTask.getStartDelay(), taskFromWorkflowTask.getStartDelay()); - assertEquals(httpTask.getInput(), taskFromWorkflowTask.getInput()); - assertEquals(httpTask.getHttpRequest(), taskFromWorkflowTask.getHttpRequest()); - - System.out.println(httpTask.getInput()); - System.out.println(taskFromWorkflowTask.getInput()); - } 
- - @Test - public void testJQTaskConversion() { - JQ jqTask = new JQ("task_name", "{ key3: (.key1.value1 + .key2.value2) }"); - - Map map = new HashMap<>(); - map.put("key11", "value11"); - map.put("key12", 100); - - jqTask.input("key1", "value"); - jqTask.input("key2", 42); - jqTask.input("key3", true); - jqTask.input("key4", map); - - WorkflowTask workflowTask = jqTask.getWorkflowDefTasks().get(0); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue(fromWorkflowTask instanceof JQ, "Found the instance " + fromWorkflowTask); - JQ taskFromWorkflowTask = (JQ) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(jqTask.getName(), fromWorkflowTask.getName()); - assertEquals(jqTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - assertEquals(jqTask.getQueryExpression(), taskFromWorkflowTask.getQueryExpression()); - assertEquals(jqTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(jqTask.getInput(), taskFromWorkflowTask.getInput()); - } - - @Test - public void testInlineTaskConversion() { - - Javascript inlineTask = - new Javascript( - "task_name", - "function e() { if ($.value == 1){return {\"result\": true}} else { return {\"result\": false}}} e();"); - inlineTask.validate(); - - Map map = new HashMap<>(); - map.put("key11", "value11"); - map.put("key12", 100); - - inlineTask.input("key1", "value"); - inlineTask.input("key2", 42); - inlineTask.input("key3", true); - inlineTask.input("key4", map); - - WorkflowTask workflowTask = inlineTask.getWorkflowDefTasks().get(0); - - Task fromWorkflowTask = TaskRegistry.getTask(workflowTask); - assertTrue( - fromWorkflowTask instanceof Javascript, "Found the instance " + fromWorkflowTask); - Javascript taskFromWorkflowTask = (Javascript) fromWorkflowTask; - - assertNotNull(fromWorkflowTask); - assertEquals(inlineTask.getName(), fromWorkflowTask.getName()); - assertEquals(inlineTask.getTaskReferenceName(), fromWorkflowTask.getTaskReferenceName()); - 
assertEquals(inlineTask.getExpression(), taskFromWorkflowTask.getExpression()); - assertEquals(inlineTask.getType(), taskFromWorkflowTask.getType()); - assertEquals(inlineTask.getInput(), taskFromWorkflowTask.getInput()); - } - - @Test - public void testJavascriptValidation() { - // This script has errors - Javascript inlineTask = - new Javascript( - "task_name", - "function e() { if ($.value ==> 1){return {\"result\": true}} else { return {\"result\": false}}} e();"); - boolean failed = false; - try { - inlineTask.validate(); - } catch (ValidationError ve) { - failed = true; - } - - assertTrue(failed); - - // This script does NOT have errors - inlineTask = - new Javascript( - "task_name", - "function e() { if ($.value == 1){return {\"result\": true}} else { return {\"result\": false}}} e();"); - failed = false; - try { - inlineTask.validate(); - } catch (ValidationError ve) { - failed = true; - } - - assertFalse(failed); - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowCreationTests.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowCreationTests.java deleted file mode 100644 index ab57cea37..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowCreationTests.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.testing.WorkflowTestRunner; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.task.OutputParam; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; -import com.netflix.conductor.sdk.workflow.testing.TestWorkflowInput; - -import static org.junit.jupiter.api.Assertions.*; - -public class WorkflowCreationTests { - - private static WorkflowExecutor executor; - - private static WorkflowTestRunner runner; - - @BeforeAll - public static void init() throws IOException { - runner = new WorkflowTestRunner(8080, "3.7.3"); - runner.init("com.netflix.conductor.sdk"); - executor = runner.getWorkflowExecutor(); - } - - @AfterAll - public static void cleanUp() { - runner.shutdown(); - } - - @WorkerTask("get_user_info") - public @OutputParam("zipCode") String getZipCode(@InputParam("name") String userName) 
{ - return "95014"; - } - - @WorkerTask("task2") - public @OutputParam("greetings") String task2() { - return "Hello World"; - } - - @WorkerTask("task3") - public @OutputParam("greetings") String task3() { - return "Hello World-3"; - } - - @WorkerTask("fork_gen") - public DynamicForkInput generateDynamicFork() { - DynamicForkInput forks = new DynamicForkInput(); - Map inputs = new HashMap<>(); - forks.setInputs(inputs); - List> tasks = new ArrayList<>(); - forks.setTasks(tasks); - - for (int i = 0; i < 3; i++) { - SimpleTask task = new SimpleTask("task2", "fork_task_" + i); - tasks.add(task); - HashMap taskInput = new HashMap<>(); - taskInput.put("key", "value"); - taskInput.put("key2", 101); - inputs.put(task.getTaskReferenceName(), taskInput); - } - return forks; - } - - private ConductorWorkflow registerTestWorkflow() { - InputStream script = getClass().getResourceAsStream("/script.js"); - SimpleTask getUserInfo = new SimpleTask("get_user_info", "get_user_info"); - getUserInfo.input("name", ConductorWorkflow.input.get("name")); - - SimpleTask sendToCupertino = new SimpleTask("task2", "cupertino"); - SimpleTask sendToNYC = new SimpleTask("task2", "nyc"); - - int len = 4; - Task[][] parallelTasks = new Task[len][1]; - for (int i = 0; i < len; i++) { - parallelTasks[i][0] = new SimpleTask("task2", "task_parallel_" + i); - } - - WorkflowBuilder builder = new WorkflowBuilder<>(executor); - TestWorkflowInput defaultInput = new TestWorkflowInput(); - defaultInput.setName("defaultName"); - - builder.name("sdk_workflow_example") - .version(1) - .ownerEmail("hello@example.com") - .description("Example Workflow") - .restartable(true) - .variables(new WorkflowState()) - .timeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF, 100) - .defaultInput(defaultInput) - .add(new Javascript("js", script)) - .add(new ForkJoin("parallel", parallelTasks)) - .add(getUserInfo) - .add( - new Switch("decide2", "${workflow.input.zipCode}") - .switchCase("95014", sendToCupertino) - 
.switchCase("10121", sendToNYC)) - // .add(new SubWorkflow("subflow", "sub_workflow_example", 5)) - .add(new SimpleTask("task2", "task222")) - .add(new DynamicFork("dynamic_fork", new SimpleTask("fork_gen", "fork_gen"))); - - ConductorWorkflow workflow = builder.build(); - boolean registered = workflow.registerWorkflow(true, true); - assertTrue(registered); - - return workflow; - } - - @Test - public void verifyCreatedWorkflow() { - ConductorWorkflow conductorWorkflow = registerTestWorkflow(); - WorkflowDef def = conductorWorkflow.toWorkflowDef(); - assertNotNull(def); - assertTrue( - def.getTasks() - .get(def.getTasks().size() - 2) - .getType() - .equals(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC)); - assertTrue( - def.getTasks() - .get(def.getTasks().size() - 1) - .getType() - .equals(TaskType.TASK_TYPE_JOIN)); - } - - @Test - public void verifyInlineWorkflowExecution() throws ValidationError { - TestWorkflowInput workflowInput = new TestWorkflowInput("username", "10121", "US"); - try { - Workflow run = registerTestWorkflow().execute(workflowInput).get(10, TimeUnit.SECONDS); - assertEquals( - Workflow.WorkflowStatus.COMPLETED, - run.getStatus(), - run.getReasonForIncompletion()); - } catch (Exception e) { - fail(e.getMessage()); - } - } - - @Test - public void testWorkflowExecutionByName() throws ExecutionException, InterruptedException { - - // Register the workflow first - registerTestWorkflow(); - - TestWorkflowInput input = new TestWorkflowInput("username", "10121", "US"); - - ConductorWorkflow conductorWorkflow = - new ConductorWorkflow(executor) - .from("sdk_workflow_example", null); - - CompletableFuture execution = conductorWorkflow.execute(input); - try { - execution.get(10, TimeUnit.SECONDS); - } catch (Exception e) { - fail(e.getMessage()); - } - } - - @Test - public void verifyWorkflowExecutionFailsIfNotExists() - throws ExecutionException, InterruptedException { - - // Register the workflow first - registerTestWorkflow(); - - TestWorkflowInput input = new 
TestWorkflowInput("username", "10121", "US"); - - try { - ConductorWorkflow conductorWorkflow = - new ConductorWorkflow(executor) - .from("non_existent_workflow", null); - conductorWorkflow.execute(input); - fail("execution should have failed"); - } catch (Exception e) { - } - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowDefTaskTests.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowDefTaskTests.java deleted file mode 100644 index a7e818071..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowDefTaskTests.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.sdk.workflow.def.tasks.*; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; - -import static org.junit.jupiter.api.Assertions.*; - -public class WorkflowDefTaskTests { - - static { - WorkflowExecutor.initTaskImplementations(); - } - - @Test - public void testWorkflowDefTaskWithStartDelay() { - SimpleTask simpleTask = new SimpleTask("task_name", "task_ref_name"); - int startDelay = 5; - - simpleTask.setStartDelay(startDelay); - - WorkflowTask workflowTask = simpleTask.getWorkflowDefTasks().get(0); - - assertEquals(simpleTask.getStartDelay(), workflowTask.getStartDelay()); - assertEquals(startDelay, simpleTask.getStartDelay()); - assertEquals(startDelay, workflowTask.getStartDelay()); - } - - @Test - public void testWorkflowDefTaskWithOptionalEnabled() { - SimpleTask simpleTask = new SimpleTask("task_name", "task_ref_name"); - - simpleTask.setOptional(true); - - WorkflowTask workflowTask = simpleTask.getWorkflowDefTasks().get(0); - - assertEquals(simpleTask.getStartDelay(), workflowTask.getStartDelay()); - assertEquals(true, simpleTask.isOptional()); - assertEquals(true, workflowTask.isOptional()); - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowState.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowState.java deleted file mode 100644 index 066d74dbf..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/def/WorkflowState.java +++ /dev/null @@ -1,36 +0,0 
@@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.def; - -public class WorkflowState { - - private boolean paymentCompleted; - - private int timeTaken; - - public boolean isPaymentCompleted() { - return paymentCompleted; - } - - public void setPaymentCompleted(boolean paymentCompleted) { - this.paymentCompleted = paymentCompleted; - } - - public int getTimeTaken() { - return timeTaken; - } - - public void setTimeTaken(int timeTaken) { - this.timeTaken = timeTaken; - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/Task1Input.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/Task1Input.java deleted file mode 100644 index 3cb3375da..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/Task1Input.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.testing; - -public class Task1Input { - - private int mod; - - private int oddEven; - - public int getMod() { - return mod; - } - - public void setMod(int mod) { - this.mod = mod; - } - - public int getOddEven() { - return oddEven; - } - - public void setOddEven(int oddEven) { - this.oddEven = oddEven; - } - - @Override - public String toString() { - return "Task1Input{" + "mod=" + mod + ", oddEven=" + oddEven + '}'; - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/TestWorkflowInput.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/TestWorkflowInput.java deleted file mode 100644 index 3caa0d1df..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/TestWorkflowInput.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.testing; - -public class TestWorkflowInput { - - private String name; - - private String zipCode; - - private String countryCode; - - public TestWorkflowInput(String name, String zipCode, String countryCode) { - this.name = name; - this.zipCode = zipCode; - this.countryCode = countryCode; - } - - public TestWorkflowInput() {} - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getZipCode() { - return zipCode; - } - - public void setZipCode(String zipCode) { - this.zipCode = zipCode; - } - - public String getCountryCode() { - return countryCode; - } - - public void setCountryCode(String countryCode) { - this.countryCode = countryCode; - } -} diff --git a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/WorkflowTestFrameworkTests.java b/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/WorkflowTestFrameworkTests.java deleted file mode 100644 index d691b5d9a..000000000 --- a/java-sdk/src/test/java/com/netflix/conductor/sdk/workflow/testing/WorkflowTestFrameworkTests.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.sdk.workflow.testing; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.testing.WorkflowTestRunner; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.task.OutputParam; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; - -import static org.junit.jupiter.api.Assertions.*; - -public class WorkflowTestFrameworkTests { - - private static WorkflowTestRunner testRunner; - - private static WorkflowExecutor executor; - - @BeforeAll - public static void init() throws IOException { - testRunner = new WorkflowTestRunner(8080, "3.7.3"); - testRunner.init("com.netflix.conductor.sdk.workflow.testing"); - - executor = testRunner.getWorkflowExecutor(); - executor.loadTaskDefs("/tasks.json"); - executor.loadWorkflowDefs("/simple_workflow.json"); - } - - @AfterAll - public static void cleanUp() { - testRunner.shutdown(); - } - - @Test - public void testDynamicTaskExecuted() throws Exception { - - Map input = new HashMap<>(); - input.put("task2Name", "task_2"); - input.put("mod", "1"); - input.put("oddEven", "12"); - input.put("number", 0); - - // Start the workflow 
and wait for it to complete - Workflow workflow = executor.executeWorkflow("Decision_TaskExample", 1, input).get(); - - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertNotNull(workflow.getOutput()); - assertNotNull(workflow.getTasks()); - assertFalse(workflow.getTasks().isEmpty()); - assertTrue( - workflow.getTasks().stream() - .anyMatch(task -> task.getTaskDefName().equals("task_6"))); - - // task_2's implementation fails at the first try, so we should have to instances of task_2 - // execution - // 2 executions of task_2 should be present - assertEquals( - 2, - workflow.getTasks().stream() - .filter(task -> task.getTaskDefName().equals("task_2")) - .count()); - List task2Executions = - workflow.getTasks().stream() - .filter(task -> task.getTaskDefName().equals("task_2")) - .collect(Collectors.toList()); - assertNotNull(task2Executions); - assertEquals(2, task2Executions.size()); - - // First instance would have failed and second succeeded. 
- assertEquals(Task.Status.FAILED, task2Executions.get(0).getStatus()); - assertEquals(Task.Status.COMPLETED, task2Executions.get(1).getStatus()); - - // task10's output - assertEquals(100, workflow.getOutput().get("c")); - } - - @Test - public void testWorkflowFailure() throws Exception { - - Map input = new HashMap<>(); - // task2Name is missing which will cause workflow to fail - input.put("mod", "1"); - input.put("oddEven", "12"); - input.put("number", 0); - - // we are missing task2Name parameter which is required to wire up dynamictask - // The workflow should fail as we are not passing it as input - Workflow workflow = executor.executeWorkflow("Decision_TaskExample", 1, input).get(); - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.FAILED, workflow.getStatus()); - assertNotNull(workflow.getReasonForIncompletion()); - } - - @WorkerTask("task_1") - public Map task1(Task1Input input) { - Map result = new HashMap<>(); - result.put("input", input); - return result; - } - - @WorkerTask("task_2") - public TaskResult task2(Task task) { - if (task.getRetryCount() < 1) { - task.setStatus(Task.Status.FAILED); - task.setReasonForIncompletion("try again"); - return new TaskResult(task); - } - - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - - @WorkerTask("task_6") - public TaskResult task6(Task task) { - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - - @WorkerTask("task_10") - public TaskResult task10(Task task) { - task.setStatus(Task.Status.COMPLETED); - task.getOutputData().put("a", "b"); - task.getOutputData().put("c", 100); - task.getOutputData().put("x", false); - return new TaskResult(task); - } - - @WorkerTask("task_8") - public TaskResult task8(Task task) { - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - - @WorkerTask("task_5") - public TaskResult task5(Task task) { - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } - - 
@WorkerTask("task_3") - public @OutputParam("z1") String task3(@InputParam("taskToExecute") String p1) { - return "output of task3, p1=" + p1; - } - - @WorkerTask("task_30") - public Map task30(Task task) { - Map output = new HashMap<>(); - output.put("v1", "b"); - output.put("v2", Arrays.asList("one", "two", 3)); - output.put("v3", 5); - return output; - } - - @WorkerTask("task_31") - public Map task31(Task task) { - Map output = new HashMap<>(); - output.put("a1", "b"); - output.put("a2", Arrays.asList("one", "two", 3)); - output.put("a3", 5); - return output; - } - - @WorkerTask("HTTP") - public Map http(Task task) { - Map output = new HashMap<>(); - output.put("a1", "b"); - output.put("a2", Arrays.asList("one", "two", 3)); - output.put("a3", 5); - return output; - } - - @WorkerTask("EVENT") - public Map event(Task task) { - Map output = new HashMap<>(); - output.put("a1", "b"); - output.put("a2", Arrays.asList("one", "two", 3)); - output.put("a3", 5); - return output; - } -} diff --git a/java-sdk/src/test/resources/application-integrationtest.properties b/java-sdk/src/test/resources/application-integrationtest.properties deleted file mode 100644 index a9869cc17..000000000 --- a/java-sdk/src/test/resources/application-integrationtest.properties +++ /dev/null @@ -1,53 +0,0 @@ -# -# /* -# * Copyright 2021 Netflix, Inc. -# *

    -# * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# * the License. You may obtain a copy of the License at -# *

    -# * http://www.apache.org/licenses/LICENSE-2.0 -# *

    -# * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# * specific language governing permissions and limitations under the License. -# */ -# - -conductor.db.type=memory -conductor.workflow-execution-lock.type=local_only -conductor.external-payload-storage.type=mock -conductor.indexing.enabled=false - -conductor.app.stack=test -conductor.app.appId=conductor - -conductor.app.workflow-offset-timeout=30s - -conductor.system-task-workers.enabled=false -conductor.app.system-task-worker-callback-duration=0 - -conductor.app.event-message-indexing-enabled=true -conductor.app.event-execution-indexing-enabled=true - -conductor.workflow-reconciler.enabled=true -conductor.workflow-repair-service.enabled=false - -conductor.app.workflow-execution-lock-enabled=false - -conductor.app.workflow-input-payload-size-threshold=10KB -conductor.app.max-workflow-input-payload-size-threshold=10240KB -conductor.app.workflow-output-payload-size-threshold=10KB -conductor.app.max-workflow-output-payload-size-threshold=10240KB -conductor.app.task-input-payload-size-threshold=10KB -conductor.app.max-task-input-payload-size-threshold=10240KB -conductor.app.task-output-payload-size-threshold=10KB -conductor.app.max-task-output-payload-size-threshold=10240KB -conductor.app.max-workflow-variables-payload-size-threshold=2KB - -conductor.redis.availability-zone=us-east-1c -conductor.redis.data-center-region=us-east-1 -conductor.redis.workflow-namespace-prefix=integration-test -conductor.redis.queue-namespace-prefix=integtest - -conductor.elasticsearch.index-prefix=conductor -conductor.elasticsearch.cluster-health-color=yellow diff --git a/java-sdk/src/test/resources/log4j2.xml b/java-sdk/src/test/resources/log4j2.xml deleted file mode 100644 index cab346657..000000000 --- a/java-sdk/src/test/resources/log4j2.xml 
+++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/java-sdk/src/test/resources/script.js b/java-sdk/src/test/resources/script.js deleted file mode 100644 index af6d42c43..000000000 --- a/java-sdk/src/test/resources/script.js +++ /dev/null @@ -1,11 +0,0 @@ -function e() { - if ($.value > 1){ - return { - "key": "value", - "key2": 42 - }; - } else { - return {}; - } -} -e(); \ No newline at end of file diff --git a/java-sdk/src/test/resources/simple_workflow.json b/java-sdk/src/test/resources/simple_workflow.json deleted file mode 100644 index cc8a78051..000000000 --- a/java-sdk/src/test/resources/simple_workflow.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "createTime": 1635491472393, - "updateTime": 1635356450472, - "name": "Decision_TaskExample", - "description": "Decision_TaskExample", - "version": 1, - "tasks": [ - { - "name": "decision_task", - "taskReferenceName": "decision_task", - "inputParameters": { - "case_value_param": "${workflow.input.number}" - }, - "type": "DECISION", - "caseValueParam": "case_value_param", - "decisionCases": { - "0": [ - { - "name": "task_5", - "taskReferenceName": "task_5", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute":"${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam":"taskToExecute", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "task_6", - "taskReferenceName": "task_6", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - 
"optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "1": [ - { - "name": "task_8", - "taskReferenceName": "task_8", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "task_10", - "taskReferenceName": "task_10", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "task_8", - "taskReferenceName": "task_8_default", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "task_10", - "taskReferenceName": "task_10_last", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": true, - "ownerEmail": "abc@example.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "variables": {}, - "inputTemplate": {} -} \ No newline at end of file diff --git a/java-sdk/src/test/resources/tasks.json b/java-sdk/src/test/resources/tasks.json deleted file mode 100644 index b60881d08..000000000 --- 
a/java-sdk/src/test/resources/tasks.json +++ /dev/null @@ -1,1252 +0,0 @@ -[ - { - "createTime": 1635656118884, - "createdBy": "", - "name": "task_38", - "description": "task_38", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638846956, - "createdBy": "", - "name": "encode", - "retryCount": 3, - "timeoutSeconds": 1200, - "inputKeys": [ - "fileLocation" - ], - "outputKeys": [ - "encodeLocation" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 1200, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "encode_admin@test.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635656118436, - "createdBy": "", - "name": "task_8", - "description": "task_8", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118873, - "createdBy": "", - "name": "task_37", - "description": "task_37", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118460, - "createdBy": "", - "name": "task_9", - "description": "task_9", - 
"retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118390, - "createdBy": "", - "name": "task_6", - "description": "task_6", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118861, - "createdBy": "", - "name": "task_36", - "description": "task_36", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847017, - "createdBy": "", - "name": "collect_payment_task", - "description": "collect_payment_task", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118847, - "createdBy": "", - "name": "task_35", - "description": "task_35", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - 
"rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118422, - "createdBy": "", - "name": "task_7", - "description": "task_7", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118835, - "createdBy": "", - "name": "task_34", - "description": "task_34", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118349, - "createdBy": "", - "name": "task_4", - "description": "task_4", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118819, - "createdBy": "", - "name": "task_33", - "description": "task_33", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118371, - "createdBy": "", - "name": "task_5", - "description": "task_5", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], 
- "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118808, - "createdBy": "", - "name": "task_32", - "description": "task_32", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118302, - "createdBy": "", - "name": "task_2", - "description": "task_2", - "retryCount": 3, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 1, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118797, - "createdBy": "", - "name": "task_31", - "description": "task_31", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118323, - "createdBy": "", - "name": "task_3", - "description": "task_3", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - 
}, - { - "createTime": 1635656118228, - "createdBy": "", - "name": "task_0", - "description": "task_0", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118775, - "createdBy": "", - "name": "task_30", - "description": "task_30", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118267, - "createdBy": "", - "name": "task_1", - "description": "task_1", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847161, - "createdBy": "", - "name": "BookHotels", - "retryCount": 3, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "ui@example.com" - }, - { - "createTime": 1635638847170, - "createdBy": "", - "name": "deploy", - "retryCount": 3, - "timeoutSeconds": 1200, - "inputKeys": [ - "fileLocation" - ], - "outputKeys": [ - "deployLocation" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - 
"responseTimeoutSeconds": 1200, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "encode_admin@test.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635763310960, - "createdBy": "", - "name": "ship_via_dhl", - "retryCount": 3, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 2, - "ownerEmail": "abc@example.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635638847180, - "createdBy": "", - "name": "StartBooking", - "retryCount": 3, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "ui@example.com" - }, - { - "createTime": 1635434088645, - "createdBy": "", - "name": "Read_Name", - "retryCount": 1, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 1, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "abc@example.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635638847189, - "createdBy": "", - "name": "book_flight_task", - "description": "book_flight_task", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - 
"ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847198, - "createdBy": "", - "name": "book_car_task", - "description": "book_car_task", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118758, - "createdBy": "", - "name": "task_29", - "description": "task_29", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118747, - "createdBy": "", - "name": "task_28", - "description": "task_28", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635278952348, - "createdBy": "", - "name": "ship_via_ups", - "retryCount": 3, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 2, - "ownerEmail": "abc@example.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635638847226, - "createdBy": "", - "name": "image_convert_resize", - "retryCount": 3, - "timeoutSeconds": 1200, - "inputKeys": [ - "fileLocation", - 
"outputFormat", - "outputWidth", - "outputHeight", - "maintainAspectRatio" - ], - "outputKeys": [ - "fileLocation" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 1200, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "test@example.com", - "pollTimeoutSeconds": 3600 - }, - { - "createTime": 1635638847238, - "createdBy": "", - "name": "deposit_money", - "description": "deposit_money", - "retryCount": 5, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118912, - "createdBy": "", - "name": "search_elasticsearch", - "description": "search_elasticsearch", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118900, - "createdBy": "", - "name": "task_39", - "description": "task_39", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118586, - "createdBy": "", - "name": "task_16", - "description": "task_16", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - 
"retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635278952330, - "createdBy": "", - "name": "shipping_info", - "retryCount": 1, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 2, - "ownerEmail": "abc@example.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635656118567, - "createdBy": "", - "name": "task_15", - "description": "task_15", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118545, - "createdBy": "", - "name": "task_14", - "description": "task_14", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118528, - "createdBy": "", - "name": "task_13", - "description": "task_13", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - 
"createTime": 1635656118513, - "createdBy": "", - "name": "task_12", - "description": "task_12", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847312, - "createdBy": "", - "name": "withdraw_money", - "description": "withdraw_money", - "retryCount": 5, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118495, - "createdBy": "", - "name": "task_11", - "description": "task_11", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118480, - "createdBy": "", - "name": "task_10", - "description": "task_10", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "updateTime": 1636574526469, - "createdBy": "user", - "updatedBy": "", - "name": "sample_task_name_1", - "description": "This is a sample task for demo", - "retryCount": 3, - "timeoutSeconds": 30, - "inputKeys": [], - "outputKeys": [], - 
"timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 5, - "responseTimeoutSeconds": 10, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1636051623273, - "createdBy": "", - "name": "order_details", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "abc@example.com" - }, - { - "createTime": 1635638847343, - "createdBy": "", - "name": "CompleteFlightBooking", - "retryCount": 3, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "ui@example.com" - }, - { - "createTime": 1635278952339, - "createdBy": "", - "name": "ship_via_fedex", - "retryCount": 3, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 300, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 2, - "ownerEmail": "abc@example.com", - "pollTimeoutSeconds": 1200 - }, - { - "createTime": 1635638847353, - "createdBy": "", - "name": "map_state_codes", - "retryCount": 3, - "timeoutSeconds": 300, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 180, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@gmail.com" - }, - { - "createTime": 1635638847362, - 
"createdBy": "", - "name": "compute_median_top_states", - "retryCount": 3, - "timeoutSeconds": 300, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 180, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@gmail.com" - }, - { - "createTime": 1635638847374, - "createdBy": "", - "name": "scaleS3Image", - "retryCount": 3, - "timeoutSeconds": 300, - "inputKeys": [ - "inputBucketName", - "inputKeyName", - "scalingFactor", - "outputBucketName", - "outputKeyName" - ], - "outputKeys": [ - "response" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 180, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "conductor@example.com" - }, - { - "createTime": 1635656118736, - "createdBy": "", - "name": "task_27", - "description": "task_27", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118725, - "createdBy": "", - "name": "task_26", - "description": "task_26", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118713, - "createdBy": "", - "name": "task_25", - "description": "task_25", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": 
"TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118700, - "createdBy": "", - "name": "task_24", - "description": "task_24", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635878631466, - "createdBy": "", - "name": "task_23", - "retryCount": 1, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "pollTimeoutSeconds": 600, - "ownerEmail": "test@example.com" - }, - { - "createTime": 1635878631456, - "createdBy": "", - "name": "task_22", - "retryCount": 1, - "timeoutSeconds": 600, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 300, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "pollTimeoutSeconds": 600, - "ownerEmail": "test@example.com" - }, - { - "createTime": 1635638847436, - "createdBy": "", - "name": "send_email_task", - "description": "send_email_task", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - 
"rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118658, - "createdBy": "", - "name": "task_21", - "description": "task_21", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847459, - "createdBy": "", - "name": "image_multiple_convert_resize", - "retryCount": 3, - "timeoutSeconds": 1200, - "inputKeys": [ - "fileLocation", - "outputFormats", - "outputSizes", - "maintainAspectRatio" - ], - "outputKeys": [ - "dynamicTasks", - "dynamicTasksInput" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 600, - "responseTimeoutSeconds": 1200, - "concurrentExecLimit": 100, - "inputTemplate": {}, - "rateLimitPerFrequency": 50, - "rateLimitFrequencyInSeconds": 60, - "ownerEmail": "exampl@example.com", - "pollTimeoutSeconds": 3600 - }, - { - "createTime": 1635656118644, - "createdBy": "", - "name": "task_20", - "description": "task_20", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847477, - "createdBy": "", - "name": "simple_worker", - "retryCount": 3, - "timeoutSeconds": 300, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 180, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@gmail.com" - }, - { - "createTime": 
1635638847486, - "createdBy": "", - "name": "book_hotel_task", - "description": "book_hotel_task", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 1200, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635638847495, - "createdBy": "", - "name": "watermarkS3Image", - "retryCount": 3, - "timeoutSeconds": 300, - "inputKeys": [ - "inputBucketName", - "inputKeyName", - "watermarkBucketName", - "watermarkKeyName", - "outputBucketName", - "outputKeyName" - ], - "outputKeys": [ - "response" - ], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 10, - "responseTimeoutSeconds": 180, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "conductor@example.com" - }, - { - "createTime": 1635656118631, - "createdBy": "", - "name": "task_19", - "description": "task_19", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118616, - "createdBy": "", - "name": "task_18", - "description": "task_18", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - }, - { - "createTime": 1635656118601, - "createdBy": "", - "name": "task_17", - "description": "task_17", - "retryCount": 1, - 
"timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1, - "ownerEmail": "example@email.com" - } -] \ No newline at end of file diff --git a/java-sdk/testing_framework.md b/java-sdk/testing_framework.md deleted file mode 100644 index 18f950cc2..000000000 --- a/java-sdk/testing_framework.md +++ /dev/null @@ -1,74 +0,0 @@ -# Unit Testing framework for workflows - -The framework allows you to test the workflow definitions against a specific version of Conductor server. - -The unit tests allows the following: -1. **Input/Output Wiring**: Ensure the tasks are wired up correctly -2. **Parameter check**: Workflow behavior with missing mandatory parameters is expected (fail if required) -3. **Task Failure behavior**: Ensure the task definitions have right no. of retries etc. - e.g. If the task is not idempotent, it does not get retried. -4. **Branch Testing**: Given a specific input, ensure the workflow executes specific branch of the fork/decision. - -The local test server is self-contained with no additional dependencies required and stores all the data -in memory. Once the tests complete, the server is terminated and all the data is wiped out. - -## Unit Testing frameworks -The testing framework is agnostic to the framework you use for testing, can be easily integrated into -JUnit, Spock and other testing framework being used. - -## Setting up the local server for testing - -```java -//Setup method code - should be called once per the test lifecycle -//e.g. 
@BeforeClass in JUnit - -//Download the published conductor server version 3.5.2 -//Start the local server at port 8096 -testRunner = new WorkflowTestRunner(8096, "3.5.2"); - -//Scan the packages for task workers -testRunner.init("com.netflix.conductor.testing.workflows"); - -//Get the executor instance used for loading workflows -executor = testRunner.getWorkflowExecutor(); -``` - -Clean up method -```java -//Clean up method code -- place in a clean up method e.g. @AfterClass in Junit - -//Shutdown local workers and server and clean up any local resources in use. -testRunner.shutdown(); -``` - -Loading workflows from JSON files for testing -```java -executor.loadTaskDefs("/tasks.json"); -executor.loadWorkflowDefs("/simple_workflow.json"); -``` - -## Sample test code that starts a workflow and verifies its execution - -```java -GetInsuranceQuote getQuote = new GetInsuranceQuote(); -getQuote.setName("personA"); -getQuote.setAmount(1000000.0); -getQuote.setZipCode("10121"); - -// Start the workflow and wait for it to complete -CompletableFuture workflowFuture = executor.executeWorkflow("InsuranceQuoteWorkflow", 1, getQuote); - -//Wait for the workflow execution to complete -Workflow workflow = workflowFuture.get(); - -//Assertions -assertNotNull(workflow); -assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); -assertNotNull(workflow.getOutput()); -assertNotNull(workflow.getTasks()); -assertFalse(workflow.getTasks().isEmpty()); -assertTrue(workflow.getTasks().stream().anyMatch(task -> task.getTaskDefName().equals("task_6"))); -``` - - - diff --git a/java-sdk/worker_sdk.md b/java-sdk/worker_sdk.md deleted file mode 100644 index c0dac6cc3..000000000 --- a/java-sdk/worker_sdk.md +++ /dev/null @@ -1,116 +0,0 @@ -# Worker SDK -Worker SDK makes it easy to write conductor workers which are strongly typed with specific inputs and outputs. 
- -Annotations for the worker methods: - -* `@WorkerTask` When annotated converts a method to a conductor worker -* `@InputParam` name of the input parameter to bind to from the task's input -* `@OutputParam` name of the output key of the task's output. - -Please note, inputs and outputs to a task in Conductor are JSON documents. - - -**Examples** - -Create a worker named `task1` that gets Task as input and produces TaskResult as output. -```java -@WorkerTask("task1") - public TaskResult task1(Task task) { - task.setStatus(Task.Status.COMPLETED); - return new TaskResult(task); - } -``` - -Create a worker named `task2` that takes `name` as a String input and produces a -```java -@WorkerTask("task2") -public @OutputParam("greetings") String task2(@InputParam("name") String name) { - return "Hello, " + name; -} -``` -Example Task Input/Output - -Input: -```json -{ - "name": "conductor" -} -``` - -Output: -```json -{ - "greetings": "Hello, conductor" -} -``` -A worker that takes complex java type as input and produces complex output: -```java -@WorkerTask("get_insurance_quote") - public InsuranceQuote getInsuranceQuote(GetInsuranceQuote quoteInput) { - InsuranceQuote quote = new InsuranceQuote(); - //Implementation - return quote; - } -``` - -Example Task Input/Output - -Input: -```json -{ - "name": "personA", - "zipCode": "10121", - "amount": 1000000 -} -``` - -Output: -```json -{ - "name": "personA", - "quotedPremium": 123.50, - "quotedAmount": 1000000 -} -``` - -## Managing Task Workers -Annotated Workers are managed by [WorkflowExecutor](https://github.com/netflix/conductor/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/executor/WorkflowExecutor.java) - -### Start Workers -```java -WorkflowExecutor executor = new WorkflowExecutor("http://server/api/"); -//List of packages (comma separated) to scan for annotated workers. 
-// Please note,the worker method MUST be public and the class in which they are defined -//MUST have a no-args constructor -executor.initWorkers("com.company.package1,com.company.package2"); -``` - -### Stop Workers -Code fragment to stop workers at the shutdown of the application -```java -executor.shutdown(); -``` - -### Unit Testing Workers -Workers implemented with the annotations are regular Java methods can be united tested with any testing framework. - -#### Mock workers for workflow testing -Create a mock worker in a different package (e.g. test) and scan for these packages when loading up the workers for integration testing. - -See [Unit Testing Framework](testing_framework.md) for more details on testing. - -## Best Practices -In a typical production environment, you will have multiple workers across different machines/VMs/pods polling for the same task. -As with all the Conductor workers, the following best practices applies: - -1. Workers should be stateless and should not maintain any state on the process they are running -2. Ideally workers should be idempotent -3. Worker should follow Single Responsibility Principle and do exactly one thing they are responsible for -4. Worker should not embed any workflow logic - ie scheduling another worker, sending a message etc. Conductor has features to do this making it possible to decouple your workflow logic from worker implementation. - - - - - - - diff --git a/java-sdk/workflow_sdk.md b/java-sdk/workflow_sdk.md deleted file mode 100644 index ec946446b..000000000 --- a/java-sdk/workflow_sdk.md +++ /dev/null @@ -1,125 +0,0 @@ -# Workflow SDK -Workflow SDK provides fluent API to create workflows with strongly typed interfaces. - -## APIs -### ConductorWorkflow -[ConductorWorkflow](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/ConductorWorkflow.java) is the SDK representation of a Conductor workflow. 
- -#### Create a `ConductorWorkflow` instance -```java -ConductorWorkflow conductorWorkflow = new WorkflowBuilder(executor) - .name("sdk_workflow_example") - .version(1) - .ownerEmail("hello@example.com") - .description("Example Workflow") - .timeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF, 100) - .add(new SimpleTask("calculate_insurance_premium", "calculate_insurance_premium")) - .add(new SimpleTask("send_email", "send_email")) - .build(); -``` -### Working with Simple Worker Tasks -Use [SimpleTask](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SimpleTask.java) to add simple task to a workflow. - -Example: -```java -... -builder.add(new SimpleTask("send_email", "send_email")) -... -``` -### Wiring inputs the task -use `input` methods to configure the inputs the task. - -See https://netflix.github.io/conductor/how-tos/Tasks/task-inputs/ for details on Task Inputs/Outputs - -Example -```java -builder.add( - new SimpleTask("send_email", "send_email") - .input("email", "${workflow.input.email}") - .input("subject", "Your insurance quote for the amount ${generate_quote.output.amount}") -); -``` - -### Working with operators -Each of the operator - - -[ForkJoin](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/ForkJoin.java), -[Wait](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Wait.java), -[Switch](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Switch.java), -[DynamicFork](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DynamicFork.java), -[DoWhile](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/DoWhile.java), 
-[Join](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Join.java), -[Dynamic](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Dynamic.java), -[Terminate](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/Terminate.java), -[SubWorkflow](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SubWorkflow.java), -[SetVariable](https://github.com/Netflix/conductor/blob/main/java-sdk/src/main/java/com/netflix/conductor/sdk/workflow/def/tasks/SetVariable.java), - -have their own class that can be added to the workflow builder. - - -#### Register Workflow with Conductor Server -```java -//Returns true if the workflow is successfully created -//Reasons why this method will return false -//1. Network connectivity issue -//2. Workflow already exists with the specified name and version -//3. There are missing task definitions -boolean registered = workflow.registerWorkflow(); -``` -#### Overwrite existing workflow definition -```java -boolean registered = workflow.registerWorkflow(true); -``` - -#### Overwrite existing workflow definition & register any missing task definitions -```java -boolean registered = workflow.registerWorkflow(true, true); -``` - -#### Create `ConductorWorkflow` based on the definition registered on the server - -```java -ConductorWorkflow conductorWorkflow = - new ConductorWorkflow(executor) - .from("sdk_workflow_example", 1); -``` - -#### Start a workflow execution -Start the execution of the workflow based on the definition registered on the server. -Use register method to register a workflow on the server before executing. 
- -```java - -//Returns a completable future -CompletableFuture execution = conductorWorkflow.execute(input); - -//Wait for the workflow to complete -- useful if workflow completes within a reasonable amount of time -Workflow workflowRun = execution.get(); - -//Get the workflowId -String workflowId = workflowRun.getWorkflowId(); - -//Get the status of workflow execution -WorkflowStatus status = workflowRun.getStatus(); -``` -See [Workflow](https://github.com/Netflix/conductor/blob/main/common/src/main/java/com/netflix/conductor/common/run/Workflow.java) for more details on Workflow object. - -#### Start a dynamic workflow execution -Dynamic workflows are executed by specifying the workflow definition along with the execution and does not require registering the workflow on the server before executing. - -##### Use cases for dynamic workflows -1. Each workflow run has a unique workflow definition -2. Workflows are defined based on the user data and cannot be modeled ahead of time statically - -```java -//1. Use WorkflowBuilder to create ConductorWorkflow -//2. Execute using the definition created by SDK -CompletableFuture execution = conductorWorkflow.executeDynamic(input); - -``` - - - - - - diff --git a/json-jq-task/build.gradle b/json-jq-task/build.gradle deleted file mode 100644 index 24e98a6e5..000000000 --- a/json-jq-task/build.gradle +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "net.thisptr:jackson-jq:${revJq}" - implementation "com.github.ben-manes.caffeine:caffeine" -} diff --git a/json-jq-task/dependencies.lock b/json-jq-task/dependencies.lock deleted file mode 100644 index d4da4040f..000000000 --- a/json-jq-task/dependencies.lock +++ /dev/null @@ -1,384 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "net.thisptr:jackson-jq": { - "locked": "0.0.13" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - 
"locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "net.thisptr:jackson-jq": { - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.github.ben-manes.caffeine:caffeine": { - "locked": "2.9.3" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "net.thisptr:jackson-jq": { - "locked": "0.0.13" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { 
- "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - 
"locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "net.thisptr:jackson-jq": { - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java b/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java deleted file mode 100644 index ed49c40d2..000000000 --- a/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.json; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.benmanes.caffeine.cache.CacheLoader; -import com.github.benmanes.caffeine.cache.Caffeine; -import com.github.benmanes.caffeine.cache.LoadingCache; -import net.thisptr.jackson.jq.JsonQuery; -import net.thisptr.jackson.jq.Scope; - -@Component(JsonJqTransform.NAME) -public class JsonJqTransform extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(JsonJqTransform.class); - public static final String NAME = "JSON_JQ_TRANSFORM"; - private static final String QUERY_EXPRESSION_PARAMETER = "queryExpression"; - private static final String OUTPUT_RESULT = "result"; - private static final String OUTPUT_RESULT_LIST = "resultList"; - private static final String OUTPUT_ERROR = "error"; - private final Scope rootScope; - private final ObjectMapper objectMapper; - private final LoadingCache queryCache = createQueryCache(); - - @Autowired - public JsonJqTransform(ObjectMapper objectMapper) { - super(NAME); - this.objectMapper = objectMapper; - 
this.rootScope = Scope.newEmptyScope(); - this.rootScope.loadFunctions(Scope.class.getClassLoader()); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - final Map taskInput = task.getInputData(); - - final String queryExpression = (String) taskInput.get(QUERY_EXPRESSION_PARAMETER); - - if (queryExpression == null) { - task.setReasonForIncompletion( - "Missing '" + QUERY_EXPRESSION_PARAMETER + "' in input parameters"); - task.setStatus(TaskModel.Status.FAILED); - return; - } - - try { - final JsonNode input = objectMapper.valueToTree(taskInput); - final JsonQuery query = queryCache.get(queryExpression); - - final Scope childScope = Scope.newChildScope(rootScope); - - final List result = query.apply(childScope, input); - - task.setStatus(TaskModel.Status.COMPLETED); - if (result == null) { - task.addOutput(OUTPUT_RESULT, null); - task.addOutput(OUTPUT_RESULT_LIST, null); - } else if (result.isEmpty()) { - task.addOutput(OUTPUT_RESULT, null); - task.addOutput(OUTPUT_RESULT_LIST, result); - } else { - task.addOutput(OUTPUT_RESULT, result.get(0)); - task.addOutput(OUTPUT_RESULT_LIST, result); - } - } catch (final Exception e) { - LOGGER.error( - "Error executing task: {} in workflow: {}", - task.getTaskId(), - workflow.getWorkflowId(), - e); - task.setStatus(TaskModel.Status.FAILED); - final String message = extractFirstValidMessage(e); - task.setReasonForIncompletion(message); - task.addOutput(OUTPUT_ERROR, message); - } - } - - private LoadingCache createQueryCache() { - final CacheLoader loader = JsonQuery::compile; - return Caffeine.newBuilder() - .expireAfterWrite(1, TimeUnit.HOURS) - .maximumSize(1000) - .build(loader); - } - - private String extractFirstValidMessage(final Exception e) { - Throwable currentStack = e; - final List messages = new ArrayList<>(); - messages.add(currentStack.getMessage()); - while (currentStack.getCause() != null) { - currentStack = currentStack.getCause(); - 
messages.add(currentStack.getMessage()); - } - return messages.stream().filter(it -> !it.contains("N/A")).findFirst().orElse(""); - } -} diff --git a/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java b/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java deleted file mode 100644 index 78db2cca2..000000000 --- a/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tasks.json; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.*; - -public class JsonJqTransformTest { - - private final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - @Test - public void dataShouldBeCorrectlySelected() { - final JsonJqTransform jsonJqTransform = new JsonJqTransform(objectMapper); - final WorkflowModel workflow = new WorkflowModel(); - final TaskModel task = new TaskModel(); - final Map inputData = new HashMap<>(); - inputData.put("queryExpression", ".inputJson.key[0]"); - final Map inputJson = new HashMap<>(); - inputJson.put("key", Collections.singletonList("VALUE")); - inputData.put("inputJson", inputJson); - task.setInputData(inputData); - task.setOutputData(new HashMap<>()); - - jsonJqTransform.start(workflow, task, null); - - assertNull(task.getOutputData().get("error")); - assertEquals("\"VALUE\"", task.getOutputData().get("result").toString()); - assertEquals("[\"VALUE\"]", task.getOutputData().get("resultList").toString()); - } - - @Test - public void simpleErrorShouldBeDisplayed() { - final JsonJqTransform jsonJqTransform = new JsonJqTransform(objectMapper); - final WorkflowModel workflow = new WorkflowModel(); - final TaskModel task = new TaskModel(); - final Map inputData = new HashMap<>(); - inputData.put("queryExpression", "{"); - 
task.setInputData(inputData); - task.setOutputData(new HashMap<>()); - - jsonJqTransform.start(workflow, task, null); - - assertTrue( - ((String) task.getOutputData().get("error")) - .startsWith("Encountered \"\" at line 1, column 1.")); - } - - @Test - public void nestedExceptionsWithNACausesShouldBeDisregarded() { - final JsonJqTransform jsonJqTransform = new JsonJqTransform(objectMapper); - final WorkflowModel workflow = new WorkflowModel(); - final TaskModel task = new TaskModel(); - final Map inputData = new HashMap<>(); - inputData.put( - "queryExpression", - "{officeID: (.inputJson.OIDs | unique)[], requestedIndicatorList: .inputJson.requestedindicatorList}"); - final Map inputJson = new HashMap<>(); - inputJson.put("OIDs", Collections.singletonList("VALUE")); - final Map indicatorList = new HashMap<>(); - indicatorList.put("indicator", "AFA"); - indicatorList.put("value", false); - inputJson.put("requestedindicatorList", Collections.singletonList(indicatorList)); - inputData.put("inputJson", inputJson); - task.setInputData(inputData); - task.setOutputData(new HashMap<>()); - - jsonJqTransform.start(workflow, task, null); - - assertTrue( - ((String) task.getOutputData().get("error")) - .startsWith("Encountered \" \"[\" \"[ \"\" at line 1")); - } -} diff --git a/licenseheader.txt b/licenseheader.txt deleted file mode 100644 index 0edfbaf25..000000000 --- a/licenseheader.txt +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright $YEAR Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ \ No newline at end of file diff --git a/polyglot-clients/README.md b/polyglot-clients/README.md deleted file mode 100644 index 30a191a5f..000000000 --- a/polyglot-clients/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# SDKs for other languages - -Language specific client SDKs are maintained at a dedicated [conductor-sdk](https://github.com/conductor-sdk) repository. \ No newline at end of file diff --git a/polyglot-clients/go/README.md b/polyglot-clients/go/README.md deleted file mode 100644 index 93b138156..000000000 --- a/polyglot-clients/go/README.md +++ /dev/null @@ -1,109 +0,0 @@ -# Go client for Conductor -Go client for Conductor provides two sets of functions: - -1. Workflow Management APIs (start, terminate, get workflow status etc.) -2. Worker execution framework - -## Prerequisites -Go must be installed and GOPATH env variable set. - -## Install - -```shell -go get github.com/netflix/conductor/client/go -``` -This will create a Go project under $GOPATH/src and download any dependencies. - -## Run - -```shell -go run $GOPATH/src/netflix-conductor/client/go/startclient/startclient.go -``` - -## Using Workflow Management API -Go struct ```ConductorHttpClient``` provides client API calls to the conductor server to start and manage workflows and tasks. 
- -### Example -```go -package main - -import ( - conductor "github.com/netflix/conductor/client/go" -) - -func main() { - conductorClient := conductor.NewConductorHttpClient("http://localhost:8080") - - // Example API that will print out workflow definition meta - conductorClient.GetAllWorkflowDefs() -} - -``` - -## Task Worker Execution -Task Worker execution APIs facilitates execution of a task worker using go. The API provides necessary tools to poll for tasks at a specified interval and executing the go worker in a separate goroutine. - -### Example -The following go code demonstrates workers for tasks "task_1" and "task_2". - -```go -package task - -import ( - "fmt" -) - -// Implementation for "task_1" -func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { - log.Println("Executing Task_1_Execution_Function for", t.TaskType) - - //Do some logic - taskResult = task.NewTaskResult(t) - - output := map[string]interface{}{"task":"task_1", "key2":"value2", "key3":3, "key4":false} - taskResult.OutputData = output - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} - -// Implementation for "task_2" -func Task_2_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { - log.Println("Executing Task_2_Execution_Function for", t.TaskType) - - //Do some logic - taskResult = task.NewTaskResult(t) - - output := map[string]interface{}{"task":"task_2", "key2":"value2", "key3":3, "key4":false} - taskResult.OutputData = output - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} - -``` - - -Then main application to utilize these workers - -```go -package main - -import ( - "github.com/netflix/conductor/client/go" - "github.com/netflix/conductor/client/go/task/sample" -) - -func main() { - c := conductor.NewConductorWorker("http://localhost:8080", 1, 10000) - - c.Start("task_1", "", sample.Task_1_Execution_Function, false) - c.Start("task_2", "mydomain", 
sample.Task_2_Execution_Function, true) -} - -``` - -Note: For the example listed above the example task implementations are in conductor/task/sample package. Real task implementations can be placed in conductor/task directory or new subdirectory. - diff --git a/polyglot-clients/go/conductorhttpclient.go b/polyglot-clients/go/conductorhttpclient.go deleted file mode 100644 index 5745d9597..000000000 --- a/polyglot-clients/go/conductorhttpclient.go +++ /dev/null @@ -1,426 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package conductor - -import ( - "fmt" - "github.com/netflix/conductor/client/go/httpclient" - log "github.com/sirupsen/logrus" - "strconv" -) - -type ConductorHttpClient struct { - httpClient *httpclient.HttpClient -} - -func NewConductorHttpClient(baseUrl string) *ConductorHttpClient { - conductorClient := new(ConductorHttpClient) - headers := map[string]string{"Content-Type": "application/json", "Accept": "application/json"} - httpClient := httpclient.NewHttpClient(baseUrl, headers, true) - conductorClient.httpClient = httpClient - conductorClient.httpClient.PrintLogs = false - return conductorClient -} - -type ConductorHttpClientConfig struct { - baseUrl string - bearerToken *string -} - -func NewConductorHttpClientWithConfig(config ConductorHttpClientConfig) *ConductorHttpClient { - conductorClient := new(ConductorHttpClient) - headers := map[string]string{"Content-Type": "application/json", "Accept": "application/json"} - if config.bearerToken != nil { - headers["Authorization"] = fmt.Sprintf("Bearer %s", *config.bearerToken) - } - httpClient := httpclient.NewHttpClient(config.baseUrl, headers, true) - conductorClient.httpClient = httpClient - return conductorClient -} - -/**********************/ -/* Metadata Functions */ -/**********************/ - -func (c *ConductorHttpClient) GetWorkflowDef(workflowName string, version int) (string, error) { - url := c.httpClient.MakeUrl("/metadata/workflow/{workflowName}", "{workflowName}", workflowName) - versionString := "1" - - // Set default version as 1 - if version > 0 { - versionString = strconv.Itoa(version) - } - params := map[string]string{"version": versionString} - outputString, err := c.httpClient.Get(url, params, nil) - if err != nil { - log.Error("Error while trying to Get Workflow Definition", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) CreateWorkflowDef(workflowDefBody string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/workflow") - 
outputString, err := c.httpClient.Post(url, nil, nil, workflowDefBody) - if err != nil { - log.Error("Error while trying to Create Workflow Definition", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) UpdateWorkflowDefs(workflowDefsBody string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/workflow") - outputString, err := c.httpClient.Put(url, nil, nil, workflowDefsBody) - if err != nil { - log.Error("Error while trying to Update Workflow Definitions", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) GetAllWorkflowDefs() (string, error) { - url := c.httpClient.MakeUrl("/metadata/workflow") - outputString, err := c.httpClient.Get(url, nil, nil) - if err != nil { - log.Error("Error while trying to Get All Workflow Definitions", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) UnRegisterWorkflowDef(workflowDefName string, version int) (string, error) { - versionString := "" - - versionString = strconv.Itoa(version) - - url := c.httpClient.MakeUrl("/metadata/workflow/{workflowDefName}/{version}", "{workflowDefName}", - workflowDefName, "{version}", versionString) - - outputString, err := c.httpClient.Delete(url, nil, nil, "") - - if err != nil { - log.Error("Error while trying to Unregister Workflow Definition", workflowDefName, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) GetTaskDef(taskDefName string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/taskdefs/{taskDefName}", "{taskDefName}", taskDefName) - outputString, err := c.httpClient.Get(url, nil, nil) - if err != nil { - log.Error("Error while trying to Get Task Definition", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) RegisterTaskDefs(taskDefsMeta string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/taskdefs") - 
outputString, err := c.httpClient.Post(url, nil, nil, taskDefsMeta) - if err != nil { - log.Error("Error while trying to Register Task Definitions", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) UpdateTaskDef(taskDefMeta string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/taskdefs") - outputString, err := c.httpClient.Put(url, nil, nil, taskDefMeta) - if err != nil { - log.Error("Error while trying to Update Task Definition", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) UnRegisterTaskDef(taskDefName string) (string, error) { - url := c.httpClient.MakeUrl("/metadata/taskdefs/{taskDefName}", "{taskDefName}", taskDefName) - outputString, err := c.httpClient.Delete(url, nil, nil, "") - if err != nil { - log.Error("Error while trying to Unregister Task Definition", taskDefName, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) GetAllTaskDefs() (string, error) { - url := c.httpClient.MakeUrl("/metadata/taskdefs") - outputString, err := c.httpClient.Get(url, nil, nil) - if err != nil { - log.Error("Error while trying to Get All Task Definitions", err) - return "", err - } else { - return outputString, nil - } -} - -/**********************/ -/* Task Functions */ -/**********************/ - -func (c *ConductorHttpClient) GetTask(taskId string) (string, error) { - url := c.httpClient.MakeUrl("/tasks/{taskId}", "{taskId}", taskId) - outputString, err := c.httpClient.Get(url, nil, nil) - if err != nil { - log.Error("Error while trying to Get Task", taskId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) UpdateTask(taskBody string) (string, error) { - url := c.httpClient.MakeUrl("/tasks") - outputString, err := c.httpClient.Post(url, nil, nil, taskBody) - if err != nil { - log.Error("Error while trying to Update Task", err) - return "", err - } else { - 
return outputString, nil - } -} - -func (c *ConductorHttpClient) PollForTask(taskType string, workerid string, domain string) (string, error) { - url := c.httpClient.MakeUrl("/tasks/poll/{taskType}", "{taskType}", taskType) - params := map[string]string{ - "workerid": workerid, - } - // only add the domain if requested, otherwise conductor will silently fail (https://github.com/Netflix/conductor/issues/1952) - if domain != "" { - params["domain"] = domain - } - outputString, err := c.httpClient.Get(url, params, nil) - if err != nil { - log.Error("Error while trying to Poll For Task taskType:", taskType, ",workerid:", workerid, err) - return "", err - } else { - return outputString, nil - } -} - -// AckTask Deprecated -func (c *ConductorHttpClient) AckTask(taskId, workerid, domain string) (string, error) { - url := c.httpClient.MakeUrl("/tasks/{taskId}/ack", "{taskId}", taskId) - params := map[string]string{ - "workerid": workerid, - } - // only add the domain if requested, otherwise conductor will silently fail (https://github.com/Netflix/conductor/issues/1952) - if domain != "" { - params["domain"] = domain - } - headers := map[string]string{"Accept": "application/json"} - outputString, err := c.httpClient.Post(url, params, headers, "") - if err != nil { - return "", err - } - if outputString != "true" { - return "", fmt.Errorf("Task id: %s has already been Acked", taskId) - } - return outputString, nil -} - -func (c *ConductorHttpClient) GetAllTasksInQueue() (string, error) { - url := c.httpClient.MakeUrl("/tasks/queue/all") - outputString, err := c.httpClient.Get(url, nil, nil) - if err != nil { - log.Error("Error while trying to Get All Tasks in Queue", err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) RemoveTaskFromQueue(taskType string, taskId string) (string, error) { - url := c.httpClient.MakeUrl("/tasks/queue/{taskType}/{taskId}", "{taskType}", taskType, "{taskId}", taskId) - outputString, err := 
c.httpClient.Delete(url, nil, nil, "") - if err != nil { - log.Error("Error while trying to Delete Task taskType:", taskType, ",taskId:", taskId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) GetTaskQueueSizes(taskNames string) (string, error) { - url := c.httpClient.MakeUrl("/tasks/queue/sizes") - outputString, err := c.httpClient.Post(url, nil, nil, taskNames) - if err != nil { - log.Error("Error while trying to Get Task Queue Sizes", err) - return "", err - } else { - return outputString, nil - } -} - -/**********************/ -/* Workflow Functions */ -/**********************/ - -func (c *ConductorHttpClient) GetWorkflow(workflowId string, includeTasks bool) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}", "{workflowId}", workflowId) - includeTasksString := "false" - if includeTasks { - includeTasksString = "true" - } - params := map[string]string{"includeTasks": includeTasksString} - outputString, err := c.httpClient.Get(url, params, nil) - if err != nil { - log.Error("Error while trying to Get Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) GetRunningWorkflows(workflowName string, version int, startTime float64, endTime float64) (string, error) { - url := c.httpClient.MakeUrl("/workflow/running/{workflowName}", "{workflowName}", workflowName) - versionString := "1" - // Set default version as 1 - if version > 0 { - versionString = strconv.Itoa(version) - } - params := map[string]string{"version": versionString} - if startTime != 0 { - params["startTime"] = strconv.FormatFloat(startTime, 'f', -1, 64) - } - if endTime != 0 { - params["endTime"] = strconv.FormatFloat(endTime, 'f', -1, 64) - } - - outputString, err := c.httpClient.Get(url, params, nil) - if err != nil { - log.Error("Error while trying to Get Running Workflows", workflowName, err) - return "", err - } else { - return outputString, nil - } -} - 
-func (c *ConductorHttpClient) StartWorkflow(workflowName string, version int, correlationId string, inputJson string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowName}", "{workflowName}", workflowName) - - params := make(map[string]string) - if version > 0 { - params["version"] = strconv.Itoa(version) - } - - if correlationId != "" { - params["correlationId"] = correlationId - } - - if inputJson == "" { - inputJson = "{}" - } - - headers := map[string]string{"Accept": "text/plain"} - - outputString, err := c.httpClient.Post(url, params, headers, inputJson) - if err != nil { - log.Error("Error while trying to Start Workflow", workflowName, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) TerminateWorkflow(workflowId string, reason string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}", "{workflowId}", workflowId) - - params := make(map[string]string) - - if reason != "" { - params["reason"] = reason - } - - outputString, err := c.httpClient.Delete(url, params, nil, "") - if err != nil { - log.Error("Error while trying to Terminate Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) PauseWorkflow(workflowId string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}/pause", "{workflowId}", workflowId) - outputString, err := c.httpClient.Put(url, nil, nil, "") - if err != nil { - log.Error("Error while trying to Pause Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) ResumeWorkflow(workflowId string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}/resume", "{workflowId}", workflowId) - outputString, err := c.httpClient.Put(url, nil, nil, "") - if err != nil { - log.Error("Error while trying to Resume Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - 
} -} - -func (c *ConductorHttpClient) SkipTaskFromWorkflow(workflowId string, taskReferenceName string, skipTaskRequestBody string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}/skiptask/{taskReferenceName}", "{workflowId}", workflowId, "{taskReferenceName}", taskReferenceName) - - outputString, err := c.httpClient.Put(url, nil, nil, skipTaskRequestBody) - if err != nil { - log.Error("Error while trying to Skip Task From Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) RerunWorkflow(workflowId string, rerunWorkflowRequest string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}/rerun", "{workflowId}", workflowId) - if rerunWorkflowRequest == "" { - rerunWorkflowRequest = "{}" - } - - outputString, err := c.httpClient.Post(url, nil, nil, rerunWorkflowRequest) - if err != nil { - log.Error("Error while trying to Rerun Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} - -func (c *ConductorHttpClient) RestartWorkflow(workflowId string) (string, error) { - url := c.httpClient.MakeUrl("/workflow/{workflowId}/restart", "{workflowId}", workflowId) - - outputString, err := c.httpClient.Post(url, nil, nil, "") - if err != nil { - log.Error("Error while trying to Restart Completed Workflow", workflowId, err) - return "", err - } else { - return outputString, nil - } -} diff --git a/polyglot-clients/go/conductorworker.go b/polyglot-clients/go/conductorworker.go deleted file mode 100644 index d5ca00a17..000000000 --- a/polyglot-clients/go/conductorworker.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -package conductor - -import ( - "github.com/netflix/conductor/client/go/task" - log "github.com/sirupsen/logrus" - "os" - "time" -) - -var ( - hostname, hostnameError = os.Hostname() -) - -func init() { - if hostnameError != nil { - log.Fatal("Could not get hostname") - } -} - -type ConductorWorker struct { - ConductorHttpClient *ConductorHttpClient - ThreadCount int - PollingInterval int -} - -// NewConductorWorker Create a new Conductor worker -//with baseUrl (e.g. http://localhost:8080/api, and pollingInterval in millisecond) -func NewConductorWorker(baseUrl string, threadCount int, pollingInterval int) *ConductorWorker { - conductorWorker := new(ConductorWorker) - conductorWorker.ThreadCount = threadCount - conductorWorker.PollingInterval = pollingInterval - conductorHttpClient := NewConductorHttpClient(baseUrl) - conductorWorker.ConductorHttpClient = conductorHttpClient - return conductorWorker -} - -func (c *ConductorWorker) Execute(t *task.Task, executeFunction func(t *task.Task) (*task.TaskResult, error)) { - taskResult, err := executeFunction(t) - if taskResult == nil { - log.Error("TaskResult cannot be nil: ", t.TaskId) - return - } - if err != nil { - log.Error("Error Executing task:", err.Error()) - taskResult.Status = task.FAILED - taskResult.ReasonForIncompletion = err.Error() - } - - taskResultJsonString, err := taskResult.ToJSONString() - if err != nil { - log.Error("Error Forming TaskResult JSON body", err) - return - } - _, _ = c.ConductorHttpClient.UpdateTask(taskResultJsonString) -} - -func (c *ConductorWorker) 
PollAndExecute(taskType string, domain string, executeFunction func(t *task.Task) (*task.TaskResult, error)) { - for { - time.Sleep(time.Duration(c.PollingInterval) * time.Millisecond) - - // Poll for Task taskType - polled, err := c.ConductorHttpClient.PollForTask(taskType, hostname, domain) - if err != nil { - log.Error("Error Polling task:", err.Error()) - continue - } - if polled == "" { - log.Debug("No task found for:", taskType) - continue - } - - // Parse Http response into Task - parsedTask, err := task.ParseTask(polled) - if err != nil { - log.Error("Error Parsing task:", err.Error()) - continue - } - - // Execute given function - c.Execute(parsedTask, executeFunction) - } -} - -func (c *ConductorWorker) Start(taskType string, domain string, executeFunction func(t *task.Task) (*task.TaskResult, error), wait bool) { - log.Println("Polling for task:", taskType, "with a:", c.PollingInterval, "(ms) polling interval with", c.ThreadCount, "goroutines for task execution, with workerid as", hostname) - for i := 1; i <= c.ThreadCount; i++ { - go c.PollAndExecute(taskType, domain, executeFunction) - } - - // wait infinitely while the go routines are running - if wait { - select {} - } -} diff --git a/polyglot-clients/go/go.mod b/polyglot-clients/go/go.mod deleted file mode 100644 index 8c3b345bf..000000000 --- a/polyglot-clients/go/go.mod +++ /dev/null @@ -1,5 +0,0 @@ -module github.com/netflix/conductor/client/go - -go 1.12 - -require github.com/sirupsen/logrus v1.8.1 diff --git a/polyglot-clients/go/go.sum b/polyglot-clients/go/go.sum deleted file mode 100644 index 59bd790e9..000000000 --- a/polyglot-clients/go/go.sum +++ /dev/null @@ -1,10 +0,0 @@ -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/polyglot-clients/go/httpclient/httpclient.go b/polyglot-clients/go/httpclient/httpclient.go deleted file mode 100644 index cf6a8e5fb..000000000 --- a/polyglot-clients/go/httpclient/httpclient.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package httpclient - -import ( - "bytes" - "fmt" - log "github.com/sirupsen/logrus" - "io/ioutil" - "net/http" - "strings" -) - -type HttpClient struct { - BaseUrl string - Headers map[string]string - PrintLogs bool - client *http.Client -} - -func NewHttpClient(baseUrl string, headers map[string]string, printLogs bool) *HttpClient { - httpClient := new(HttpClient) - httpClient.BaseUrl = baseUrl - httpClient.Headers = headers - httpClient.PrintLogs = printLogs - httpClient.client = &http.Client{} - return httpClient -} - -func (c *HttpClient) logSendRequest(url string, requestType string, body string) { - log.Println("Sending [", requestType, "] request to Server (", url, "):") - log.Println("Body:") - log.Println(body) -} - -func (c *HttpClient) logResponse(statusCode string, response string) { - log.Println("Received response from Server (", c.BaseUrl, "):") - log.Println("Status: ", statusCode) - log.Println("Response:") - log.Println(response) -} - -func genParamString(paramMap map[string]string) string { - if paramMap == nil || len(paramMap) == 0 { - return "" - } - - output := "?" 
- for key, value := range paramMap { - output += key - output += "=" - output += value - output += "&" - } - return output -} - -func (c *HttpClient) httpRequest(url string, requestType string, headers map[string]string, body string) (string, error) { - var req *http.Request - var err error - - if requestType == "GET" { - req, err = http.NewRequest(requestType, url, nil) - } else { - var bodyStr = []byte(body) - req, err = http.NewRequest(requestType, url, bytes.NewBuffer(bodyStr)) - } - - if err != nil { - return "", err - } - // Default Headers - for key, value := range c.Headers { - req.Header.Set(key, value) - } - - // Custom Headers - for key, value := range headers { - req.Header.Set(key, value) - } - - if c.PrintLogs { - c.logSendRequest(url, requestType, body) - } - - resp, err := c.client.Do(req) - if err != nil { - return "", err - } - - // If successful HTTP call, but Client/Server error, we return error - if resp.StatusCode >= 400 && resp.StatusCode < 500 { - return "", fmt.Errorf("%d Http Client Error for url: %s", resp.StatusCode, url) - } - if resp.StatusCode >= 500 && resp.StatusCode < 600 { - return "", fmt.Errorf("%d Http Server Error for url: %s", resp.StatusCode, url) - } - - defer resp.Body.Close() - response, err := ioutil.ReadAll(resp.Body) - responseString := string(response) - if err != nil { - log.Error("ERROR reading response for URL: ", url, err) - return "", err - } - - if c.PrintLogs { - c.logResponse(resp.Status, responseString) - } - return responseString, nil -} - -func (c *HttpClient) Get(url string, queryParamsMap map[string]string, headers map[string]string) (string, error) { - urlString := url + genParamString(queryParamsMap) - resp, err := c.httpRequest(urlString, "GET", headers, "") - if err != nil { - log.Println("Http GET Error for URL: ", urlString) - return "", err - } - return resp, nil -} - -func (c *HttpClient) Put(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { - 
urlString := url + genParamString(queryParamsMap) - resp, err := c.httpRequest(urlString, "PUT", headers, body) - if err != nil { - log.Error("Http PUT Error for URL: ", urlString, err) - return "", err - } - return resp, nil -} - -func (c *HttpClient) Post(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { - urlString := url + genParamString(queryParamsMap) - resp, err := c.httpRequest(urlString, "POST", headers, body) - if err != nil { - log.Error("Http POST Error for URL: ", urlString, "Error: ", err) - return "", err - } - return resp, nil -} - -func (c *HttpClient) Delete(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { - urlString := url + genParamString(queryParamsMap) - resp, err := c.httpRequest(urlString, "DELETE", headers, body) - if err != nil { - log.Error("Http DELETE Error for URL: ", urlString) - return "", err - } - return resp, nil -} - -func (c *HttpClient) MakeUrl(path string, args ...string) string { - url := c.BaseUrl - r := strings.NewReplacer(args...) - return url + r.Replace(path) -} diff --git a/polyglot-clients/go/startclient/startclient.go b/polyglot-clients/go/startclient/startclient.go deleted file mode 100644 index 3d94d001f..000000000 --- a/polyglot-clients/go/startclient/startclient.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package main - -import ( - conductor "github.com/netflix/conductor/client/go" - "github.com/netflix/conductor/client/go/task/sample" - log "github.com/sirupsen/logrus" - "os" -) - -//Example init function that shows how to configure logging -//Using json formatter and changing level to Debug -func init() { - - // Log as JSON instead of the default ASCII formatter. - log.SetFormatter(&log.JSONFormatter{}) - - //Stdout, change to a file for production use case - log.SetOutput(os.Stdout) - - // Set to debug for demonstration. Change to Info for production use cases. - log.SetLevel(log.DebugLevel) -} -func main() { - c := conductor.NewConductorWorker("http://localhost:8080/api", 1, 1) - - c.Start("task_15", "", sample.Task_1_Execution_Function, true) - //c.Start("task_2", "mydomain", sample.Task_2_Execution_Function, true) -} diff --git a/polyglot-clients/go/task/sample/task_1_exec.go b/polyglot-clients/go/task/sample/task_1_exec.go deleted file mode 100644 index 55b3f7abc..000000000 --- a/polyglot-clients/go/task/sample/task_1_exec.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package sample - -import ( - "github.com/netflix/conductor/client/go/task" - log "github.com/sirupsen/logrus" -) - -// Implementation for "task_1" -func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { - log.Debug("Executing Task_1_Execution_Function for", t.TaskType) - - //Do some logic - taskResult = task.NewTaskResult(t) - - output := map[string]interface{}{"task": "task_1", "key2": "value2", "key3": 3, "key4": false} - taskResult.OutputData = output - taskResult.Logs = append(taskResult.Logs, task.LogMessage{Log: "Hello World"}) - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} diff --git a/polyglot-clients/go/task/sample/task_2_exec.go b/polyglot-clients/go/task/sample/task_2_exec.go deleted file mode 100644 index d73f0beaa..000000000 --- a/polyglot-clients/go/task/sample/task_2_exec.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package sample - -import ( - "log" - "github.com/netflix/conductor/client/go/task" -) - -// Implementation for "task_2" -func Task_2_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { - log.Println("Executing Task_2_Execution_Function for", t.TaskType) - - //Do some logic - taskResult = task.NewTaskResult(t) - - output := map[string]interface{}{"task":"task_2", "key2":"value2", "key3":3, "key4":false} - taskResult.OutputData = output - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} diff --git a/polyglot-clients/go/task/task.go b/polyglot-clients/go/task/task.go deleted file mode 100644 index 104128cad..000000000 --- a/polyglot-clients/go/task/task.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package task - -import ( - "encoding/json" -) - -type WorkflowTaskType uint8 -type TaskStatus string - -const ( - SIMPLE WorkflowTaskType = iota - DYNAMIC - FORK_JOIN - FORK_JOIN_DYNAMIC - DECISION - JOIN - SUB_WORKFLOW - EVENT - WAIT - USER_DEFINED -) - -const ( - IN_PROGRESS TaskStatus = "IN_PROGRESS" - CANCELED = "CANCELED" - FAILED = "FAILED" - COMPLETED = "COMPLETED" - SCHEDULED = "SCHEDULED" - TIMED_OUT = "TIMED_OUT" - SKIPPED = "SKIPPED" -) - -type Task struct { - TaskType string `json:"taskType"` - Status TaskStatus `json:"status"` - InputData map[string]interface{} `json:"inputData"` - ReferenceTaskName string `json:"referenceTaskName"` - RetryCount int `json:"retryCount"` - Seq int `json:"seq"` - CorrelationId string `json:"correlationId"` - PollCount int `json:"pollCount"` - TaskDefName string `json:"taskDefName"` - // Time when the task was scheduled - ScheduledTime int64 `json:"scheduledTime"` - // Time when the task was first polled - StartTime int64 `json:"startTime"` - // Time when the task completed executing - EndTime int64 `json:"endTime"` - // Time when the task was last updated - UpdateTime int64 `json:"updateTime"` - StartDelayInSeconds int `json:"startDelayInSeconds"` - RetriedTaskId string `json:"retriedTaskId"` - Retried bool `json:"retried"` - // Default = true - CallbackFromWorker bool `json:"callbackFromWorker"` - // DynamicWorkflowTask - ResponseTimeoutSeconds int `json:"responseTimeoutSeconds"` - WorkflowInstanceId string `json:"workflowInstanceId"` - TaskId string `json:"taskId"` - ReasonForIncompletion string `json:"reasonForIncompletion"` - CallbackAfterSeconds int64 `json:"callbackAfterSeconds"` - WorkerId string `json:"workerId"` - OutputData map[string]interface{} `json:"outputData"` -} - -// "Constructor" to initialze non zero value defaults -func NewTask() *Task { - task := new(Task) - task.CallbackFromWorker = true - task.InputData = make(map[string]interface{}) - task.OutputData = make(map[string]interface{}) - return task 
-} - -func (t *Task) ToJSONString() (string, error) { - var jsonString string - b, err := json.Marshal(t) - if err == nil { - jsonString = string(b) - } - return jsonString, err -} - -func ParseTask(inputJSON string) (*Task, error) { - t := NewTask() - err := json.Unmarshal([]byte(inputJSON), t) - return t, err -} diff --git a/polyglot-clients/go/task/task_exec_template.go b/polyglot-clients/go/task/task_exec_template.go deleted file mode 100644 index 364138abc..000000000 --- a/polyglot-clients/go/task/task_exec_template.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package task - -import ( - "log" -) - -/* Format for functions must be: - func Name (t *Task) (taskResult TaskResult, err error) - - taskResult (TaskResult) should return struct with populated fields - - err (error) returns error if any -*/ - -func ExampleTaskExecutionFunction(t *Task) (taskResult *TaskResult, err error) { - log.Println("Executing Example Function for", t.TaskType) - log.Println(t) - - //Do some logic - taskResult = NewTaskResult(t) - - output := map[string]interface{}{"task":"example", "key2":"value2", "key3":3, "key4":false} - taskResult.OutputData = output - taskResult.Status = "COMPLETED" - err = nil - - return taskResult, err -} diff --git a/polyglot-clients/go/task/taskresult.go b/polyglot-clients/go/task/taskresult.go deleted file mode 100644 index cd03c632a..000000000 --- a/polyglot-clients/go/task/taskresult.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2017 Netflix, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package task - -import ( - "encoding/json" -) - -type TaskResultStatus string - -type TaskResult struct { - Status TaskResultStatus `json:"status"` - WorkflowInstanceId string `json:"workflowInstanceId"` - TaskId string `json:"taskId"` - ReasonForIncompletion string `json:"reasonForIncompletion"` - CallbackAfterSeconds int64 `json:"callbackAfterSeconds"` - WorkerId string `json:"workerId"` - OutputData map[string]interface{} `json:"outputData"` - Logs []LogMessage `json:"logs"` -} - -// LogMessage used to sent logs to conductor server -type LogMessage struct { - Log string `json:"log"` - TaskID string `json:"taskId"` - CreatedTime int `json:"createdTime"` -} - -// "Constructor" to initialze non zero value defaults -func NewEmptyTaskResult() *TaskResult { - taskResult := new(TaskResult) - taskResult.OutputData = make(map[string]interface{}) - taskResult.Logs = make([]LogMessage, 0) - return taskResult -} - -func NewTaskResult(t *Task) *TaskResult { - taskResult := new(TaskResult) - taskResult.CallbackAfterSeconds = t.CallbackAfterSeconds - taskResult.WorkflowInstanceId = t.WorkflowInstanceId - taskResult.TaskId = t.TaskId - taskResult.ReasonForIncompletion = t.ReasonForIncompletion - taskResult.Status = TaskResultStatus(t.Status) - taskResult.WorkerId = t.WorkerId - taskResult.OutputData = t.OutputData - taskResult.Logs = make([]LogMessage, 0) - return taskResult -} - -func (t *TaskResult) ToJSONString() (string, error) { - var jsonString string - b, err := json.Marshal(t) - if err == nil { - jsonString = string(b) - } - return jsonString, err -} - -func ParseTaskResult(inputJSON string) (*TaskResult, error) { - t := NewEmptyTaskResult() - err := json.Unmarshal([]byte(inputJSON), t) - return t, err -} diff --git a/polyglot-clients/gogrpc/.gitignore b/polyglot-clients/gogrpc/.gitignore deleted file mode 100644 index 49ce3c193..000000000 --- a/polyglot-clients/gogrpc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/vendor \ No newline at end of file diff --git 
a/polyglot-clients/gogrpc/Gopkg.lock b/polyglot-clients/gogrpc/Gopkg.lock deleted file mode 100644 index 870521170..000000000 --- a/polyglot-clients/gogrpc/Gopkg.lock +++ /dev/null @@ -1,220 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec" - name = "github.com/davecgh/go-spew" - packages = ["spew"] - pruneopts = "NUT" - revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" - version = "v1.1.1" - -[[projects]] - digest = "1:68496943b027f697647279f48129c75a65cd816a6a95fc6bb7b8b039ea050f1d" - name = "github.com/golang/protobuf" - packages = [ - "jsonpb", - "proto", - "protoc-gen-go", - "protoc-gen-go/descriptor", - "protoc-gen-go/generator", - "protoc-gen-go/generator/internal/remap", - "protoc-gen-go/grpc", - "protoc-gen-go/plugin", - "ptypes", - "ptypes/any", - "ptypes/duration", - "ptypes/empty", - "ptypes/struct", - "ptypes/timestamp", - "ptypes/wrappers", - ] - pruneopts = "NUT" - revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5" - version = "v1.2.0" - -[[projects]] - digest = "1:406338ad39ab2e37b7f4452906442a3dbf0eb3379dd1f06aafb5c07e769a5fbb" - name = "github.com/inconshreveable/mousetrap" - packages = ["."] - pruneopts = "NUT" - revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" - version = "v1.0" - -[[projects]] - digest = "1:d0ede3366f0fca3c7c0eec74a2c5bfae7711a2fdecb2a3c63330840f333f7977" - name = "github.com/jhump/protoreflect" - packages = [ - "desc", - "desc/internal", - "dynamic", - "dynamic/grpcdynamic", - "grpcreflect", - "internal", - ] - pruneopts = "NUT" - revision = "b28d968eb345542b430a717dc72a88abf10d0b95" - version = "v1.0.0" - -[[projects]] - branch = "master" - digest = "1:d6b6479233449ae6a3defd500360f810d55415f2a768a9ecf62c126b7667903f" - name = "github.com/kazegusuri/grpcurl" - packages = ["."] - pruneopts = "NUT" - revision = "98e92bc156677950a3bedb128a7227b40f0ff125" - -[[projects]] - 
digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe" - name = "github.com/pmezard/go-difflib" - packages = ["difflib"] - pruneopts = "NUT" - revision = "792786c7400a136282c1664665ae0a8db921c6c2" - version = "v1.0.0" - -[[projects]] - digest = "1:343d44e06621142ab09ae0c76c1799104cdfddd3ffb445d78b1adf8dc3ffaf3d" - name = "github.com/spf13/cobra" - packages = ["."] - pruneopts = "NUT" - revision = "ef82de70bb3f60c65fb8eebacbb2d122ef517385" - version = "v0.0.3" - -[[projects]] - digest = "1:e3707aeaccd2adc89eba6c062fec72116fe1fc1ba71097da85b4d8ae1668a675" - name = "github.com/spf13/pflag" - packages = ["."] - pruneopts = "NUT" - revision = "9a97c102cda95a86cec2345a6f09f55a939babf5" - version = "v1.0.2" - -[[projects]] - branch = "master" - digest = "1:ea222cd3bb494fb2b0f799e33f91691d2f60cb298d064233367cd692d07d6c39" - name = "github.com/square/goprotowrap" - packages = [ - ".", - "cmd/protowrap", - "wrapper", - ] - pruneopts = "NUT" - revision = "6f414ea4a80cc23c26725b193215be2a0d85d6e1" - -[[projects]] - digest = "1:bacb8b590716ab7c33f2277240972c9582d389593ee8d66fc10074e0508b8126" - name = "github.com/stretchr/testify" - packages = ["assert"] - pruneopts = "NUT" - revision = "f35b8ab0b5a2cef36673838d662e249dd9c94686" - version = "v1.2.2" - -[[projects]] - branch = "master" - digest = "1:a2707daa031e6db5fcaae8a9b30eaa503a0d7d8aa72cd50b27fa394ef6c3f7fe" - name = "golang.org/x/net" - packages = [ - "context", - "http/httpguts", - "http2", - "http2/hpack", - "idna", - "internal/timeseries", - "trace", - ] - pruneopts = "NUT" - revision = "26e67e76b6c3f6ce91f7c52def5af501b4e0f3a2" - -[[projects]] - branch = "master" - digest = "1:5c2d57086d29bf60bf5fc8a1e6550650034f8b26177dced9b16d1f673311ab40" - name = "golang.org/x/sys" - packages = ["unix"] - pruneopts = "NUT" - revision = "d0be0721c37eeb5299f245a996a483160fc36940" - -[[projects]] - digest = "1:e7071ed636b5422cc51c0e3a6cebc229d6c9fffc528814b519a980641422d619" - name = "golang.org/x/text" - 
packages = [ - "collate", - "collate/build", - "internal/colltab", - "internal/gen", - "internal/tag", - "internal/triegen", - "internal/ucd", - "language", - "secure/bidirule", - "transform", - "unicode/bidi", - "unicode/cldr", - "unicode/norm", - "unicode/rangetable", - ] - pruneopts = "NUT" - revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" - version = "v0.3.0" - -[[projects]] - branch = "master" - digest = "1:9b295ec121babd2b209d9d52115f839f2ea4f8165977f4e446326b875434ab7b" - name = "google.golang.org/genproto" - packages = [ - "googleapis/rpc/errdetails", - "googleapis/rpc/status", - ] - pruneopts = "NUT" - revision = "5a2fd4cab2d6d4a18e70c34937662526cd0c4bd1" - -[[projects]] - digest = "1:827de0295c937025afe1896edd20b56aa9be42840818c194cf0eee6ab7d27e43" - name = "google.golang.org/grpc" - packages = [ - ".", - "balancer", - "balancer/base", - "balancer/roundrobin", - "codes", - "connectivity", - "credentials", - "encoding", - "encoding/proto", - "grpclog", - "internal", - "internal/backoff", - "internal/channelz", - "internal/envconfig", - "internal/grpcrand", - "internal/transport", - "keepalive", - "metadata", - "naming", - "peer", - "reflection/grpc_reflection_v1alpha", - "resolver", - "resolver/dns", - "resolver/passthrough", - "stats", - "status", - "tap", - ] - pruneopts = "NUT" - revision = "8dea3dc473e90c8179e519d91302d0597c0ca1d1" - version = "v1.15.0" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - input-imports = [ - "github.com/golang/protobuf/proto", - "github.com/golang/protobuf/protoc-gen-go", - "github.com/golang/protobuf/ptypes/any", - "github.com/golang/protobuf/ptypes/struct", - "github.com/kazegusuri/grpcurl", - "github.com/square/goprotowrap/cmd/protowrap", - "github.com/stretchr/testify/assert", - "golang.org/x/net/context", - "google.golang.org/grpc", - ] - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/polyglot-clients/gogrpc/Gopkg.toml b/polyglot-clients/gogrpc/Gopkg.toml deleted file mode 100644 
index 55ddf0e9e..000000000 --- a/polyglot-clients/gogrpc/Gopkg.toml +++ /dev/null @@ -1,12 +0,0 @@ -[prune] - go-tests = true - unused-packages = true - non-go = true - -[[constraint]] - name = "google.golang.org/grpc" - version = "1.15.0" - -[[constraint]] - name = "github.com/stretchr/testify" - version = "1.2.1" diff --git a/polyglot-clients/gogrpc/Makefile b/polyglot-clients/gogrpc/Makefile deleted file mode 100644 index 74578d7e6..000000000 --- a/polyglot-clients/gogrpc/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -PROTO_SRC = ../../grpc/src/main/proto -PROTO_VERSION = 3.5.1 - -BUILD_DIR = build -BIN_DIR = $(BUILD_DIR)/bin -SERVICES = \ - $(PROTO_SRC)/grpc/event_service.pb.go \ - $(PROTO_SRC)/grpc/metadata_service.pb.go \ - $(PROTO_SRC)/grpc/search.pb.go \ - $(PROTO_SRC)/grpc/task_service.pb.go \ - $(PROTO_SRC)/grpc/workflow_service.pb.go - -USER_ID := $(shell id -u) -GROUP_ID := $(shell id -g) -CONDUCTOR_ROOT = /go/src/github.com/netflix/conductor -PROTOC = docker run --rm -it \ - --user $(USER_ID):$(GROUP_ID) \ - -v '$(PWD)/../..':'$(CONDUCTOR_ROOT)' \ - -w $(CONDUCTOR_ROOT)/client/gogrpc \ - znly/protoc:0.3.0 - -proto: models $(SERVICES) - -build: - go fmt ./... - go build ./... - -test: - go test ./... - -# Helpers -$(SERVICES): %.pb.go: %.proto - $(PROTOC) -I $(PROTO_SRC) $< --go_out=plugins=grpc:/go/src - -models: - $(PROTOC) -I $(PROTO_SRC) $(PROTO_SRC)/model/*.proto --go_out=/go/src \ No newline at end of file diff --git a/polyglot-clients/gogrpc/README.md b/polyglot-clients/gogrpc/README.md deleted file mode 100644 index e57b61235..000000000 --- a/polyglot-clients/gogrpc/README.md +++ /dev/null @@ -1,7 +0,0 @@ -## Conductor: gRPC Go client generation -At the moment, the generation of the go client is manual. -In order to generate the Go gRPC client, run: -``` -make proto -``` -This should update the folder `client/gogrpc/conductor` if any changes. 
diff --git a/polyglot-clients/gogrpc/conductor/client.go b/polyglot-clients/gogrpc/conductor/client.go deleted file mode 100644 index 264de7a86..000000000 --- a/polyglot-clients/gogrpc/conductor/client.go +++ /dev/null @@ -1,77 +0,0 @@ -package conductor - -import ( - "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" - "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" - "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" - grpc "google.golang.org/grpc" -) - -// TasksClient is a Conductor client that exposes the Conductor -// Tasks API. -type TasksClient interface { - Tasks() tasks.TaskServiceClient - Shutdown() -} - -// MetadataClient is a Conductor client that exposes the Conductor -// Metadata API. -type MetadataClient interface { - Metadata() metadata.MetadataServiceClient - Shutdown() -} - -// WorkflowsClient is a Conductor client that exposes the Conductor -// Workflows API. -type WorkflowsClient interface { - Workflows() workflows.WorkflowServiceClient - Shutdown() -} - -// Client encapsulates a GRPC connection to a Conductor server and -// the different services it exposes. -type Client struct { - conn *grpc.ClientConn - tasks tasks.TaskServiceClient - metadata metadata.MetadataServiceClient - workflows workflows.WorkflowServiceClient -} - -// NewClient returns a new Client with a GRPC connection to the given address, -// and any optional grpc.Dialoption settings. -func NewClient(address string, options ...grpc.DialOption) (*Client, error) { - conn, err := grpc.Dial(address, options...) - if err != nil { - return nil, err - } - return &Client{conn: conn}, nil -} - -// Shutdown closes the underlying GRPC connection for this client. 
-func (client *Client) Shutdown() { - client.conn.Close() -} - -// Tasks returns the Tasks service for this client -func (client *Client) Tasks() tasks.TaskServiceClient { - if client.tasks == nil { - client.tasks = tasks.NewTaskServiceClient(client.conn) - } - return client.tasks -} - -// Metadata returns the Metadata service for this client -func (client *Client) Metadata() metadata.MetadataServiceClient { - if client.metadata == nil { - client.metadata = metadata.NewMetadataServiceClient(client.conn) - } - return client.metadata -} - -// Workflows returns the workflows service for this client -func (client *Client) Workflows() workflows.WorkflowServiceClient { - if client.workflows == nil { - client.workflows = workflows.NewWorkflowServiceClient(client.conn) - } - return client.workflows -} diff --git a/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go deleted file mode 100644 index 697247158..000000000 --- a/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go +++ /dev/null @@ -1,991 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/event_service.proto - -package events // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/events" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type AddEventHandlerRequest struct { - Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddEventHandlerRequest) Reset() { *m = AddEventHandlerRequest{} } -func (m *AddEventHandlerRequest) String() string { return proto.CompactTextString(m) } -func (*AddEventHandlerRequest) ProtoMessage() {} -func (*AddEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{0} -} -func (m *AddEventHandlerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddEventHandlerRequest.Unmarshal(m, b) -} -func (m *AddEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddEventHandlerRequest.Marshal(b, m, deterministic) -} -func (dst *AddEventHandlerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddEventHandlerRequest.Merge(dst, src) -} -func (m *AddEventHandlerRequest) XXX_Size() int { - return xxx_messageInfo_AddEventHandlerRequest.Size(m) -} -func (m *AddEventHandlerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AddEventHandlerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_AddEventHandlerRequest proto.InternalMessageInfo - -func (m *AddEventHandlerRequest) GetHandler() *model.EventHandler { - if m != nil { - return m.Handler - } - return nil -} - -type AddEventHandlerResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddEventHandlerResponse) Reset() { *m = AddEventHandlerResponse{} } -func (m *AddEventHandlerResponse) String() string { return proto.CompactTextString(m) } -func (*AddEventHandlerResponse) ProtoMessage() {} -func (*AddEventHandlerResponse) Descriptor() ([]byte, []int) { - return 
fileDescriptor_event_service_913a1fde08d4f277, []int{1} -} -func (m *AddEventHandlerResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddEventHandlerResponse.Unmarshal(m, b) -} -func (m *AddEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddEventHandlerResponse.Marshal(b, m, deterministic) -} -func (dst *AddEventHandlerResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddEventHandlerResponse.Merge(dst, src) -} -func (m *AddEventHandlerResponse) XXX_Size() int { - return xxx_messageInfo_AddEventHandlerResponse.Size(m) -} -func (m *AddEventHandlerResponse) XXX_DiscardUnknown() { - xxx_messageInfo_AddEventHandlerResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_AddEventHandlerResponse proto.InternalMessageInfo - -type UpdateEventHandlerRequest struct { - Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateEventHandlerRequest) Reset() { *m = UpdateEventHandlerRequest{} } -func (m *UpdateEventHandlerRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateEventHandlerRequest) ProtoMessage() {} -func (*UpdateEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{2} -} -func (m *UpdateEventHandlerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateEventHandlerRequest.Unmarshal(m, b) -} -func (m *UpdateEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateEventHandlerRequest.Marshal(b, m, deterministic) -} -func (dst *UpdateEventHandlerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateEventHandlerRequest.Merge(dst, src) -} -func (m *UpdateEventHandlerRequest) XXX_Size() int { - return xxx_messageInfo_UpdateEventHandlerRequest.Size(m) -} -func 
(m *UpdateEventHandlerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateEventHandlerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateEventHandlerRequest proto.InternalMessageInfo - -func (m *UpdateEventHandlerRequest) GetHandler() *model.EventHandler { - if m != nil { - return m.Handler - } - return nil -} - -type UpdateEventHandlerResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateEventHandlerResponse) Reset() { *m = UpdateEventHandlerResponse{} } -func (m *UpdateEventHandlerResponse) String() string { return proto.CompactTextString(m) } -func (*UpdateEventHandlerResponse) ProtoMessage() {} -func (*UpdateEventHandlerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{3} -} -func (m *UpdateEventHandlerResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateEventHandlerResponse.Unmarshal(m, b) -} -func (m *UpdateEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateEventHandlerResponse.Marshal(b, m, deterministic) -} -func (dst *UpdateEventHandlerResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateEventHandlerResponse.Merge(dst, src) -} -func (m *UpdateEventHandlerResponse) XXX_Size() int { - return xxx_messageInfo_UpdateEventHandlerResponse.Size(m) -} -func (m *UpdateEventHandlerResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateEventHandlerResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateEventHandlerResponse proto.InternalMessageInfo - -type RemoveEventHandlerRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveEventHandlerRequest) Reset() { *m = RemoveEventHandlerRequest{} } -func (m *RemoveEventHandlerRequest) String() 
string { return proto.CompactTextString(m) } -func (*RemoveEventHandlerRequest) ProtoMessage() {} -func (*RemoveEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{4} -} -func (m *RemoveEventHandlerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveEventHandlerRequest.Unmarshal(m, b) -} -func (m *RemoveEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveEventHandlerRequest.Marshal(b, m, deterministic) -} -func (dst *RemoveEventHandlerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveEventHandlerRequest.Merge(dst, src) -} -func (m *RemoveEventHandlerRequest) XXX_Size() int { - return xxx_messageInfo_RemoveEventHandlerRequest.Size(m) -} -func (m *RemoveEventHandlerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveEventHandlerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveEventHandlerRequest proto.InternalMessageInfo - -func (m *RemoveEventHandlerRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -type RemoveEventHandlerResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveEventHandlerResponse) Reset() { *m = RemoveEventHandlerResponse{} } -func (m *RemoveEventHandlerResponse) String() string { return proto.CompactTextString(m) } -func (*RemoveEventHandlerResponse) ProtoMessage() {} -func (*RemoveEventHandlerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{5} -} -func (m *RemoveEventHandlerResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveEventHandlerResponse.Unmarshal(m, b) -} -func (m *RemoveEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveEventHandlerResponse.Marshal(b, m, deterministic) -} -func (dst *RemoveEventHandlerResponse) 
XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveEventHandlerResponse.Merge(dst, src) -} -func (m *RemoveEventHandlerResponse) XXX_Size() int { - return xxx_messageInfo_RemoveEventHandlerResponse.Size(m) -} -func (m *RemoveEventHandlerResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveEventHandlerResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveEventHandlerResponse proto.InternalMessageInfo - -type GetEventHandlersRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetEventHandlersRequest) Reset() { *m = GetEventHandlersRequest{} } -func (m *GetEventHandlersRequest) String() string { return proto.CompactTextString(m) } -func (*GetEventHandlersRequest) ProtoMessage() {} -func (*GetEventHandlersRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{6} -} -func (m *GetEventHandlersRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetEventHandlersRequest.Unmarshal(m, b) -} -func (m *GetEventHandlersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetEventHandlersRequest.Marshal(b, m, deterministic) -} -func (dst *GetEventHandlersRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetEventHandlersRequest.Merge(dst, src) -} -func (m *GetEventHandlersRequest) XXX_Size() int { - return xxx_messageInfo_GetEventHandlersRequest.Size(m) -} -func (m *GetEventHandlersRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetEventHandlersRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetEventHandlersRequest proto.InternalMessageInfo - -type GetEventHandlersForEventRequest struct { - Event string `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"` - ActiveOnly bool `protobuf:"varint,2,opt,name=active_only,json=activeOnly,proto3" json:"active_only,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - 
XXX_sizecache int32 `json:"-"` -} - -func (m *GetEventHandlersForEventRequest) Reset() { *m = GetEventHandlersForEventRequest{} } -func (m *GetEventHandlersForEventRequest) String() string { return proto.CompactTextString(m) } -func (*GetEventHandlersForEventRequest) ProtoMessage() {} -func (*GetEventHandlersForEventRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{7} -} -func (m *GetEventHandlersForEventRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetEventHandlersForEventRequest.Unmarshal(m, b) -} -func (m *GetEventHandlersForEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetEventHandlersForEventRequest.Marshal(b, m, deterministic) -} -func (dst *GetEventHandlersForEventRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetEventHandlersForEventRequest.Merge(dst, src) -} -func (m *GetEventHandlersForEventRequest) XXX_Size() int { - return xxx_messageInfo_GetEventHandlersForEventRequest.Size(m) -} -func (m *GetEventHandlersForEventRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetEventHandlersForEventRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetEventHandlersForEventRequest proto.InternalMessageInfo - -func (m *GetEventHandlersForEventRequest) GetEvent() string { - if m != nil { - return m.Event - } - return "" -} - -func (m *GetEventHandlersForEventRequest) GetActiveOnly() bool { - if m != nil { - return m.ActiveOnly - } - return false -} - -type GetQueuesRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueuesRequest) Reset() { *m = GetQueuesRequest{} } -func (m *GetQueuesRequest) String() string { return proto.CompactTextString(m) } -func (*GetQueuesRequest) ProtoMessage() {} -func (*GetQueuesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{8} -} -func (m *GetQueuesRequest) 
XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueuesRequest.Unmarshal(m, b) -} -func (m *GetQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueuesRequest.Marshal(b, m, deterministic) -} -func (dst *GetQueuesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueuesRequest.Merge(dst, src) -} -func (m *GetQueuesRequest) XXX_Size() int { - return xxx_messageInfo_GetQueuesRequest.Size(m) -} -func (m *GetQueuesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueuesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueuesRequest proto.InternalMessageInfo - -type GetQueuesResponse struct { - EventToQueueUri map[string]string `protobuf:"bytes,1,rep,name=event_to_queue_uri,json=eventToQueueUri,proto3" json:"event_to_queue_uri,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueuesResponse) Reset() { *m = GetQueuesResponse{} } -func (m *GetQueuesResponse) String() string { return proto.CompactTextString(m) } -func (*GetQueuesResponse) ProtoMessage() {} -func (*GetQueuesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{9} -} -func (m *GetQueuesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueuesResponse.Unmarshal(m, b) -} -func (m *GetQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueuesResponse.Marshal(b, m, deterministic) -} -func (dst *GetQueuesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueuesResponse.Merge(dst, src) -} -func (m *GetQueuesResponse) XXX_Size() int { - return xxx_messageInfo_GetQueuesResponse.Size(m) -} -func (m *GetQueuesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueuesResponse.DiscardUnknown(m) -} - -var 
xxx_messageInfo_GetQueuesResponse proto.InternalMessageInfo - -func (m *GetQueuesResponse) GetEventToQueueUri() map[string]string { - if m != nil { - return m.EventToQueueUri - } - return nil -} - -type GetQueueSizesRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueueSizesRequest) Reset() { *m = GetQueueSizesRequest{} } -func (m *GetQueueSizesRequest) String() string { return proto.CompactTextString(m) } -func (*GetQueueSizesRequest) ProtoMessage() {} -func (*GetQueueSizesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{10} -} -func (m *GetQueueSizesRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueueSizesRequest.Unmarshal(m, b) -} -func (m *GetQueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueueSizesRequest.Marshal(b, m, deterministic) -} -func (dst *GetQueueSizesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueueSizesRequest.Merge(dst, src) -} -func (m *GetQueueSizesRequest) XXX_Size() int { - return xxx_messageInfo_GetQueueSizesRequest.Size(m) -} -func (m *GetQueueSizesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueueSizesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueueSizesRequest proto.InternalMessageInfo - -type GetQueueSizesResponse struct { - EventToQueueInfo map[string]*GetQueueSizesResponse_QueueInfo `protobuf:"bytes,2,rep,name=event_to_queue_info,json=eventToQueueInfo,proto3" json:"event_to_queue_info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueueSizesResponse) Reset() { *m = GetQueueSizesResponse{} } -func (m *GetQueueSizesResponse) String() string { return proto.CompactTextString(m) } -func 
(*GetQueueSizesResponse) ProtoMessage() {} -func (*GetQueueSizesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{11} -} -func (m *GetQueueSizesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueueSizesResponse.Unmarshal(m, b) -} -func (m *GetQueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueueSizesResponse.Marshal(b, m, deterministic) -} -func (dst *GetQueueSizesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueueSizesResponse.Merge(dst, src) -} -func (m *GetQueueSizesResponse) XXX_Size() int { - return xxx_messageInfo_GetQueueSizesResponse.Size(m) -} -func (m *GetQueueSizesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueueSizesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueueSizesResponse proto.InternalMessageInfo - -func (m *GetQueueSizesResponse) GetEventToQueueInfo() map[string]*GetQueueSizesResponse_QueueInfo { - if m != nil { - return m.EventToQueueInfo - } - return nil -} - -type GetQueueSizesResponse_QueueInfo struct { - QueueSizes map[string]int64 `protobuf:"bytes,1,rep,name=queue_sizes,json=queueSizes,proto3" json:"queue_sizes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueueSizesResponse_QueueInfo) Reset() { *m = GetQueueSizesResponse_QueueInfo{} } -func (m *GetQueueSizesResponse_QueueInfo) String() string { return proto.CompactTextString(m) } -func (*GetQueueSizesResponse_QueueInfo) ProtoMessage() {} -func (*GetQueueSizesResponse_QueueInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{11, 0} -} -func (m *GetQueueSizesResponse_QueueInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Unmarshal(m, b) -} -func (m 
*GetQueueSizesResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Marshal(b, m, deterministic) -} -func (dst *GetQueueSizesResponse_QueueInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Merge(dst, src) -} -func (m *GetQueueSizesResponse_QueueInfo) XXX_Size() int { - return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Size(m) -} -func (m *GetQueueSizesResponse_QueueInfo) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueueSizesResponse_QueueInfo.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueueSizesResponse_QueueInfo proto.InternalMessageInfo - -func (m *GetQueueSizesResponse_QueueInfo) GetQueueSizes() map[string]int64 { - if m != nil { - return m.QueueSizes - } - return nil -} - -type GetQueueProvidersRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueueProvidersRequest) Reset() { *m = GetQueueProvidersRequest{} } -func (m *GetQueueProvidersRequest) String() string { return proto.CompactTextString(m) } -func (*GetQueueProvidersRequest) ProtoMessage() {} -func (*GetQueueProvidersRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{12} -} -func (m *GetQueueProvidersRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueueProvidersRequest.Unmarshal(m, b) -} -func (m *GetQueueProvidersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueueProvidersRequest.Marshal(b, m, deterministic) -} -func (dst *GetQueueProvidersRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueueProvidersRequest.Merge(dst, src) -} -func (m *GetQueueProvidersRequest) XXX_Size() int { - return xxx_messageInfo_GetQueueProvidersRequest.Size(m) -} -func (m *GetQueueProvidersRequest) XXX_DiscardUnknown() { - 
xxx_messageInfo_GetQueueProvidersRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueueProvidersRequest proto.InternalMessageInfo - -type GetQueueProvidersResponse struct { - Providers []string `protobuf:"bytes,1,rep,name=providers,proto3" json:"providers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetQueueProvidersResponse) Reset() { *m = GetQueueProvidersResponse{} } -func (m *GetQueueProvidersResponse) String() string { return proto.CompactTextString(m) } -func (*GetQueueProvidersResponse) ProtoMessage() {} -func (*GetQueueProvidersResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{13} -} -func (m *GetQueueProvidersResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetQueueProvidersResponse.Unmarshal(m, b) -} -func (m *GetQueueProvidersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetQueueProvidersResponse.Marshal(b, m, deterministic) -} -func (dst *GetQueueProvidersResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetQueueProvidersResponse.Merge(dst, src) -} -func (m *GetQueueProvidersResponse) XXX_Size() int { - return xxx_messageInfo_GetQueueProvidersResponse.Size(m) -} -func (m *GetQueueProvidersResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetQueueProvidersResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetQueueProvidersResponse proto.InternalMessageInfo - -func (m *GetQueueProvidersResponse) GetProviders() []string { - if m != nil { - return m.Providers - } - return nil -} - -func init() { - proto.RegisterType((*AddEventHandlerRequest)(nil), "conductor.grpc.events.AddEventHandlerRequest") - proto.RegisterType((*AddEventHandlerResponse)(nil), "conductor.grpc.events.AddEventHandlerResponse") - proto.RegisterType((*UpdateEventHandlerRequest)(nil), "conductor.grpc.events.UpdateEventHandlerRequest") - 
proto.RegisterType((*UpdateEventHandlerResponse)(nil), "conductor.grpc.events.UpdateEventHandlerResponse") - proto.RegisterType((*RemoveEventHandlerRequest)(nil), "conductor.grpc.events.RemoveEventHandlerRequest") - proto.RegisterType((*RemoveEventHandlerResponse)(nil), "conductor.grpc.events.RemoveEventHandlerResponse") - proto.RegisterType((*GetEventHandlersRequest)(nil), "conductor.grpc.events.GetEventHandlersRequest") - proto.RegisterType((*GetEventHandlersForEventRequest)(nil), "conductor.grpc.events.GetEventHandlersForEventRequest") - proto.RegisterType((*GetQueuesRequest)(nil), "conductor.grpc.events.GetQueuesRequest") - proto.RegisterType((*GetQueuesResponse)(nil), "conductor.grpc.events.GetQueuesResponse") - proto.RegisterMapType((map[string]string)(nil), "conductor.grpc.events.GetQueuesResponse.EventToQueueUriEntry") - proto.RegisterType((*GetQueueSizesRequest)(nil), "conductor.grpc.events.GetQueueSizesRequest") - proto.RegisterType((*GetQueueSizesResponse)(nil), "conductor.grpc.events.GetQueueSizesResponse") - proto.RegisterMapType((map[string]*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.EventToQueueInfoEntry") - proto.RegisterType((*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo") - proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo.QueueSizesEntry") - proto.RegisterType((*GetQueueProvidersRequest)(nil), "conductor.grpc.events.GetQueueProvidersRequest") - proto.RegisterType((*GetQueueProvidersResponse)(nil), "conductor.grpc.events.GetQueueProvidersResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. 
-const _ = grpc.SupportPackageIsVersion4 - -// EventServiceClient is the client API for EventService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type EventServiceClient interface { - // POST / - AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) - // PUT / - UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) - // DELETE /{name} - RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) - // GET / - GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) - // GET /{name} - GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) - // GET /queues - GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) - GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) - // GET /queues/providers - GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) -} - -type eventServiceClient struct { - cc *grpc.ClientConn -} - -func NewEventServiceClient(cc *grpc.ClientConn) EventServiceClient { - return &eventServiceClient{cc} -} - -func (c *eventServiceClient) AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) { - out := new(AddEventHandlerResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/AddEventHandler", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *eventServiceClient) UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) { - out := new(UpdateEventHandlerResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/UpdateEventHandler", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *eventServiceClient) RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) { - out := new(RemoveEventHandlerResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/RemoveEventHandler", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *eventServiceClient) GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) { - stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[0], "/conductor.grpc.events.EventService/GetEventHandlers", opts...) 
- if err != nil { - return nil, err - } - x := &eventServiceGetEventHandlersClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type EventService_GetEventHandlersClient interface { - Recv() (*model.EventHandler, error) - grpc.ClientStream -} - -type eventServiceGetEventHandlersClient struct { - grpc.ClientStream -} - -func (x *eventServiceGetEventHandlersClient) Recv() (*model.EventHandler, error) { - m := new(model.EventHandler) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *eventServiceClient) GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) { - stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[1], "/conductor.grpc.events.EventService/GetEventHandlersForEvent", opts...) - if err != nil { - return nil, err - } - x := &eventServiceGetEventHandlersForEventClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type EventService_GetEventHandlersForEventClient interface { - Recv() (*model.EventHandler, error) - grpc.ClientStream -} - -type eventServiceGetEventHandlersForEventClient struct { - grpc.ClientStream -} - -func (x *eventServiceGetEventHandlersForEventClient) Recv() (*model.EventHandler, error) { - m := new(model.EventHandler) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *eventServiceClient) GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) { - out := new(GetQueuesResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueues", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *eventServiceClient) GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) { - out := new(GetQueueSizesResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueSizes", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *eventServiceClient) GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) { - out := new(GetQueueProvidersResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueProviders", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// EventServiceServer is the server API for EventService service. -type EventServiceServer interface { - // POST / - AddEventHandler(context.Context, *AddEventHandlerRequest) (*AddEventHandlerResponse, error) - // PUT / - UpdateEventHandler(context.Context, *UpdateEventHandlerRequest) (*UpdateEventHandlerResponse, error) - // DELETE /{name} - RemoveEventHandler(context.Context, *RemoveEventHandlerRequest) (*RemoveEventHandlerResponse, error) - // GET / - GetEventHandlers(*GetEventHandlersRequest, EventService_GetEventHandlersServer) error - // GET /{name} - GetEventHandlersForEvent(*GetEventHandlersForEventRequest, EventService_GetEventHandlersForEventServer) error - // GET /queues - GetQueues(context.Context, *GetQueuesRequest) (*GetQueuesResponse, error) - GetQueueSizes(context.Context, *GetQueueSizesRequest) (*GetQueueSizesResponse, error) - // GET /queues/providers - GetQueueProviders(context.Context, *GetQueueProvidersRequest) (*GetQueueProvidersResponse, error) -} - -func RegisterEventServiceServer(s *grpc.Server, srv EventServiceServer) { - s.RegisterService(&_EventService_serviceDesc, srv) -} - -func _EventService_AddEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) 
error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddEventHandlerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).AddEventHandler(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/AddEventHandler", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).AddEventHandler(ctx, req.(*AddEventHandlerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _EventService_UpdateEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateEventHandlerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).UpdateEventHandler(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/UpdateEventHandler", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).UpdateEventHandler(ctx, req.(*UpdateEventHandlerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _EventService_RemoveEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RemoveEventHandlerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).RemoveEventHandler(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/RemoveEventHandler", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).RemoveEventHandler(ctx, req.(*RemoveEventHandlerRequest)) - } - return interceptor(ctx, in, 
info, handler) -} - -func _EventService_GetEventHandlers_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(GetEventHandlersRequest) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(EventServiceServer).GetEventHandlers(m, &eventServiceGetEventHandlersServer{stream}) -} - -type EventService_GetEventHandlersServer interface { - Send(*model.EventHandler) error - grpc.ServerStream -} - -type eventServiceGetEventHandlersServer struct { - grpc.ServerStream -} - -func (x *eventServiceGetEventHandlersServer) Send(m *model.EventHandler) error { - return x.ServerStream.SendMsg(m) -} - -func _EventService_GetEventHandlersForEvent_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(GetEventHandlersForEventRequest) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(EventServiceServer).GetEventHandlersForEvent(m, &eventServiceGetEventHandlersForEventServer{stream}) -} - -type EventService_GetEventHandlersForEventServer interface { - Send(*model.EventHandler) error - grpc.ServerStream -} - -type eventServiceGetEventHandlersForEventServer struct { - grpc.ServerStream -} - -func (x *eventServiceGetEventHandlersForEventServer) Send(m *model.EventHandler) error { - return x.ServerStream.SendMsg(m) -} - -func _EventService_GetQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetQueuesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).GetQueues(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/GetQueues", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).GetQueues(ctx, req.(*GetQueuesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _EventService_GetQueueSizes_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetQueueSizesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).GetQueueSizes(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/GetQueueSizes", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).GetQueueSizes(ctx, req.(*GetQueueSizesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _EventService_GetQueueProviders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetQueueProvidersRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(EventServiceServer).GetQueueProviders(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.events.EventService/GetQueueProviders", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(EventServiceServer).GetQueueProviders(ctx, req.(*GetQueueProvidersRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _EventService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "conductor.grpc.events.EventService", - HandlerType: (*EventServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "AddEventHandler", - Handler: _EventService_AddEventHandler_Handler, - }, - { - MethodName: "UpdateEventHandler", - Handler: _EventService_UpdateEventHandler_Handler, - }, - { - MethodName: "RemoveEventHandler", - Handler: _EventService_RemoveEventHandler_Handler, - }, - { - MethodName: "GetQueues", - Handler: _EventService_GetQueues_Handler, - }, - { - MethodName: "GetQueueSizes", - Handler: _EventService_GetQueueSizes_Handler, - }, - { - MethodName: 
"GetQueueProviders", - Handler: _EventService_GetQueueProviders_Handler, - }, - }, - Streams: []grpc.StreamDesc{ - { - StreamName: "GetEventHandlers", - Handler: _EventService_GetEventHandlers_Handler, - ServerStreams: true, - }, - { - StreamName: "GetEventHandlersForEvent", - Handler: _EventService_GetEventHandlersForEvent_Handler, - ServerStreams: true, - }, - }, - Metadata: "grpc/event_service.proto", -} - -func init() { - proto.RegisterFile("grpc/event_service.proto", fileDescriptor_event_service_913a1fde08d4f277) -} - -var fileDescriptor_event_service_913a1fde08d4f277 = []byte{ - // 687 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5d, 0x6f, 0xd3, 0x3c, - 0x18, 0x55, 0xd6, 0xf7, 0x65, 0xf4, 0x29, 0xb0, 0x61, 0xf6, 0x91, 0x5a, 0x43, 0x9b, 0x7a, 0x43, - 0x25, 0xc0, 0x19, 0x45, 0x1a, 0x1f, 0xd2, 0x24, 0xa8, 0xb4, 0x0d, 0x24, 0x24, 0xb6, 0x6c, 0x93, - 0x10, 0x17, 0x54, 0x5d, 0xe2, 0x75, 0x19, 0xa9, 0xdd, 0x26, 0x4e, 0x44, 0xe1, 0x6f, 0xf0, 0x4f, - 0xb8, 0xe6, 0x8a, 0x3f, 0x86, 0x62, 0xa7, 0x49, 0x9a, 0x26, 0xb4, 0x45, 0xdc, 0xd5, 0xcf, 0xc7, - 0x39, 0x3e, 0xb6, 0x9f, 0xd3, 0x80, 0xde, 0xf3, 0x06, 0x96, 0x41, 0x43, 0xca, 0x44, 0xc7, 0xa7, - 0x5e, 0xe8, 0x58, 0x94, 0x0c, 0x3c, 0x2e, 0x38, 0x5a, 0xb7, 0x38, 0xb3, 0x03, 0x4b, 0x70, 0x8f, - 0x44, 0x35, 0x44, 0xd6, 0xf8, 0x58, 0xef, 0x73, 0x9b, 0xba, 0xaa, 0xe3, 0xaa, 0xcb, 0x6c, 0x97, - 0x7a, 0xaa, 0xa1, 0x71, 0x02, 0x1b, 0xaf, 0x6d, 0xfb, 0x20, 0x4a, 0xbc, 0x51, 0x09, 0x93, 0x0e, - 0x03, 0xea, 0x0b, 0xf4, 0x0c, 0x96, 0xe3, 0x52, 0x5d, 0xdb, 0xd1, 0x9a, 0xb5, 0xd6, 0x7d, 0x92, - 0x82, 0xcb, 0x66, 0x32, 0xd1, 0x36, 0xae, 0x6e, 0xd4, 0x61, 0x73, 0x0a, 0xd2, 0x1f, 0x70, 0xe6, - 0xd3, 0xc6, 0x19, 0xd4, 0xcf, 0x07, 0x76, 0x57, 0xd0, 0x7f, 0x4a, 0xb8, 0x05, 0xb8, 0x08, 0x35, - 0xe6, 0x34, 0xa0, 0x6e, 0xd2, 0x3e, 0x0f, 0x0b, 0x39, 0x11, 0xfc, 0xc7, 0xba, 0x7d, 0x2a, 0x09, - 0xab, 0xa6, 0xfc, 0x1d, 0xc1, 0x15, 0x35, 0xc4, 0x70, 0x75, 0xd8, 0x3c, 0xa2, 0x22, 
0x9b, 0xf2, - 0x63, 0xb0, 0xc6, 0x07, 0xd8, 0xce, 0xa7, 0x0e, 0xb9, 0x27, 0xd7, 0x63, 0xbe, 0x35, 0xf8, 0x5f, - 0x5e, 0x42, 0x4c, 0xa8, 0x16, 0x68, 0x1b, 0x6a, 0x5d, 0x4b, 0x38, 0x21, 0xed, 0x70, 0xe6, 0x8e, - 0xf4, 0xa5, 0x1d, 0xad, 0x79, 0xd3, 0x04, 0x15, 0x7a, 0xcf, 0xdc, 0x51, 0x03, 0xc1, 0xea, 0x11, - 0x15, 0x27, 0x01, 0x0d, 0x68, 0xc2, 0xf6, 0x4b, 0x83, 0xbb, 0x99, 0xa0, 0xda, 0x1e, 0xba, 0x06, - 0xa4, 0xde, 0x85, 0xe0, 0x9d, 0x61, 0x94, 0xea, 0x04, 0x9e, 0xa3, 0x6b, 0x3b, 0x95, 0x66, 0xad, - 0xb5, 0x4f, 0x0a, 0x5f, 0x07, 0x99, 0x42, 0x51, 0xe7, 0x7c, 0xc6, 0x65, 0xf4, 0xdc, 0x73, 0x0e, - 0x98, 0xf0, 0x46, 0xe6, 0x0a, 0x9d, 0x8c, 0xe2, 0x36, 0xac, 0x15, 0x15, 0xa2, 0x55, 0xa8, 0x7c, - 0xa6, 0xa3, 0x58, 0x62, 0xf4, 0x33, 0x92, 0x1d, 0x76, 0xdd, 0x80, 0x4a, 0x69, 0x55, 0x53, 0x2d, - 0x5e, 0x2e, 0x3d, 0xd7, 0x1a, 0x1b, 0xb0, 0x36, 0xa6, 0x3f, 0x75, 0xbe, 0xa6, 0xea, 0x7e, 0x56, - 0x60, 0x3d, 0x97, 0x88, 0x15, 0x0e, 0xe1, 0x5e, 0x4e, 0xa1, 0xc3, 0x2e, 0xb9, 0xbe, 0x24, 0x25, - 0xb6, 0x67, 0x48, 0x9c, 0x80, 0x9a, 0x90, 0xf9, 0x96, 0x5d, 0x72, 0xa5, 0x73, 0x95, 0xe6, 0xc2, - 0xf8, 0x87, 0x06, 0xd5, 0x64, 0x85, 0x7a, 0x50, 0x53, 0xbc, 0x7e, 0x04, 0x16, 0x9f, 0xed, 0xe1, - 0x42, 0xc4, 0x09, 0x18, 0x49, 0x93, 0x8a, 0x1c, 0x86, 0x49, 0x00, 0xef, 0xc3, 0x4a, 0x2e, 0x3d, - 0xeb, 0x68, 0x2b, 0x99, 0xa3, 0xc5, 0xdf, 0x60, 0xbd, 0x50, 0x60, 0x01, 0xc8, 0xbb, 0x2c, 0x48, - 0xad, 0xb5, 0xf7, 0x77, 0x62, 0xb2, 0xf7, 0x8a, 0x41, 0x1f, 0x57, 0x1f, 0x7b, 0x3c, 0x74, 0xec, - 0xcc, 0x9c, 0xbc, 0x80, 0x7a, 0x41, 0x2e, 0xbe, 0xde, 0x2d, 0xa8, 0x0e, 0xc6, 0x41, 0x79, 0xb6, - 0x55, 0x33, 0x0d, 0xb4, 0xbe, 0x2f, 0xc3, 0x2d, 0x29, 0xea, 0x54, 0xd9, 0x1e, 0x1a, 0xc0, 0x4a, - 0xce, 0x6c, 0xd0, 0xe3, 0x92, 0xdd, 0x17, 0xfb, 0x1c, 0x26, 0xf3, 0x96, 0xc7, 0x1b, 0x1c, 0x01, - 0x9a, 0x76, 0x1b, 0xb4, 0x5b, 0x82, 0x52, 0x6a, 0x77, 0xf8, 0xc9, 0x02, 0x1d, 0x29, 0xf5, 0xb4, - 0x33, 0x95, 0x52, 0x97, 0xba, 0x5e, 0x29, 0x75, 0xb9, 0xed, 0x21, 0x4b, 0x3a, 0xd0, 0x84, 0xb7, - 0x21, 0x52, 0xfe, 0x4c, 
0x8a, 0xfc, 0x11, 0xff, 0xd9, 0xcf, 0x77, 0x35, 0xe4, 0xcb, 0x47, 0x53, - 0x68, 0xa0, 0x68, 0x6f, 0x4e, 0xb2, 0x9c, 0xe3, 0xce, 0x26, 0xfd, 0x04, 0xd5, 0xc4, 0x00, 0xd1, - 0x83, 0xd9, 0x16, 0xa9, 0x60, 0x9b, 0xf3, 0x7a, 0x29, 0xba, 0x86, 0xdb, 0x13, 0x73, 0x83, 0x1e, - 0xce, 0x37, 0x5d, 0x8a, 0xe7, 0xd1, 0x22, 0xa3, 0x88, 0xc2, 0xf4, 0x2f, 0x21, 0x99, 0x2c, 0x64, - 0xcc, 0x80, 0xc8, 0xcf, 0x27, 0xde, 0x9d, 0xbf, 0x41, 0xf1, 0xb6, 0x19, 0x60, 0x8b, 0xf7, 0x09, - 0xa3, 0xe2, 0xd2, 0x75, 0xbe, 0xe4, 0xda, 0xdb, 0x77, 0xb2, 0x13, 0x7b, 0x7c, 0xf1, 0xf1, 0x55, - 0xcf, 0x11, 0x57, 0xc1, 0x05, 0xb1, 0x78, 0xdf, 0x88, 0x5b, 0x8c, 0xa4, 0xc5, 0xb0, 0x5c, 0x87, - 0x32, 0x61, 0xf4, 0xb8, 0xfc, 0xc8, 0x49, 0xe3, 0xe9, 0x37, 0x8f, 0x7f, 0x71, 0x43, 0xde, 0xe4, - 0xd3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x65, 0xa5, 0x44, 0x1b, 0x09, 0x09, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go deleted file mode 100644 index daf353165..000000000 --- a/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go +++ /dev/null @@ -1,867 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/metadata_service.proto - -package metadata // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type CreateWorkflowRequest struct { - Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateWorkflowRequest) Reset() { *m = CreateWorkflowRequest{} } -func (m *CreateWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*CreateWorkflowRequest) ProtoMessage() {} -func (*CreateWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{0} -} -func (m *CreateWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateWorkflowRequest.Unmarshal(m, b) -} -func (m *CreateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *CreateWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateWorkflowRequest.Merge(dst, src) -} -func (m *CreateWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_CreateWorkflowRequest.Size(m) -} -func (m *CreateWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CreateWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateWorkflowRequest proto.InternalMessageInfo - -func (m *CreateWorkflowRequest) GetWorkflow() *model.WorkflowDef { - if m != nil { - return m.Workflow - } - return nil -} - -type CreateWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateWorkflowResponse) Reset() { *m = CreateWorkflowResponse{} } -func (m *CreateWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*CreateWorkflowResponse) ProtoMessage() {} -func (*CreateWorkflowResponse) Descriptor() ([]byte, []int) { - return 
fileDescriptor_metadata_service_4778cc9d199e5aef, []int{1} -} -func (m *CreateWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateWorkflowResponse.Unmarshal(m, b) -} -func (m *CreateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *CreateWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateWorkflowResponse.Merge(dst, src) -} -func (m *CreateWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_CreateWorkflowResponse.Size(m) -} -func (m *CreateWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CreateWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateWorkflowResponse proto.InternalMessageInfo - -type UpdateWorkflowsRequest struct { - Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} } -func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateWorkflowsRequest) ProtoMessage() {} -func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{2} -} -func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) -} -func (m *UpdateWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *UpdateWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateWorkflowsRequest.Merge(dst, src) -} -func (m *UpdateWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_UpdateWorkflowsRequest.Size(m) -} -func (m *UpdateWorkflowsRequest) XXX_DiscardUnknown() { - 
xxx_messageInfo_UpdateWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateWorkflowsRequest proto.InternalMessageInfo - -func (m *UpdateWorkflowsRequest) GetDefs() []*model.WorkflowDef { - if m != nil { - return m.Defs - } - return nil -} - -type UpdateWorkflowsResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateWorkflowsResponse) Reset() { *m = UpdateWorkflowsResponse{} } -func (m *UpdateWorkflowsResponse) String() string { return proto.CompactTextString(m) } -func (*UpdateWorkflowsResponse) ProtoMessage() {} -func (*UpdateWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{3} -} -func (m *UpdateWorkflowsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateWorkflowsResponse.Unmarshal(m, b) -} -func (m *UpdateWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateWorkflowsResponse.Marshal(b, m, deterministic) -} -func (dst *UpdateWorkflowsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateWorkflowsResponse.Merge(dst, src) -} -func (m *UpdateWorkflowsResponse) XXX_Size() int { - return xxx_messageInfo_UpdateWorkflowsResponse.Size(m) -} -func (m *UpdateWorkflowsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateWorkflowsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateWorkflowsResponse proto.InternalMessageInfo - -type GetWorkflowRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowRequest) Reset() { *m = GetWorkflowRequest{} } -func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } -func 
(*GetWorkflowRequest) ProtoMessage() {} -func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{4} -} -func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) -} -func (m *GetWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowRequest.Merge(dst, src) -} -func (m *GetWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowRequest.Size(m) -} -func (m *GetWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowRequest proto.InternalMessageInfo - -func (m *GetWorkflowRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetWorkflowRequest) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -type GetWorkflowResponse struct { - Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowResponse) Reset() { *m = GetWorkflowResponse{} } -func (m *GetWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowResponse) ProtoMessage() {} -func (*GetWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{5} -} -func (m *GetWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowResponse.Unmarshal(m, b) -} -func (m *GetWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowResponse) 
XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowResponse.Merge(dst, src) -} -func (m *GetWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_GetWorkflowResponse.Size(m) -} -func (m *GetWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowResponse proto.InternalMessageInfo - -func (m *GetWorkflowResponse) GetWorkflow() *model.WorkflowDef { - if m != nil { - return m.Workflow - } - return nil -} - -type CreateTasksRequest struct { - Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } -func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } -func (*CreateTasksRequest) ProtoMessage() {} -func (*CreateTasksRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{6} -} -func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) -} -func (m *CreateTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateTasksRequest.Marshal(b, m, deterministic) -} -func (dst *CreateTasksRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateTasksRequest.Merge(dst, src) -} -func (m *CreateTasksRequest) XXX_Size() int { - return xxx_messageInfo_CreateTasksRequest.Size(m) -} -func (m *CreateTasksRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CreateTasksRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateTasksRequest proto.InternalMessageInfo - -func (m *CreateTasksRequest) GetDefs() []*model.TaskDef { - if m != nil { - return m.Defs - } - return nil -} - -type CreateTasksResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - 
XXX_sizecache int32 `json:"-"` -} - -func (m *CreateTasksResponse) Reset() { *m = CreateTasksResponse{} } -func (m *CreateTasksResponse) String() string { return proto.CompactTextString(m) } -func (*CreateTasksResponse) ProtoMessage() {} -func (*CreateTasksResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{7} -} -func (m *CreateTasksResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateTasksResponse.Unmarshal(m, b) -} -func (m *CreateTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateTasksResponse.Marshal(b, m, deterministic) -} -func (dst *CreateTasksResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateTasksResponse.Merge(dst, src) -} -func (m *CreateTasksResponse) XXX_Size() int { - return xxx_messageInfo_CreateTasksResponse.Size(m) -} -func (m *CreateTasksResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CreateTasksResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateTasksResponse proto.InternalMessageInfo - -type UpdateTaskRequest struct { - Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } -func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateTaskRequest) ProtoMessage() {} -func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{8} -} -func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) -} -func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) -} -func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) -} -func (m *UpdateTaskRequest) XXX_Size() int { - return xxx_messageInfo_UpdateTaskRequest.Size(m) -} -func (m *UpdateTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo - -func (m *UpdateTaskRequest) GetTask() *model.TaskDef { - if m != nil { - return m.Task - } - return nil -} - -type UpdateTaskResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } -func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) } -func (*UpdateTaskResponse) ProtoMessage() {} -func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{9} -} -func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) -} -func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) -} -func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) -} -func (m *UpdateTaskResponse) XXX_Size() int { - return xxx_messageInfo_UpdateTaskResponse.Size(m) -} -func (m *UpdateTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo - -type GetTaskRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } -func (m *GetTaskRequest) String() string { return 
proto.CompactTextString(m) } -func (*GetTaskRequest) ProtoMessage() {} -func (*GetTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{10} -} -func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) -} -func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) -} -func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskRequest.Merge(dst, src) -} -func (m *GetTaskRequest) XXX_Size() int { - return xxx_messageInfo_GetTaskRequest.Size(m) -} -func (m *GetTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo - -func (m *GetTaskRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -type GetTaskResponse struct { - Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } -func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } -func (*GetTaskResponse) ProtoMessage() {} -func (*GetTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{11} -} -func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) -} -func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) -} -func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskResponse.Merge(dst, src) -} -func (m *GetTaskResponse) XXX_Size() int { - return xxx_messageInfo_GetTaskResponse.Size(m) -} -func (m 
*GetTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo - -func (m *GetTaskResponse) GetTask() *model.TaskDef { - if m != nil { - return m.Task - } - return nil -} - -type DeleteTaskRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } -func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } -func (*DeleteTaskRequest) ProtoMessage() {} -func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{12} -} -func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) -} -func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) -} -func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) -} -func (m *DeleteTaskRequest) XXX_Size() int { - return xxx_messageInfo_DeleteTaskRequest.Size(m) -} -func (m *DeleteTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo - -func (m *DeleteTaskRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -type DeleteTaskResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DeleteTaskResponse) Reset() { *m = DeleteTaskResponse{} } -func (m *DeleteTaskResponse) String() string { return proto.CompactTextString(m) } -func (*DeleteTaskResponse) ProtoMessage() {} -func 
(*DeleteTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{13} -} -func (m *DeleteTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DeleteTaskResponse.Unmarshal(m, b) -} -func (m *DeleteTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DeleteTaskResponse.Marshal(b, m, deterministic) -} -func (dst *DeleteTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_DeleteTaskResponse.Merge(dst, src) -} -func (m *DeleteTaskResponse) XXX_Size() int { - return xxx_messageInfo_DeleteTaskResponse.Size(m) -} -func (m *DeleteTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_DeleteTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_DeleteTaskResponse proto.InternalMessageInfo - -func init() { - proto.RegisterType((*CreateWorkflowRequest)(nil), "conductor.grpc.metadata.CreateWorkflowRequest") - proto.RegisterType((*CreateWorkflowResponse)(nil), "conductor.grpc.metadata.CreateWorkflowResponse") - proto.RegisterType((*UpdateWorkflowsRequest)(nil), "conductor.grpc.metadata.UpdateWorkflowsRequest") - proto.RegisterType((*UpdateWorkflowsResponse)(nil), "conductor.grpc.metadata.UpdateWorkflowsResponse") - proto.RegisterType((*GetWorkflowRequest)(nil), "conductor.grpc.metadata.GetWorkflowRequest") - proto.RegisterType((*GetWorkflowResponse)(nil), "conductor.grpc.metadata.GetWorkflowResponse") - proto.RegisterType((*CreateTasksRequest)(nil), "conductor.grpc.metadata.CreateTasksRequest") - proto.RegisterType((*CreateTasksResponse)(nil), "conductor.grpc.metadata.CreateTasksResponse") - proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.metadata.UpdateTaskRequest") - proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.metadata.UpdateTaskResponse") - proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.metadata.GetTaskRequest") - proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.metadata.GetTaskResponse") - 
proto.RegisterType((*DeleteTaskRequest)(nil), "conductor.grpc.metadata.DeleteTaskRequest") - proto.RegisterType((*DeleteTaskResponse)(nil), "conductor.grpc.metadata.DeleteTaskResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// MetadataServiceClient is the client API for MetadataService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type MetadataServiceClient interface { - // POST /workflow - CreateWorkflow(ctx context.Context, in *CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) - // PUT /workflow - UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) - // GET /workflow/{name} - GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) - // POST /taskdefs - CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) - // PUT /taskdefs - UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) - // GET /taskdefs/{tasktype} - GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) - // DELETE /taskdefs/{tasktype} - DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) -} - -type metadataServiceClient struct { - cc *grpc.ClientConn -} - -func NewMetadataServiceClient(cc *grpc.ClientConn) MetadataServiceClient { - return &metadataServiceClient{cc} -} - -func (c *metadataServiceClient) CreateWorkflow(ctx context.Context, in 
*CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) { - out := new(CreateWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) { - out := new(UpdateWorkflowsResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) { - out := new(GetWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) { - out := new(CreateTasksResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateTasks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { - out := new(UpdateTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { - out := new(GetTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetTask", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) { - out := new(DeleteTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/DeleteTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// MetadataServiceServer is the server API for MetadataService service. -type MetadataServiceServer interface { - // POST /workflow - CreateWorkflow(context.Context, *CreateWorkflowRequest) (*CreateWorkflowResponse, error) - // PUT /workflow - UpdateWorkflows(context.Context, *UpdateWorkflowsRequest) (*UpdateWorkflowsResponse, error) - // GET /workflow/{name} - GetWorkflow(context.Context, *GetWorkflowRequest) (*GetWorkflowResponse, error) - // POST /taskdefs - CreateTasks(context.Context, *CreateTasksRequest) (*CreateTasksResponse, error) - // PUT /taskdefs - UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) - // GET /taskdefs/{tasktype} - GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) - // DELETE /taskdefs/{tasktype} - DeleteTask(context.Context, *DeleteTaskRequest) (*DeleteTaskResponse, error) -} - -func RegisterMetadataServiceServer(s *grpc.Server, srv MetadataServiceServer) { - s.RegisterService(&_MetadataService_serviceDesc, srv) -} - -func _MetadataService_CreateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).CreateWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/CreateWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return 
srv.(MetadataServiceServer).CreateWorkflow(ctx, req.(*CreateWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_UpdateWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).UpdateWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).UpdateWorkflows(ctx, req.(*UpdateWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).GetWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/GetWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).GetWorkflow(ctx, req.(*GetWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_CreateTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateTasksRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).CreateTasks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/CreateTasks", - } 
- handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).CreateTasks(ctx, req.(*CreateTasksRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).UpdateTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).GetTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.metadata.MetadataService/GetTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).GetTask(ctx, req.(*GetTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).DeleteTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: 
"/conductor.grpc.metadata.MetadataService/DeleteTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _MetadataService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "conductor.grpc.metadata.MetadataService", - HandlerType: (*MetadataServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateWorkflow", - Handler: _MetadataService_CreateWorkflow_Handler, - }, - { - MethodName: "UpdateWorkflows", - Handler: _MetadataService_UpdateWorkflows_Handler, - }, - { - MethodName: "GetWorkflow", - Handler: _MetadataService_GetWorkflow_Handler, - }, - { - MethodName: "CreateTasks", - Handler: _MetadataService_CreateTasks_Handler, - }, - { - MethodName: "UpdateTask", - Handler: _MetadataService_UpdateTask_Handler, - }, - { - MethodName: "GetTask", - Handler: _MetadataService_GetTask_Handler, - }, - { - MethodName: "DeleteTask", - Handler: _MetadataService_DeleteTask_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "grpc/metadata_service.proto", -} - -func init() { - proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_4778cc9d199e5aef) -} - -var fileDescriptor_metadata_service_4778cc9d199e5aef = []byte{ - // 526 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdf, 0x6b, 0xd3, 0x50, - 0x18, 0xa5, 0xba, 0xb9, 0xed, 0x1b, 0xac, 0xf4, 0x76, 0x5b, 0x63, 0xe6, 0x43, 0xc9, 0x8b, 0xc5, - 0xcd, 0x9b, 0x32, 0x5f, 0x7c, 0x13, 0xe3, 0x60, 0x20, 0x88, 0x5a, 0x27, 0x82, 0x08, 0x23, 0x4d, - 0xbe, 0x74, 0xa1, 0x49, 0x6e, 0x96, 0x7b, 0xbb, 0xd9, 0x7f, 0xdd, 0x27, 0x49, 0x72, 0xf3, 0xbb, - 0x4d, 0x2b, 0xbe, 0xb5, 0xf7, 0x9e, 0xef, 0x9c, 0x9c, 0x8f, 0x73, 0xb8, 0x70, 0x36, 0x8b, 0x42, - 0x4b, 0xf7, 0x51, 0x98, 0xb6, 0x29, 0xcc, 0x5b, 0x8e, 0xd1, 0x83, 0x6b, 0x21, 0x0d, 0x23, 0x26, - 0x18, 0x19, 0x58, 
0x2c, 0xb0, 0x17, 0x96, 0x60, 0x11, 0x8d, 0x61, 0x34, 0x83, 0xa9, 0x7d, 0x9f, - 0xd9, 0xe8, 0xe9, 0xc2, 0xe4, 0x73, 0x1b, 0x9d, 0x14, 0xad, 0x0e, 0xd2, 0xc3, 0x47, 0x16, 0xcd, - 0x1d, 0x8f, 0x3d, 0xe6, 0x17, 0xda, 0x57, 0x38, 0xf9, 0x10, 0xa1, 0x29, 0xf0, 0x87, 0xbc, 0x9a, - 0xe0, 0xfd, 0x02, 0xb9, 0x20, 0x6f, 0x61, 0x3f, 0x43, 0x2b, 0x9d, 0x61, 0x67, 0x74, 0x78, 0xf9, - 0x82, 0x16, 0x92, 0xc9, 0x30, 0xcd, 0x66, 0xae, 0xd0, 0x99, 0xe4, 0x68, 0x4d, 0x81, 0xd3, 0x3a, - 0x25, 0x0f, 0x59, 0xc0, 0x51, 0xfb, 0x08, 0xa7, 0xdf, 0x43, 0xbb, 0x74, 0xc3, 0x33, 0xb5, 0x31, - 0xec, 0xd8, 0xe8, 0x70, 0xa5, 0x33, 0x7c, 0xba, 0x51, 0x29, 0x41, 0x6a, 0xcf, 0x61, 0xd0, 0xe0, - 0x92, 0x32, 0x06, 0x90, 0x6b, 0x14, 0x75, 0x43, 0x04, 0x76, 0x02, 0xd3, 0xc7, 0xc4, 0xcc, 0xc1, - 0x24, 0xf9, 0x4d, 0x14, 0xd8, 0x7b, 0xc0, 0x88, 0xbb, 0x2c, 0x50, 0x9e, 0x0c, 0x3b, 0xa3, 0xdd, - 0x49, 0xf6, 0x57, 0xfb, 0x0c, 0xfd, 0x0a, 0x47, 0x4a, 0xfd, 0x1f, 0x5b, 0x31, 0x80, 0xa4, 0x5b, - 0xb9, 0x31, 0xf9, 0x3c, 0xf7, 0x7d, 0x51, 0xf1, 0xad, 0x34, 0xb8, 0x62, 0x70, 0xe1, 0xf9, 0x04, - 0xfa, 0x15, 0x0e, 0xe9, 0xf7, 0x3d, 0xf4, 0xd2, 0x55, 0xc4, 0xc7, 0x25, 0xe6, 0x38, 0x02, 0xf2, - 0x2b, 0x5b, 0x98, 0x63, 0x94, 0x76, 0x0c, 0xa4, 0x4c, 0x21, 0x89, 0x5f, 0xc3, 0xd1, 0x35, 0x8a, - 0x32, 0xeb, 0x19, 0x1c, 0xc4, 0xf8, 0x5b, 0xb1, 0x0c, 0xb3, 0x4d, 0xee, 0xc7, 0x07, 0x37, 0xcb, - 0x10, 0xb5, 0x77, 0xd0, 0xcd, 0xe1, 0x72, 0x5f, 0xff, 0xf6, 0x15, 0x63, 0xe8, 0x5d, 0xa1, 0x87, - 0x55, 0x23, 0xad, 0x92, 0xc7, 0x40, 0xca, 0x13, 0xa9, 0xea, 0xe5, 0x9f, 0x5d, 0xe8, 0x7e, 0x92, - 0x7d, 0xf8, 0x96, 0xb6, 0x86, 0xdc, 0xc3, 0x51, 0x35, 0x95, 0x84, 0xd2, 0x35, 0x15, 0xa2, 0x2b, - 0x1b, 0xa1, 0xea, 0x5b, 0xe3, 0xa5, 0x79, 0x01, 0xdd, 0x5a, 0x44, 0xc9, 0x7a, 0x8e, 0xd5, 0xc5, - 0x50, 0xc7, 0xdb, 0x0f, 0x48, 0xd5, 0x3b, 0x38, 0x2c, 0x25, 0x97, 0x9c, 0xaf, 0x25, 0x68, 0x76, - 0x44, 0xbd, 0xd8, 0x0e, 0x5c, 0x28, 0x95, 0xe2, 0xd8, 0xa2, 0xd4, 0x0c, 0x7e, 0x8b, 0xd2, 0x8a, - 0x84, 0x13, 0x04, 0x28, 0xe2, 0x49, 0x5e, 0x6d, 0xd8, 
0x49, 0x29, 0x3d, 0xea, 0xf9, 0x56, 0x58, - 0x29, 0xf3, 0x0b, 0xf6, 0x64, 0x80, 0xc9, 0xcb, 0xb6, 0x4d, 0x94, 0x05, 0x46, 0x9b, 0x81, 0x85, - 0x89, 0x22, 0xab, 0x2d, 0x26, 0x1a, 0x15, 0x68, 0x31, 0xd1, 0x0c, 0xbf, 0xc1, 0x41, 0xb5, 0x98, - 0x4f, 0x03, 0x14, 0x8e, 0xe7, 0xfe, 0xae, 0x4d, 0x1a, 0xbd, 0x5a, 0x2f, 0xbe, 0x4c, 0x7f, 0x1a, - 0x33, 0x57, 0xdc, 0x2d, 0xa6, 0xd4, 0x62, 0xbe, 0x2e, 0xa7, 0xf4, 0x7c, 0x4a, 0xb7, 0x3c, 0x17, - 0x03, 0xa1, 0xcf, 0x58, 0xf2, 0x18, 0x15, 0xe7, 0x95, 0xb7, 0x69, 0xfa, 0x2c, 0xa9, 0xf3, 0x9b, - 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5f, 0x81, 0xa1, 0x07, 0xb3, 0x06, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go b/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go deleted file mode 100644 index 2a5710156..000000000 --- a/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go +++ /dev/null @@ -1,113 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/search.proto - -package search // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type Request struct { - Start int32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` - Size int32 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` - Sort string `protobuf:"bytes,3,opt,name=sort,proto3" json:"sort,omitempty"` - FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText,proto3" json:"free_text,omitempty"` - Query string `protobuf:"bytes,5,opt,name=query,proto3" json:"query,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Request) Reset() { *m = Request{} } -func (m *Request) String() string { return proto.CompactTextString(m) } -func (*Request) ProtoMessage() {} -func (*Request) Descriptor() ([]byte, []int) { - return fileDescriptor_search_855a611014aa2143, []int{0} -} -func (m *Request) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Request.Unmarshal(m, b) -} -func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Request.Marshal(b, m, deterministic) -} -func (dst *Request) XXX_Merge(src proto.Message) { - xxx_messageInfo_Request.Merge(dst, src) -} -func (m *Request) XXX_Size() int { - return xxx_messageInfo_Request.Size(m) -} -func (m *Request) XXX_DiscardUnknown() { - xxx_messageInfo_Request.DiscardUnknown(m) -} - -var xxx_messageInfo_Request proto.InternalMessageInfo - -func (m *Request) GetStart() int32 { - if m != nil { - return m.Start - } - return 0 -} - -func (m *Request) GetSize() int32 { - if m != nil { - return m.Size - } - return 0 -} - -func (m *Request) GetSort() string { - if m != nil { - return m.Sort - } - return "" -} - -func (m *Request) GetFreeText() string { - if m != nil { - return m.FreeText - } - return "" -} - -func (m *Request) GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -func init() { - proto.RegisterType((*Request)(nil), 
"conductor.grpc.search.Request") -} - -func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_855a611014aa2143) } - -var fileDescriptor_search_855a611014aa2143 = []byte{ - // 212 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xc1, 0x4a, 0xc4, 0x30, - 0x10, 0x86, 0xa9, 0x6e, 0x75, 0x37, 0x37, 0x83, 0x42, 0xd0, 0xcb, 0xe2, 0x69, 0x4f, 0xc9, 0xc1, - 0x17, 0x90, 0x7d, 0x02, 0xa9, 0x9e, 0xbc, 0xc8, 0x36, 0x4e, 0xdb, 0x40, 0xdb, 0x69, 0x27, 0x13, - 0xa8, 0x7d, 0x7a, 0xe9, 0x54, 0xd4, 0xbd, 0xcd, 0x7c, 0xdf, 0x10, 0xfe, 0xfc, 0xea, 0xa6, 0xa6, - 0xc1, 0xbb, 0x08, 0x27, 0xf2, 0x8d, 0x1d, 0x08, 0x19, 0xf5, 0x9d, 0xc7, 0xfe, 0x33, 0x79, 0x46, - 0xb2, 0x8b, 0xb4, 0xab, 0x7c, 0x9c, 0xd5, 0x75, 0x01, 0x63, 0x82, 0xc8, 0xfa, 0x56, 0xe5, 0x91, - 0x4f, 0xc4, 0x26, 0xdb, 0x67, 0x87, 0xbc, 0x58, 0x17, 0xad, 0xd5, 0x26, 0x86, 0x19, 0xcc, 0x85, - 0x40, 0x99, 0x85, 0x21, 0xb1, 0xb9, 0xdc, 0x67, 0x87, 0x5d, 0x21, 0xb3, 0x7e, 0x50, 0xbb, 0x8a, - 0x00, 0x3e, 0x18, 0x26, 0x36, 0x1b, 0x11, 0xdb, 0x05, 0xbc, 0xc1, 0x24, 0x4f, 0x8f, 0x09, 0xe8, - 0xcb, 0xe4, 0x22, 0xd6, 0xe5, 0xd8, 0xa8, 0x7b, 0x8f, 0x9d, 0xed, 0x81, 0xab, 0x36, 0x4c, 0xf6, - 0x3c, 0xe0, 0x71, 0xfb, 0x2a, 0x09, 0x5f, 0xca, 0xf7, 0xe7, 0x3a, 0x70, 0x93, 0x4a, 0xeb, 0xb1, - 0x73, 0x3f, 0xc7, 0xee, 0xf7, 0xd8, 0xf9, 0x36, 0x40, 0xcf, 0xae, 0x46, 0xf9, 0xf3, 0x1f, 0xff, - 0x57, 0x41, 0x79, 0x25, 0x1d, 0x3c, 0x7d, 0x07, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x4d, 0x39, 0xe7, - 0x18, 0x01, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go deleted file mode 100644 index eabcf3ae1..000000000 --- a/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go +++ /dev/null @@ -1,1757 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: grpc/task_service.proto - -package tasks // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type PollRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` - Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PollRequest) Reset() { *m = PollRequest{} } -func (m *PollRequest) String() string { return proto.CompactTextString(m) } -func (*PollRequest) ProtoMessage() {} -func (*PollRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{0} -} -func (m *PollRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PollRequest.Unmarshal(m, b) -} -func (m *PollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PollRequest.Marshal(b, m, deterministic) -} -func (dst *PollRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PollRequest.Merge(dst, src) -} -func (m *PollRequest) XXX_Size() int { - return 
xxx_messageInfo_PollRequest.Size(m) -} -func (m *PollRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PollRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PollRequest proto.InternalMessageInfo - -func (m *PollRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *PollRequest) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *PollRequest) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -type PollResponse struct { - Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PollResponse) Reset() { *m = PollResponse{} } -func (m *PollResponse) String() string { return proto.CompactTextString(m) } -func (*PollResponse) ProtoMessage() {} -func (*PollResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{1} -} -func (m *PollResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PollResponse.Unmarshal(m, b) -} -func (m *PollResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PollResponse.Marshal(b, m, deterministic) -} -func (dst *PollResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_PollResponse.Merge(dst, src) -} -func (m *PollResponse) XXX_Size() int { - return xxx_messageInfo_PollResponse.Size(m) -} -func (m *PollResponse) XXX_DiscardUnknown() { - xxx_messageInfo_PollResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_PollResponse proto.InternalMessageInfo - -func (m *PollResponse) GetTask() *model.Task { - if m != nil { - return m.Task - } - return nil -} - -type BatchPollRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` 
- Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` - Count int32 `protobuf:"varint,4,opt,name=count,proto3" json:"count,omitempty"` - Timeout int32 `protobuf:"varint,5,opt,name=timeout,proto3" json:"timeout,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *BatchPollRequest) Reset() { *m = BatchPollRequest{} } -func (m *BatchPollRequest) String() string { return proto.CompactTextString(m) } -func (*BatchPollRequest) ProtoMessage() {} -func (*BatchPollRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{2} -} -func (m *BatchPollRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_BatchPollRequest.Unmarshal(m, b) -} -func (m *BatchPollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_BatchPollRequest.Marshal(b, m, deterministic) -} -func (dst *BatchPollRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_BatchPollRequest.Merge(dst, src) -} -func (m *BatchPollRequest) XXX_Size() int { - return xxx_messageInfo_BatchPollRequest.Size(m) -} -func (m *BatchPollRequest) XXX_DiscardUnknown() { - xxx_messageInfo_BatchPollRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_BatchPollRequest proto.InternalMessageInfo - -func (m *BatchPollRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *BatchPollRequest) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *BatchPollRequest) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -func (m *BatchPollRequest) GetCount() int32 { - if m != nil { - return m.Count - } - return 0 -} - -func (m *BatchPollRequest) GetTimeout() int32 { - if m != nil { - return m.Timeout - } - return 0 -} - -type TasksInProgressRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" 
json:"task_type,omitempty"` - StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - Count int32 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} } -func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } -func (*TasksInProgressRequest) ProtoMessage() {} -func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{3} -} -func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) -} -func (m *TasksInProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TasksInProgressRequest.Marshal(b, m, deterministic) -} -func (dst *TasksInProgressRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_TasksInProgressRequest.Merge(dst, src) -} -func (m *TasksInProgressRequest) XXX_Size() int { - return xxx_messageInfo_TasksInProgressRequest.Size(m) -} -func (m *TasksInProgressRequest) XXX_DiscardUnknown() { - xxx_messageInfo_TasksInProgressRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_TasksInProgressRequest proto.InternalMessageInfo - -func (m *TasksInProgressRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *TasksInProgressRequest) GetStartKey() string { - if m != nil { - return m.StartKey - } - return "" -} - -func (m *TasksInProgressRequest) GetCount() int32 { - if m != nil { - return m.Count - } - return 0 -} - -type TasksInProgressResponse struct { - Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func 
(m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse{} } -func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } -func (*TasksInProgressResponse) ProtoMessage() {} -func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{4} -} -func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) -} -func (m *TasksInProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TasksInProgressResponse.Marshal(b, m, deterministic) -} -func (dst *TasksInProgressResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_TasksInProgressResponse.Merge(dst, src) -} -func (m *TasksInProgressResponse) XXX_Size() int { - return xxx_messageInfo_TasksInProgressResponse.Size(m) -} -func (m *TasksInProgressResponse) XXX_DiscardUnknown() { - xxx_messageInfo_TasksInProgressResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_TasksInProgressResponse proto.InternalMessageInfo - -func (m *TasksInProgressResponse) GetTasks() []*model.Task { - if m != nil { - return m.Tasks - } - return nil -} - -type PendingTaskRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } -func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } -func (*PendingTaskRequest) ProtoMessage() {} -func (*PendingTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{5} -} -func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { - return 
xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) -} -func (m *PendingTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PendingTaskRequest.Marshal(b, m, deterministic) -} -func (dst *PendingTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PendingTaskRequest.Merge(dst, src) -} -func (m *PendingTaskRequest) XXX_Size() int { - return xxx_messageInfo_PendingTaskRequest.Size(m) -} -func (m *PendingTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PendingTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PendingTaskRequest proto.InternalMessageInfo - -func (m *PendingTaskRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *PendingTaskRequest) GetTaskRefName() string { - if m != nil { - return m.TaskRefName - } - return "" -} - -type PendingTaskResponse struct { - Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PendingTaskResponse) Reset() { *m = PendingTaskResponse{} } -func (m *PendingTaskResponse) String() string { return proto.CompactTextString(m) } -func (*PendingTaskResponse) ProtoMessage() {} -func (*PendingTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{6} -} -func (m *PendingTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PendingTaskResponse.Unmarshal(m, b) -} -func (m *PendingTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PendingTaskResponse.Marshal(b, m, deterministic) -} -func (dst *PendingTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_PendingTaskResponse.Merge(dst, src) -} -func (m *PendingTaskResponse) XXX_Size() int { - return xxx_messageInfo_PendingTaskResponse.Size(m) -} -func (m *PendingTaskResponse) XXX_DiscardUnknown() { - 
xxx_messageInfo_PendingTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_PendingTaskResponse proto.InternalMessageInfo - -func (m *PendingTaskResponse) GetTask() *model.Task { - if m != nil { - return m.Task - } - return nil -} - -type UpdateTaskRequest struct { - Result *model.TaskResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } -func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateTaskRequest) ProtoMessage() {} -func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{7} -} -func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) -} -func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) -} -func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) -} -func (m *UpdateTaskRequest) XXX_Size() int { - return xxx_messageInfo_UpdateTaskRequest.Size(m) -} -func (m *UpdateTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo - -func (m *UpdateTaskRequest) GetResult() *model.TaskResult { - if m != nil { - return m.Result - } - return nil -} - -type UpdateTaskResponse struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } -func (m *UpdateTaskResponse) String() string { return 
proto.CompactTextString(m) } -func (*UpdateTaskResponse) ProtoMessage() {} -func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{8} -} -func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) -} -func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) -} -func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) -} -func (m *UpdateTaskResponse) XXX_Size() int { - return xxx_messageInfo_UpdateTaskResponse.Size(m) -} -func (m *UpdateTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo - -func (m *UpdateTaskResponse) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type AckTaskRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } -func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) } -func (*AckTaskRequest) ProtoMessage() {} -func (*AckTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{9} -} -func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) -} -func (m *AckTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AckTaskRequest.Marshal(b, m, deterministic) -} -func (dst *AckTaskRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_AckTaskRequest.Merge(dst, src) -} -func (m *AckTaskRequest) XXX_Size() int { - return xxx_messageInfo_AckTaskRequest.Size(m) -} -func (m *AckTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AckTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_AckTaskRequest proto.InternalMessageInfo - -func (m *AckTaskRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *AckTaskRequest) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -type AckTaskResponse struct { - Ack bool `protobuf:"varint,1,opt,name=ack,proto3" json:"ack,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } -func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } -func (*AckTaskResponse) ProtoMessage() {} -func (*AckTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{10} -} -func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) -} -func (m *AckTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AckTaskResponse.Marshal(b, m, deterministic) -} -func (dst *AckTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_AckTaskResponse.Merge(dst, src) -} -func (m *AckTaskResponse) XXX_Size() int { - return xxx_messageInfo_AckTaskResponse.Size(m) -} -func (m *AckTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_AckTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_AckTaskResponse proto.InternalMessageInfo - -func (m *AckTaskResponse) GetAck() bool { - if m != nil { - return m.Ack - } - return false -} - -type AddLogRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - Log string `protobuf:"bytes,2,opt,name=log,proto3" 
json:"log,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } -func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } -func (*AddLogRequest) ProtoMessage() {} -func (*AddLogRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{11} -} -func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) -} -func (m *AddLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddLogRequest.Marshal(b, m, deterministic) -} -func (dst *AddLogRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddLogRequest.Merge(dst, src) -} -func (m *AddLogRequest) XXX_Size() int { - return xxx_messageInfo_AddLogRequest.Size(m) -} -func (m *AddLogRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AddLogRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_AddLogRequest proto.InternalMessageInfo - -func (m *AddLogRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *AddLogRequest) GetLog() string { - if m != nil { - return m.Log - } - return "" -} - -type AddLogResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddLogResponse) Reset() { *m = AddLogResponse{} } -func (m *AddLogResponse) String() string { return proto.CompactTextString(m) } -func (*AddLogResponse) ProtoMessage() {} -func (*AddLogResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{12} -} -func (m *AddLogResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddLogResponse.Unmarshal(m, b) -} -func (m *AddLogResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddLogResponse.Marshal(b, m, deterministic) 
-} -func (dst *AddLogResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddLogResponse.Merge(dst, src) -} -func (m *AddLogResponse) XXX_Size() int { - return xxx_messageInfo_AddLogResponse.Size(m) -} -func (m *AddLogResponse) XXX_DiscardUnknown() { - xxx_messageInfo_AddLogResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_AddLogResponse proto.InternalMessageInfo - -type GetTaskLogsRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskLogsRequest) Reset() { *m = GetTaskLogsRequest{} } -func (m *GetTaskLogsRequest) String() string { return proto.CompactTextString(m) } -func (*GetTaskLogsRequest) ProtoMessage() {} -func (*GetTaskLogsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{13} -} -func (m *GetTaskLogsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskLogsRequest.Unmarshal(m, b) -} -func (m *GetTaskLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskLogsRequest.Marshal(b, m, deterministic) -} -func (dst *GetTaskLogsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskLogsRequest.Merge(dst, src) -} -func (m *GetTaskLogsRequest) XXX_Size() int { - return xxx_messageInfo_GetTaskLogsRequest.Size(m) -} -func (m *GetTaskLogsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskLogsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskLogsRequest proto.InternalMessageInfo - -func (m *GetTaskLogsRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type GetTaskLogsResponse struct { - Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs,proto3" json:"logs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - 
-func (m *GetTaskLogsResponse) Reset() { *m = GetTaskLogsResponse{} } -func (m *GetTaskLogsResponse) String() string { return proto.CompactTextString(m) } -func (*GetTaskLogsResponse) ProtoMessage() {} -func (*GetTaskLogsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{14} -} -func (m *GetTaskLogsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskLogsResponse.Unmarshal(m, b) -} -func (m *GetTaskLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskLogsResponse.Marshal(b, m, deterministic) -} -func (dst *GetTaskLogsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskLogsResponse.Merge(dst, src) -} -func (m *GetTaskLogsResponse) XXX_Size() int { - return xxx_messageInfo_GetTaskLogsResponse.Size(m) -} -func (m *GetTaskLogsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskLogsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskLogsResponse proto.InternalMessageInfo - -func (m *GetTaskLogsResponse) GetLogs() []*model.TaskExecLog { - if m != nil { - return m.Logs - } - return nil -} - -type GetTaskRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } -func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } -func (*GetTaskRequest) ProtoMessage() {} -func (*GetTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{15} -} -func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) -} -func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) -} -func (dst 
*GetTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskRequest.Merge(dst, src) -} -func (m *GetTaskRequest) XXX_Size() int { - return xxx_messageInfo_GetTaskRequest.Size(m) -} -func (m *GetTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo - -func (m *GetTaskRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type GetTaskResponse struct { - Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } -func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } -func (*GetTaskResponse) ProtoMessage() {} -func (*GetTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{16} -} -func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) -} -func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) -} -func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskResponse.Merge(dst, src) -} -func (m *GetTaskResponse) XXX_Size() int { - return xxx_messageInfo_GetTaskResponse.Size(m) -} -func (m *GetTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo - -func (m *GetTaskResponse) GetTask() *model.Task { - if m != nil { - return m.Task - } - return nil -} - -type RemoveTaskRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" 
json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveTaskRequest) Reset() { *m = RemoveTaskRequest{} } -func (m *RemoveTaskRequest) String() string { return proto.CompactTextString(m) } -func (*RemoveTaskRequest) ProtoMessage() {} -func (*RemoveTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{17} -} -func (m *RemoveTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveTaskRequest.Unmarshal(m, b) -} -func (m *RemoveTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveTaskRequest.Marshal(b, m, deterministic) -} -func (dst *RemoveTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveTaskRequest.Merge(dst, src) -} -func (m *RemoveTaskRequest) XXX_Size() int { - return xxx_messageInfo_RemoveTaskRequest.Size(m) -} -func (m *RemoveTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveTaskRequest proto.InternalMessageInfo - -func (m *RemoveTaskRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *RemoveTaskRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type RemoveTaskResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveTaskResponse) Reset() { *m = RemoveTaskResponse{} } -func (m *RemoveTaskResponse) String() string { return proto.CompactTextString(m) } -func (*RemoveTaskResponse) ProtoMessage() {} -func (*RemoveTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{18} -} -func (m *RemoveTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveTaskResponse.Unmarshal(m, b) -} -func (m *RemoveTaskResponse) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveTaskResponse.Marshal(b, m, deterministic) -} -func (dst *RemoveTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveTaskResponse.Merge(dst, src) -} -func (m *RemoveTaskResponse) XXX_Size() int { - return xxx_messageInfo_RemoveTaskResponse.Size(m) -} -func (m *RemoveTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveTaskResponse proto.InternalMessageInfo - -type QueueSizesRequest struct { - TaskTypes []string `protobuf:"bytes,1,rep,name=task_types,json=taskTypes,proto3" json:"task_types,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueSizesRequest) Reset() { *m = QueueSizesRequest{} } -func (m *QueueSizesRequest) String() string { return proto.CompactTextString(m) } -func (*QueueSizesRequest) ProtoMessage() {} -func (*QueueSizesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{19} -} -func (m *QueueSizesRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueSizesRequest.Unmarshal(m, b) -} -func (m *QueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueSizesRequest.Marshal(b, m, deterministic) -} -func (dst *QueueSizesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueSizesRequest.Merge(dst, src) -} -func (m *QueueSizesRequest) XXX_Size() int { - return xxx_messageInfo_QueueSizesRequest.Size(m) -} -func (m *QueueSizesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_QueueSizesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueSizesRequest proto.InternalMessageInfo - -func (m *QueueSizesRequest) GetTaskTypes() []string { - if m != nil { - return m.TaskTypes - } - return nil -} - -type QueueSizesResponse struct { - QueueForTask map[string]int32 
`protobuf:"bytes,1,rep,name=queue_for_task,json=queueForTask,proto3" json:"queue_for_task,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueSizesResponse) Reset() { *m = QueueSizesResponse{} } -func (m *QueueSizesResponse) String() string { return proto.CompactTextString(m) } -func (*QueueSizesResponse) ProtoMessage() {} -func (*QueueSizesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{20} -} -func (m *QueueSizesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueSizesResponse.Unmarshal(m, b) -} -func (m *QueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueSizesResponse.Marshal(b, m, deterministic) -} -func (dst *QueueSizesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueSizesResponse.Merge(dst, src) -} -func (m *QueueSizesResponse) XXX_Size() int { - return xxx_messageInfo_QueueSizesResponse.Size(m) -} -func (m *QueueSizesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_QueueSizesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueSizesResponse proto.InternalMessageInfo - -func (m *QueueSizesResponse) GetQueueForTask() map[string]int32 { - if m != nil { - return m.QueueForTask - } - return nil -} - -type QueueInfoRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueInfoRequest) Reset() { *m = QueueInfoRequest{} } -func (m *QueueInfoRequest) String() string { return proto.CompactTextString(m) } -func (*QueueInfoRequest) ProtoMessage() {} -func (*QueueInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{21} -} -func (m *QueueInfoRequest) XXX_Unmarshal(b []byte) error { - return 
xxx_messageInfo_QueueInfoRequest.Unmarshal(m, b) -} -func (m *QueueInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueInfoRequest.Marshal(b, m, deterministic) -} -func (dst *QueueInfoRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueInfoRequest.Merge(dst, src) -} -func (m *QueueInfoRequest) XXX_Size() int { - return xxx_messageInfo_QueueInfoRequest.Size(m) -} -func (m *QueueInfoRequest) XXX_DiscardUnknown() { - xxx_messageInfo_QueueInfoRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueInfoRequest proto.InternalMessageInfo - -type QueueInfoResponse struct { - Queues map[string]int64 `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueInfoResponse) Reset() { *m = QueueInfoResponse{} } -func (m *QueueInfoResponse) String() string { return proto.CompactTextString(m) } -func (*QueueInfoResponse) ProtoMessage() {} -func (*QueueInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{22} -} -func (m *QueueInfoResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueInfoResponse.Unmarshal(m, b) -} -func (m *QueueInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueInfoResponse.Marshal(b, m, deterministic) -} -func (dst *QueueInfoResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueInfoResponse.Merge(dst, src) -} -func (m *QueueInfoResponse) XXX_Size() int { - return xxx_messageInfo_QueueInfoResponse.Size(m) -} -func (m *QueueInfoResponse) XXX_DiscardUnknown() { - xxx_messageInfo_QueueInfoResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueInfoResponse proto.InternalMessageInfo - -func (m *QueueInfoResponse) GetQueues() map[string]int64 { - 
if m != nil { - return m.Queues - } - return nil -} - -type QueueAllInfoRequest struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueAllInfoRequest) Reset() { *m = QueueAllInfoRequest{} } -func (m *QueueAllInfoRequest) String() string { return proto.CompactTextString(m) } -func (*QueueAllInfoRequest) ProtoMessage() {} -func (*QueueAllInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{23} -} -func (m *QueueAllInfoRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueAllInfoRequest.Unmarshal(m, b) -} -func (m *QueueAllInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueAllInfoRequest.Marshal(b, m, deterministic) -} -func (dst *QueueAllInfoRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueAllInfoRequest.Merge(dst, src) -} -func (m *QueueAllInfoRequest) XXX_Size() int { - return xxx_messageInfo_QueueAllInfoRequest.Size(m) -} -func (m *QueueAllInfoRequest) XXX_DiscardUnknown() { - xxx_messageInfo_QueueAllInfoRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueAllInfoRequest proto.InternalMessageInfo - -type QueueAllInfoResponse struct { - Queues map[string]*QueueAllInfoResponse_QueueInfo `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueAllInfoResponse) Reset() { *m = QueueAllInfoResponse{} } -func (m *QueueAllInfoResponse) String() string { return proto.CompactTextString(m) } -func (*QueueAllInfoResponse) ProtoMessage() {} -func (*QueueAllInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{24} -} -func (m *QueueAllInfoResponse) XXX_Unmarshal(b []byte) error 
{ - return xxx_messageInfo_QueueAllInfoResponse.Unmarshal(m, b) -} -func (m *QueueAllInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueAllInfoResponse.Marshal(b, m, deterministic) -} -func (dst *QueueAllInfoResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueAllInfoResponse.Merge(dst, src) -} -func (m *QueueAllInfoResponse) XXX_Size() int { - return xxx_messageInfo_QueueAllInfoResponse.Size(m) -} -func (m *QueueAllInfoResponse) XXX_DiscardUnknown() { - xxx_messageInfo_QueueAllInfoResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueAllInfoResponse proto.InternalMessageInfo - -func (m *QueueAllInfoResponse) GetQueues() map[string]*QueueAllInfoResponse_QueueInfo { - if m != nil { - return m.Queues - } - return nil -} - -type QueueAllInfoResponse_ShardInfo struct { - Size int64 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` - Uacked int64 `protobuf:"varint,2,opt,name=uacked,proto3" json:"uacked,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueAllInfoResponse_ShardInfo) Reset() { *m = QueueAllInfoResponse_ShardInfo{} } -func (m *QueueAllInfoResponse_ShardInfo) String() string { return proto.CompactTextString(m) } -func (*QueueAllInfoResponse_ShardInfo) ProtoMessage() {} -func (*QueueAllInfoResponse_ShardInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 0} -} -func (m *QueueAllInfoResponse_ShardInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Unmarshal(m, b) -} -func (m *QueueAllInfoResponse_ShardInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Marshal(b, m, deterministic) -} -func (dst *QueueAllInfoResponse_ShardInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Merge(dst, src) -} -func 
(m *QueueAllInfoResponse_ShardInfo) XXX_Size() int { - return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Size(m) -} -func (m *QueueAllInfoResponse_ShardInfo) XXX_DiscardUnknown() { - xxx_messageInfo_QueueAllInfoResponse_ShardInfo.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueAllInfoResponse_ShardInfo proto.InternalMessageInfo - -func (m *QueueAllInfoResponse_ShardInfo) GetSize() int64 { - if m != nil { - return m.Size - } - return 0 -} - -func (m *QueueAllInfoResponse_ShardInfo) GetUacked() int64 { - if m != nil { - return m.Uacked - } - return 0 -} - -type QueueAllInfoResponse_QueueInfo struct { - Shards map[string]*QueueAllInfoResponse_ShardInfo `protobuf:"bytes,1,rep,name=shards,proto3" json:"shards,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueueAllInfoResponse_QueueInfo) Reset() { *m = QueueAllInfoResponse_QueueInfo{} } -func (m *QueueAllInfoResponse_QueueInfo) String() string { return proto.CompactTextString(m) } -func (*QueueAllInfoResponse_QueueInfo) ProtoMessage() {} -func (*QueueAllInfoResponse_QueueInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 1} -} -func (m *QueueAllInfoResponse_QueueInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Unmarshal(m, b) -} -func (m *QueueAllInfoResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Marshal(b, m, deterministic) -} -func (dst *QueueAllInfoResponse_QueueInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Merge(dst, src) -} -func (m *QueueAllInfoResponse_QueueInfo) XXX_Size() int { - return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Size(m) -} -func (m *QueueAllInfoResponse_QueueInfo) XXX_DiscardUnknown() { - 
xxx_messageInfo_QueueAllInfoResponse_QueueInfo.DiscardUnknown(m) -} - -var xxx_messageInfo_QueueAllInfoResponse_QueueInfo proto.InternalMessageInfo - -func (m *QueueAllInfoResponse_QueueInfo) GetShards() map[string]*QueueAllInfoResponse_ShardInfo { - if m != nil { - return m.Shards - } - return nil -} - -func init() { - proto.RegisterType((*PollRequest)(nil), "conductor.grpc.tasks.PollRequest") - proto.RegisterType((*PollResponse)(nil), "conductor.grpc.tasks.PollResponse") - proto.RegisterType((*BatchPollRequest)(nil), "conductor.grpc.tasks.BatchPollRequest") - proto.RegisterType((*TasksInProgressRequest)(nil), "conductor.grpc.tasks.TasksInProgressRequest") - proto.RegisterType((*TasksInProgressResponse)(nil), "conductor.grpc.tasks.TasksInProgressResponse") - proto.RegisterType((*PendingTaskRequest)(nil), "conductor.grpc.tasks.PendingTaskRequest") - proto.RegisterType((*PendingTaskResponse)(nil), "conductor.grpc.tasks.PendingTaskResponse") - proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.tasks.UpdateTaskRequest") - proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.tasks.UpdateTaskResponse") - proto.RegisterType((*AckTaskRequest)(nil), "conductor.grpc.tasks.AckTaskRequest") - proto.RegisterType((*AckTaskResponse)(nil), "conductor.grpc.tasks.AckTaskResponse") - proto.RegisterType((*AddLogRequest)(nil), "conductor.grpc.tasks.AddLogRequest") - proto.RegisterType((*AddLogResponse)(nil), "conductor.grpc.tasks.AddLogResponse") - proto.RegisterType((*GetTaskLogsRequest)(nil), "conductor.grpc.tasks.GetTaskLogsRequest") - proto.RegisterType((*GetTaskLogsResponse)(nil), "conductor.grpc.tasks.GetTaskLogsResponse") - proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.tasks.GetTaskRequest") - proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.tasks.GetTaskResponse") - proto.RegisterType((*RemoveTaskRequest)(nil), "conductor.grpc.tasks.RemoveTaskRequest") - proto.RegisterType((*RemoveTaskResponse)(nil), 
"conductor.grpc.tasks.RemoveTaskResponse") - proto.RegisterType((*QueueSizesRequest)(nil), "conductor.grpc.tasks.QueueSizesRequest") - proto.RegisterType((*QueueSizesResponse)(nil), "conductor.grpc.tasks.QueueSizesResponse") - proto.RegisterMapType((map[string]int32)(nil), "conductor.grpc.tasks.QueueSizesResponse.QueueForTaskEntry") - proto.RegisterType((*QueueInfoRequest)(nil), "conductor.grpc.tasks.QueueInfoRequest") - proto.RegisterType((*QueueInfoResponse)(nil), "conductor.grpc.tasks.QueueInfoResponse") - proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.tasks.QueueInfoResponse.QueuesEntry") - proto.RegisterType((*QueueAllInfoRequest)(nil), "conductor.grpc.tasks.QueueAllInfoRequest") - proto.RegisterType((*QueueAllInfoResponse)(nil), "conductor.grpc.tasks.QueueAllInfoResponse") - proto.RegisterMapType((map[string]*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueuesEntry") - proto.RegisterType((*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.ShardInfo") - proto.RegisterType((*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo") - proto.RegisterMapType((map[string]*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo.ShardsEntry") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// TaskServiceClient is the client API for TaskService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
-type TaskServiceClient interface { - // GET /poll/{tasktype} - Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) - // /poll/batch/{tasktype} - BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) - // GET /in_progress/{tasktype} - GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) - // GET /in_progress/{workflowId}/{taskRefName} - GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) - // POST / - UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) - // POST /{taskId}/ack - AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) - // POST /{taskId}/log - AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) - // GET {taskId}/log - GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) - // GET /{taskId} - GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) - // DELETE /queue/{taskType}/{taskId} - RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) - // GET /queue/sizes - GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) - // GET /queue/all - GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) - // GET /queue/all/verbose - GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) -} - -type taskServiceClient struct { - cc *grpc.ClientConn -} - -func NewTaskServiceClient(cc *grpc.ClientConn) TaskServiceClient { - return 
&taskServiceClient{cc} -} - -func (c *taskServiceClient) Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) { - out := new(PollResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/Poll", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) { - stream, err := c.cc.NewStream(ctx, &_TaskService_serviceDesc.Streams[0], "/conductor.grpc.tasks.TaskService/BatchPoll", opts...) - if err != nil { - return nil, err - } - x := &taskServiceBatchPollClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type TaskService_BatchPollClient interface { - Recv() (*model.Task, error) - grpc.ClientStream -} - -type taskServiceBatchPollClient struct { - grpc.ClientStream -} - -func (x *taskServiceBatchPollClient) Recv() (*model.Task, error) { - m := new(model.Task) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *taskServiceClient) GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) { - out := new(TasksInProgressResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTasksInProgress", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) { - out := new(PendingTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { - out := new(UpdateTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/UpdateTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) { - out := new(AckTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AckTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) { - out := new(AddLogResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AddLog", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) { - out := new(GetTaskLogsResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTaskLogs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { - out := new(GetTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) { - out := new(RemoveTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) { - out := new(QueueSizesResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) { - out := new(QueueInfoResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueInfo", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) { - out := new(QueueAllInfoResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TaskServiceServer is the server API for TaskService service. 
-type TaskServiceServer interface { - // GET /poll/{tasktype} - Poll(context.Context, *PollRequest) (*PollResponse, error) - // /poll/batch/{tasktype} - BatchPoll(*BatchPollRequest, TaskService_BatchPollServer) error - // GET /in_progress/{tasktype} - GetTasksInProgress(context.Context, *TasksInProgressRequest) (*TasksInProgressResponse, error) - // GET /in_progress/{workflowId}/{taskRefName} - GetPendingTaskForWorkflow(context.Context, *PendingTaskRequest) (*PendingTaskResponse, error) - // POST / - UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) - // POST /{taskId}/ack - AckTask(context.Context, *AckTaskRequest) (*AckTaskResponse, error) - // POST /{taskId}/log - AddLog(context.Context, *AddLogRequest) (*AddLogResponse, error) - // GET {taskId}/log - GetTaskLogs(context.Context, *GetTaskLogsRequest) (*GetTaskLogsResponse, error) - // GET /{taskId} - GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) - // DELETE /queue/{taskType}/{taskId} - RemoveTaskFromQueue(context.Context, *RemoveTaskRequest) (*RemoveTaskResponse, error) - // GET /queue/sizes - GetQueueSizesForTasks(context.Context, *QueueSizesRequest) (*QueueSizesResponse, error) - // GET /queue/all - GetQueueInfo(context.Context, *QueueInfoRequest) (*QueueInfoResponse, error) - // GET /queue/all/verbose - GetQueueAllInfo(context.Context, *QueueAllInfoRequest) (*QueueAllInfoResponse, error) -} - -func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { - s.RegisterService(&_TaskService_serviceDesc, srv) -} - -func _TaskService_Poll_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PollRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).Poll(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/Poll", - } - handler := 
func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).Poll(ctx, req.(*PollRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_BatchPoll_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(BatchPollRequest) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(TaskServiceServer).BatchPoll(m, &taskServiceBatchPollServer{stream}) -} - -type TaskService_BatchPollServer interface { - Send(*model.Task) error - grpc.ServerStream -} - -type taskServiceBatchPollServer struct { - grpc.ServerStream -} - -func (x *taskServiceBatchPollServer) Send(m *model.Task) error { - return x.ServerStream.SendMsg(m) -} - -func _TaskService_GetTasksInProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TasksInProgressRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetTasksInProgress(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetTasksInProgress", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetTasksInProgress(ctx, req.(*TasksInProgressRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetPendingTaskForWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PendingTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { 
- return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, req.(*PendingTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).UpdateTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/UpdateTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_AckTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AckTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).AckTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/AckTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).AckTask(ctx, req.(*AckTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_AddLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddLogRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).AddLog(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/AddLog", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return 
srv.(TaskServiceServer).AddLog(ctx, req.(*AddLogRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetTaskLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTaskLogsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetTaskLogs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetTaskLogs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetTaskLogs(ctx, req.(*GetTaskLogsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetTask(ctx, req.(*GetTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_RemoveTaskFromQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RemoveTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return 
srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, req.(*RemoveTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetQueueSizesForTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(QueueSizesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, req.(*QueueSizesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetQueueInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(QueueInfoRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetQueueInfo(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueInfo", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetQueueInfo(ctx, req.(*QueueInfoRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetQueueAllInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(QueueAllInfoRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetQueueAllInfo(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", - } - handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetQueueAllInfo(ctx, req.(*QueueAllInfoRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _TaskService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "conductor.grpc.tasks.TaskService", - HandlerType: (*TaskServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Poll", - Handler: _TaskService_Poll_Handler, - }, - { - MethodName: "GetTasksInProgress", - Handler: _TaskService_GetTasksInProgress_Handler, - }, - { - MethodName: "GetPendingTaskForWorkflow", - Handler: _TaskService_GetPendingTaskForWorkflow_Handler, - }, - { - MethodName: "UpdateTask", - Handler: _TaskService_UpdateTask_Handler, - }, - { - MethodName: "AckTask", - Handler: _TaskService_AckTask_Handler, - }, - { - MethodName: "AddLog", - Handler: _TaskService_AddLog_Handler, - }, - { - MethodName: "GetTaskLogs", - Handler: _TaskService_GetTaskLogs_Handler, - }, - { - MethodName: "GetTask", - Handler: _TaskService_GetTask_Handler, - }, - { - MethodName: "RemoveTaskFromQueue", - Handler: _TaskService_RemoveTaskFromQueue_Handler, - }, - { - MethodName: "GetQueueSizesForTasks", - Handler: _TaskService_GetQueueSizesForTasks_Handler, - }, - { - MethodName: "GetQueueInfo", - Handler: _TaskService_GetQueueInfo_Handler, - }, - { - MethodName: "GetQueueAllInfo", - Handler: _TaskService_GetQueueAllInfo_Handler, - }, - }, - Streams: []grpc.StreamDesc{ - { - StreamName: "BatchPoll", - Handler: _TaskService_BatchPoll_Handler, - ServerStreams: true, - }, - }, - Metadata: "grpc/task_service.proto", -} - -func init() { - proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_2cd893b942ad08bb) -} - -var fileDescriptor_task_service_2cd893b942ad08bb = []byte{ - // 1114 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xdd, 0x72, 0xdb, 0x54, - 0x10, 0x1e, 0xc5, 0x89, 0x53, 0xaf, 0x93, 0xd4, 0x39, 0xf9, 0x33, 0x2a, 0x0c, 
0x41, 0x2d, 0x6d, - 0x02, 0x54, 0xe9, 0x24, 0x0c, 0xd0, 0x0c, 0x33, 0x6d, 0x32, 0x43, 0x82, 0x69, 0xe8, 0x04, 0xa5, - 0x94, 0x9f, 0x1b, 0xa3, 0x48, 0xc7, 0xb2, 0xb0, 0xac, 0xe3, 0x48, 0x47, 0x69, 0xd2, 0xe7, 0xe0, - 0x8e, 0x07, 0xe1, 0x4d, 0x78, 0x07, 0x5e, 0x81, 0x2b, 0xe6, 0xfc, 0x48, 0x3a, 0x8e, 0x25, 0xdb, - 0x19, 0xa6, 0x77, 0x3a, 0xab, 0xdd, 0xfd, 0xbe, 0x5d, 0xad, 0xf7, 0x3b, 0x86, 0x0d, 0x2f, 0x1a, - 0x38, 0x3b, 0xd4, 0x8e, 0x7b, 0xed, 0x18, 0x47, 0x97, 0xbe, 0x83, 0xcd, 0x41, 0x44, 0x28, 0x41, - 0xab, 0x0e, 0x09, 0xdd, 0xc4, 0xa1, 0x24, 0x32, 0x99, 0x8b, 0xc9, 0x5c, 0x62, 0x7d, 0xa3, 0x4f, - 0x5c, 0x1c, 0x70, 0x7f, 0x7c, 0x85, 0x9d, 0x80, 0x78, 0xc2, 0x5d, 0x5f, 0xcf, 0x5f, 0x44, 0x38, - 0x4e, 0x02, 0x2a, 0xed, 0x8d, 0xdc, 0x2e, 0x2c, 0x46, 0x1b, 0xea, 0xa7, 0x24, 0x08, 0x2c, 0x7c, - 0x91, 0xe0, 0x98, 0xa2, 0x7b, 0x50, 0xe3, 0xe8, 0xf4, 0x7a, 0x80, 0x9b, 0xda, 0xa6, 0xb6, 0x55, - 0xb3, 0xee, 0x30, 0xc3, 0xab, 0xeb, 0x01, 0x66, 0x2f, 0xdf, 0x90, 0xa8, 0x87, 0xa3, 0xb6, 0xef, - 0x36, 0x67, 0xc4, 0x4b, 0x61, 0x68, 0xb9, 0x68, 0x1d, 0xaa, 0x2e, 0xe9, 0xdb, 0x7e, 0xd8, 0xac, - 0xf0, 0x37, 0xf2, 0x64, 0x3c, 0x85, 0x05, 0x01, 0x10, 0x0f, 0x48, 0x18, 0x63, 0xb4, 0x0d, 0xb3, - 0x2c, 0x21, 0x4f, 0x5e, 0xdf, 0x5d, 0x33, 0xf3, 0xc2, 0x38, 0x21, 0xf3, 0x95, 0x1d, 0xf7, 0x2c, - 0xee, 0x62, 0xfc, 0xa1, 0x41, 0xe3, 0xd0, 0xa6, 0x4e, 0xf7, 0x9d, 0x32, 0x44, 0xab, 0x30, 0xe7, - 0x90, 0x24, 0xa4, 0xcd, 0xd9, 0x4d, 0x6d, 0x6b, 0xce, 0x12, 0x07, 0xd4, 0x84, 0x79, 0xea, 0xf7, - 0x31, 0x49, 0x68, 0x73, 0x8e, 0xdb, 0xd3, 0xa3, 0xd1, 0x85, 0x75, 0x46, 0x32, 0x6e, 0x85, 0xa7, - 0x11, 0xf1, 0x22, 0x1c, 0xc7, 0xd3, 0x72, 0x8b, 0xa9, 0x1d, 0xd1, 0x76, 0x0f, 0x5f, 0xa7, 0xdc, - 0xb8, 0xe1, 0x05, 0xbe, 0xce, 0x39, 0x54, 0x14, 0x0e, 0xc6, 0x11, 0x6c, 0x8c, 0x20, 0xc9, 0x36, - 0x7e, 0x0a, 0x73, 0x7c, 0x06, 0x9a, 0xda, 0x66, 0xa5, 0xbc, 0x8f, 0xc2, 0xc7, 0xf8, 0x05, 0xd0, - 0x29, 0x0e, 0x5d, 0x3f, 0xf4, 0xb8, 0x55, 0xb2, 0xfd, 0x10, 0xea, 0xac, 0x37, 0x9d, 0x80, 0xbc, - 0x61, 0xed, 0x12, 
0x7c, 0x21, 0x35, 0xb5, 0x5c, 0x64, 0xc0, 0x22, 0x2f, 0x27, 0xc2, 0x9d, 0x76, - 0x68, 0xf7, 0xb1, 0x64, 0x5d, 0xa7, 0x3c, 0x49, 0xe7, 0xa5, 0xdd, 0xc7, 0xc6, 0x73, 0x58, 0x19, - 0x4a, 0x7d, 0xfb, 0xaf, 0xfc, 0x2d, 0x2c, 0xff, 0x38, 0x70, 0x6d, 0x8a, 0x55, 0x6e, 0x7b, 0x50, - 0x15, 0x83, 0x2b, 0x33, 0xdc, 0x2b, 0xce, 0xc0, 0x5d, 0x2c, 0xe9, 0x6a, 0x3c, 0x06, 0xa4, 0x66, - 0x92, 0x54, 0x36, 0x60, 0x9e, 0x57, 0x91, 0x95, 0x58, 0x65, 0xc7, 0x96, 0x6b, 0x1c, 0xc1, 0xd2, - 0x81, 0xd3, 0x53, 0x51, 0xcb, 0x5c, 0xc7, 0xce, 0x95, 0x71, 0x1f, 0xee, 0x66, 0x79, 0x24, 0x66, - 0x03, 0x2a, 0xb6, 0x23, 0xaa, 0xbf, 0x63, 0xb1, 0x47, 0x63, 0x1f, 0x16, 0x0f, 0x5c, 0xf7, 0x84, - 0x78, 0x13, 0xb1, 0x1a, 0x50, 0x09, 0x88, 0x27, 0x51, 0xd8, 0xa3, 0xd1, 0x80, 0xa5, 0x34, 0x56, - 0xe4, 0x67, 0x95, 0x1e, 0x63, 0xca, 0x20, 0x4f, 0x88, 0x17, 0x4f, 0x4a, 0x69, 0x1c, 0xc3, 0xca, - 0x90, 0xbb, 0x64, 0xf9, 0x04, 0x66, 0x03, 0xe2, 0xa5, 0x23, 0xf4, 0x7e, 0x61, 0x8b, 0xbf, 0xb9, - 0xc2, 0x0e, 0x43, 0xe6, 0x9e, 0xc6, 0x36, 0x2c, 0xc9, 0x44, 0x13, 0x31, 0xbf, 0x86, 0xbb, 0x99, - 0xeb, 0xed, 0x87, 0xa2, 0x05, 0xcb, 0x16, 0xee, 0x93, 0xcb, 0xa1, 0xa1, 0x18, 0xfb, 0xf3, 0x52, - 0x88, 0xcc, 0x0c, 0x11, 0x59, 0x05, 0xa4, 0xa6, 0x92, 0x1d, 0xdc, 0x85, 0xe5, 0x1f, 0x12, 0x9c, - 0xe0, 0x33, 0xff, 0x2d, 0xce, 0x1a, 0xf8, 0x01, 0x40, 0x06, 0x20, 0xda, 0x52, 0xb3, 0x6a, 0x29, - 0x42, 0x6c, 0xfc, 0xa5, 0x01, 0x52, 0x83, 0x64, 0x59, 0xbf, 0xc1, 0xd2, 0x05, 0xb3, 0xb6, 0x3b, - 0x24, 0x6a, 0xcb, 0x02, 0x59, 0x43, 0xf7, 0xcd, 0xa2, 0xa5, 0x6d, 0x8e, 0x66, 0x10, 0xa6, 0x23, - 0x12, 0xf1, 0x86, 0x87, 0x34, 0xba, 0xb6, 0x16, 0x2e, 0x14, 0x93, 0xfe, 0x4c, 0x92, 0x55, 0x5d, - 0xd8, 0x9c, 0xb0, 0x4d, 0x22, 0xfa, 0xc0, 0x1e, 0xd9, 0x12, 0xb9, 0xb4, 0x83, 0x44, 0xfc, 0x4e, - 0xe7, 0x2c, 0x71, 0xd8, 0x9f, 0xf9, 0x4a, 0x33, 0x10, 0x34, 0x78, 0x82, 0x56, 0xd8, 0x21, 0xb2, - 0x58, 0xe3, 0x4f, 0x4d, 0x66, 0x15, 0x46, 0x59, 0xcc, 0x0b, 0xa8, 0x72, 0xe8, 0x74, 0x2a, 0xf6, - 0xc6, 0x14, 0xa1, 0x06, 0x0a, 0x4b, 0x2c, 0xd8, 0xcb, 
0x14, 0xfa, 0x53, 0xa8, 0x2b, 0xe6, 0x49, - 0x8c, 0x2b, 0x2a, 0xe3, 0x35, 0x58, 0xe1, 0xa1, 0x07, 0x41, 0xa0, 0x92, 0xfe, 0xa7, 0x02, 0xab, - 0xc3, 0x76, 0xc9, 0xfb, 0xe5, 0x0d, 0xde, 0x5f, 0x8c, 0xe1, 0x7d, 0x23, 0xb6, 0x90, 0xfa, 0x97, - 0x50, 0x3b, 0xeb, 0xda, 0x91, 0xcb, 0x1c, 0x11, 0x82, 0xd9, 0xd8, 0x7f, 0x2b, 0x66, 0xae, 0x62, - 0xf1, 0x67, 0xa6, 0x26, 0x89, 0xed, 0xf4, 0xb0, 0x2b, 0xb9, 0xcb, 0x93, 0xfe, 0xb7, 0x06, 0xb5, - 0xac, 0x3b, 0xe8, 0x67, 0xa8, 0xc6, 0x2c, 0x4d, 0x4a, 0xeb, 0xf9, 0x6d, 0x69, 0x31, 0x8b, 0xc9, - 0x99, 0xa4, 0x04, 0x45, 0x3e, 0x9d, 0x40, 0x5d, 0x31, 0x17, 0xf4, 0xf6, 0x3b, 0xb5, 0xb7, 0xf5, - 0xdd, 0xcf, 0x6f, 0x81, 0x9c, 0x55, 0xae, 0x7c, 0x11, 0x06, 0x38, 0xfe, 0x63, 0xfe, 0x0f, 0xc0, - 0x7c, 0x9c, 0x72, 0xc0, 0xdd, 0x7f, 0x6b, 0x50, 0x67, 0xe3, 0x7e, 0x26, 0x6e, 0x42, 0xe8, 0x7b, - 0x98, 0x65, 0x17, 0x01, 0xf4, 0x51, 0x71, 0x62, 0xe5, 0x92, 0xa0, 0x1b, 0xe3, 0x5c, 0xe4, 0xc4, - 0x9c, 0x40, 0x2d, 0xbb, 0x5c, 0xa0, 0x87, 0xc5, 0x01, 0x37, 0x6f, 0x1f, 0x7a, 0xf1, 0xd2, 0x7a, - 0xa2, 0xa1, 0x8b, 0x6c, 0x23, 0x2b, 0x6a, 0x8d, 0x3e, 0x2b, 0x4e, 0x5b, 0x7c, 0x7d, 0xd0, 0x1f, - 0x4f, 0xe9, 0x2d, 0x0b, 0x18, 0xc0, 0x7b, 0xc7, 0x98, 0x2a, 0xea, 0x7b, 0x44, 0xa2, 0x9f, 0xa4, - 0x7a, 0xa3, 0xad, 0x92, 0x0e, 0x8c, 0x5c, 0x03, 0xf4, 0xed, 0x29, 0x3c, 0x25, 0x62, 0x1b, 0x20, - 0x17, 0x58, 0xf4, 0xa8, 0x38, 0x70, 0x44, 0xcc, 0xf5, 0xad, 0xc9, 0x8e, 0x12, 0xe0, 0x35, 0xcc, - 0x4b, 0x29, 0x45, 0x0f, 0x8a, 0x83, 0x86, 0x15, 0x5b, 0xff, 0x78, 0x82, 0x97, 0xcc, 0x7b, 0x06, - 0x55, 0xa1, 0xa0, 0xe8, 0x7e, 0x49, 0x80, 0xaa, 0xcd, 0xfa, 0x83, 0xf1, 0x4e, 0x32, 0xe9, 0x39, - 0xd4, 0x15, 0x55, 0x2d, 0xeb, 0xf8, 0xa8, 0x4e, 0x97, 0x75, 0xbc, 0x48, 0xa2, 0x5f, 0xc3, 0xbc, - 0x34, 0x97, 0x35, 0x64, 0x58, 0x8f, 0xcb, 0x1a, 0x72, 0x53, 0x8a, 0xbb, 0xb0, 0x92, 0x8b, 0xe2, - 0x51, 0x44, 0xfa, 0xfc, 0x17, 0x58, 0xf6, 0x49, 0x47, 0xa4, 0xb8, 0xec, 0x93, 0x8e, 0x0a, 0x2d, - 0xfa, 0x1d, 0xd6, 0x8e, 0x31, 0xcd, 0x45, 0x4f, 0x6a, 0x58, 0x5c, 0x86, 0x35, 0xa2, 0xca, 
0x65, - 0x58, 0x05, 0x4a, 0xdc, 0x86, 0x85, 0x14, 0x8b, 0x6f, 0xdf, 0x87, 0x13, 0xc5, 0x4b, 0x20, 0x3c, - 0x9a, 0x52, 0xe4, 0x50, 0x97, 0x5f, 0x6a, 0xd4, 0x15, 0x86, 0xb6, 0xa7, 0x59, 0x73, 0x02, 0xe6, - 0x93, 0xe9, 0x37, 0xe2, 0x61, 0x00, 0xba, 0x43, 0xfa, 0x66, 0x88, 0x69, 0x27, 0xf0, 0xaf, 0x6e, - 0x04, 0x1e, 0x2e, 0x2a, 0x7b, 0xf1, 0xf4, 0xfc, 0xd7, 0x67, 0x9e, 0x4f, 0xbb, 0xc9, 0xb9, 0xe9, - 0x90, 0xfe, 0x8e, 0x8c, 0xd8, 0xc9, 0x22, 0x76, 0x9c, 0xc0, 0xc7, 0x21, 0xdd, 0xf1, 0x08, 0xff, - 0x73, 0x99, 0xdb, 0xb3, 0xff, 0x9a, 0xf1, 0x79, 0x95, 0xef, 0xb2, 0xbd, 0xff, 0x02, 0x00, 0x00, - 0xff, 0xff, 0xc5, 0xda, 0xa9, 0x5e, 0x80, 0x0e, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go deleted file mode 100644 index e73a12be9..000000000 --- a/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go +++ /dev/null @@ -1,1822 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/workflow_service.proto - -package workflows // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import search "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type StartWorkflowResponse struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StartWorkflowResponse) Reset() { *m = StartWorkflowResponse{} } -func (m *StartWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*StartWorkflowResponse) ProtoMessage() {} -func (*StartWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{0} -} -func (m *StartWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StartWorkflowResponse.Unmarshal(m, b) -} -func (m *StartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StartWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *StartWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_StartWorkflowResponse.Merge(dst, src) -} -func (m *StartWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_StartWorkflowResponse.Size(m) -} -func (m *StartWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_StartWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_StartWorkflowResponse proto.InternalMessageInfo - -func (m *StartWorkflowResponse) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type GetWorkflowsRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed,proto3" json:"include_closed,omitempty"` - IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks,proto3" 
json:"include_tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } -func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsRequest) ProtoMessage() {} -func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{1} -} -func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) -} -func (m *GetWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsRequest.Merge(dst, src) -} -func (m *GetWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsRequest.Size(m) -} -func (m *GetWorkflowsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowsRequest proto.InternalMessageInfo - -func (m *GetWorkflowsRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetWorkflowsRequest) GetCorrelationId() []string { - if m != nil { - return m.CorrelationId - } - return nil -} - -func (m *GetWorkflowsRequest) GetIncludeClosed() bool { - if m != nil { - return m.IncludeClosed - } - return false -} - -func (m *GetWorkflowsRequest) GetIncludeTasks() bool { - if m != nil { - return m.IncludeTasks - } - return false -} - -type GetWorkflowsResponse struct { - WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById,proto3" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - 
XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } -func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsResponse) ProtoMessage() {} -func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2} -} -func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) -} -func (m *GetWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsResponse.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsResponse.Merge(dst, src) -} -func (m *GetWorkflowsResponse) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsResponse.Size(m) -} -func (m *GetWorkflowsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowsResponse proto.InternalMessageInfo - -func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsResponse_Workflows { - if m != nil { - return m.WorkflowsById - } - return nil -} - -type GetWorkflowsResponse_Workflows struct { - Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows,proto3" json:"workflows,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResponse_Workflows{} } -func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} -func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2, 0} -} -func (m *GetWorkflowsResponse_Workflows) 
XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) -} -func (m *GetWorkflowsResponse_Workflows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsResponse_Workflows) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsResponse_Workflows.Merge(dst, src) -} -func (m *GetWorkflowsResponse_Workflows) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Size(m) -} -func (m *GetWorkflowsResponse_Workflows) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsResponse_Workflows.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowsResponse_Workflows proto.InternalMessageInfo - -func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { - if m != nil { - return m.Workflows - } - return nil -} - -type GetWorkflowStatusRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks,proto3" json:"include_tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusRequest{} } -func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowStatusRequest) ProtoMessage() {} -func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{3} -} -func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) -} -func (m *GetWorkflowStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowStatusRequest.Marshal(b, m, deterministic) -} -func (dst 
*GetWorkflowStatusRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowStatusRequest.Merge(dst, src) -} -func (m *GetWorkflowStatusRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowStatusRequest.Size(m) -} -func (m *GetWorkflowStatusRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowStatusRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowStatusRequest proto.InternalMessageInfo - -func (m *GetWorkflowStatusRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { - if m != nil { - return m.IncludeTasks - } - return false -} - -type GetWorkflowStatusResponse struct { - Workflow *model.Workflow `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowStatusResponse) Reset() { *m = GetWorkflowStatusResponse{} } -func (m *GetWorkflowStatusResponse) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowStatusResponse) ProtoMessage() {} -func (*GetWorkflowStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{4} -} -func (m *GetWorkflowStatusResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowStatusResponse.Unmarshal(m, b) -} -func (m *GetWorkflowStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowStatusResponse.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowStatusResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowStatusResponse.Merge(dst, src) -} -func (m *GetWorkflowStatusResponse) XXX_Size() int { - return xxx_messageInfo_GetWorkflowStatusResponse.Size(m) -} -func (m *GetWorkflowStatusResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowStatusResponse.DiscardUnknown(m) -} - -var 
xxx_messageInfo_GetWorkflowStatusResponse proto.InternalMessageInfo - -func (m *GetWorkflowStatusResponse) GetWorkflow() *model.Workflow { - if m != nil { - return m.Workflow - } - return nil -} - -type RemoveWorkflowRequest struct { - WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId,proto3" json:"workflod_id,omitempty"` - ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow,proto3" json:"archive_workflow,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } -func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*RemoveWorkflowRequest) ProtoMessage() {} -func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{5} -} -func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) -} -func (m *RemoveWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *RemoveWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveWorkflowRequest.Merge(dst, src) -} -func (m *RemoveWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_RemoveWorkflowRequest.Size(m) -} -func (m *RemoveWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveWorkflowRequest proto.InternalMessageInfo - -func (m *RemoveWorkflowRequest) GetWorkflodId() string { - if m != nil { - return m.WorkflodId - } - return "" -} - -func (m *RemoveWorkflowRequest) GetArchiveWorkflow() bool { - if m != nil { - return m.ArchiveWorkflow - } - return false -} - -type RemoveWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - 
XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveWorkflowResponse) Reset() { *m = RemoveWorkflowResponse{} } -func (m *RemoveWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*RemoveWorkflowResponse) ProtoMessage() {} -func (*RemoveWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{6} -} -func (m *RemoveWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveWorkflowResponse.Unmarshal(m, b) -} -func (m *RemoveWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *RemoveWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveWorkflowResponse.Merge(dst, src) -} -func (m *RemoveWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_RemoveWorkflowResponse.Size(m) -} -func (m *RemoveWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveWorkflowResponse proto.InternalMessageInfo - -type GetRunningWorkflowsRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` - EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsRequest{} } -func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*GetRunningWorkflowsRequest) ProtoMessage() {} -func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) { - 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{7} -} -func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b) -} -func (m *GetRunningWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetRunningWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *GetRunningWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetRunningWorkflowsRequest.Merge(dst, src) -} -func (m *GetRunningWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_GetRunningWorkflowsRequest.Size(m) -} -func (m *GetRunningWorkflowsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetRunningWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetRunningWorkflowsRequest proto.InternalMessageInfo - -func (m *GetRunningWorkflowsRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetRunningWorkflowsRequest) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *GetRunningWorkflowsRequest) GetStartTime() int64 { - if m != nil { - return m.StartTime - } - return 0 -} - -func (m *GetRunningWorkflowsRequest) GetEndTime() int64 { - if m != nil { - return m.EndTime - } - return 0 -} - -type GetRunningWorkflowsResponse struct { - WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds,proto3" json:"workflow_ids,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflowsResponse{} } -func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) } -func (*GetRunningWorkflowsResponse) ProtoMessage() {} -func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{8} -} -func (m *GetRunningWorkflowsResponse) 
XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b) -} -func (m *GetRunningWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetRunningWorkflowsResponse.Marshal(b, m, deterministic) -} -func (dst *GetRunningWorkflowsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetRunningWorkflowsResponse.Merge(dst, src) -} -func (m *GetRunningWorkflowsResponse) XXX_Size() int { - return xxx_messageInfo_GetRunningWorkflowsResponse.Size(m) -} -func (m *GetRunningWorkflowsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetRunningWorkflowsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetRunningWorkflowsResponse proto.InternalMessageInfo - -func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string { - if m != nil { - return m.WorkflowIds - } - return nil -} - -type DecideWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DecideWorkflowRequest) Reset() { *m = DecideWorkflowRequest{} } -func (m *DecideWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*DecideWorkflowRequest) ProtoMessage() {} -func (*DecideWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{9} -} -func (m *DecideWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DecideWorkflowRequest.Unmarshal(m, b) -} -func (m *DecideWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DecideWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *DecideWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DecideWorkflowRequest.Merge(dst, src) -} -func (m *DecideWorkflowRequest) XXX_Size() int { - return 
xxx_messageInfo_DecideWorkflowRequest.Size(m) -} -func (m *DecideWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_DecideWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_DecideWorkflowRequest proto.InternalMessageInfo - -func (m *DecideWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type DecideWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DecideWorkflowResponse) Reset() { *m = DecideWorkflowResponse{} } -func (m *DecideWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*DecideWorkflowResponse) ProtoMessage() {} -func (*DecideWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{10} -} -func (m *DecideWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DecideWorkflowResponse.Unmarshal(m, b) -} -func (m *DecideWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DecideWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *DecideWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_DecideWorkflowResponse.Merge(dst, src) -} -func (m *DecideWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_DecideWorkflowResponse.Size(m) -} -func (m *DecideWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_DecideWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_DecideWorkflowResponse proto.InternalMessageInfo - -type PauseWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PauseWorkflowRequest) Reset() { *m = PauseWorkflowRequest{} } -func (m *PauseWorkflowRequest) String() string { return 
proto.CompactTextString(m) } -func (*PauseWorkflowRequest) ProtoMessage() {} -func (*PauseWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{11} -} -func (m *PauseWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PauseWorkflowRequest.Unmarshal(m, b) -} -func (m *PauseWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PauseWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *PauseWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PauseWorkflowRequest.Merge(dst, src) -} -func (m *PauseWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_PauseWorkflowRequest.Size(m) -} -func (m *PauseWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PauseWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PauseWorkflowRequest proto.InternalMessageInfo - -func (m *PauseWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type PauseWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PauseWorkflowResponse) Reset() { *m = PauseWorkflowResponse{} } -func (m *PauseWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*PauseWorkflowResponse) ProtoMessage() {} -func (*PauseWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{12} -} -func (m *PauseWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PauseWorkflowResponse.Unmarshal(m, b) -} -func (m *PauseWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PauseWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *PauseWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_PauseWorkflowResponse.Merge(dst, src) -} -func (m 
*PauseWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_PauseWorkflowResponse.Size(m) -} -func (m *PauseWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_PauseWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_PauseWorkflowResponse proto.InternalMessageInfo - -type ResumeWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ResumeWorkflowRequest) Reset() { *m = ResumeWorkflowRequest{} } -func (m *ResumeWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*ResumeWorkflowRequest) ProtoMessage() {} -func (*ResumeWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{13} -} -func (m *ResumeWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ResumeWorkflowRequest.Unmarshal(m, b) -} -func (m *ResumeWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ResumeWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *ResumeWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_ResumeWorkflowRequest.Merge(dst, src) -} -func (m *ResumeWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_ResumeWorkflowRequest.Size(m) -} -func (m *ResumeWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_ResumeWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_ResumeWorkflowRequest proto.InternalMessageInfo - -func (m *ResumeWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type ResumeWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ResumeWorkflowResponse) Reset() { *m = ResumeWorkflowResponse{} } -func (m 
*ResumeWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*ResumeWorkflowResponse) ProtoMessage() {} -func (*ResumeWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{14} -} -func (m *ResumeWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ResumeWorkflowResponse.Unmarshal(m, b) -} -func (m *ResumeWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ResumeWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *ResumeWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_ResumeWorkflowResponse.Merge(dst, src) -} -func (m *ResumeWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_ResumeWorkflowResponse.Size(m) -} -func (m *ResumeWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_ResumeWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_ResumeWorkflowResponse proto.InternalMessageInfo - -type SkipTaskRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"` - Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request,proto3" json:"request,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } -func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } -func (*SkipTaskRequest) ProtoMessage() {} -func (*SkipTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{15} -} -func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) -} -func (m *SkipTaskRequest) XXX_Marshal(b []byte, 
deterministic bool) ([]byte, error) { - return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) -} -func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SkipTaskRequest.Merge(dst, src) -} -func (m *SkipTaskRequest) XXX_Size() int { - return xxx_messageInfo_SkipTaskRequest.Size(m) -} -func (m *SkipTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo - -func (m *SkipTaskRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *SkipTaskRequest) GetTaskReferenceName() string { - if m != nil { - return m.TaskReferenceName - } - return "" -} - -func (m *SkipTaskRequest) GetRequest() *model.SkipTaskRequest { - if m != nil { - return m.Request - } - return nil -} - -type SkipTaskResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SkipTaskResponse) Reset() { *m = SkipTaskResponse{} } -func (m *SkipTaskResponse) String() string { return proto.CompactTextString(m) } -func (*SkipTaskResponse) ProtoMessage() {} -func (*SkipTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{16} -} -func (m *SkipTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SkipTaskResponse.Unmarshal(m, b) -} -func (m *SkipTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SkipTaskResponse.Marshal(b, m, deterministic) -} -func (dst *SkipTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_SkipTaskResponse.Merge(dst, src) -} -func (m *SkipTaskResponse) XXX_Size() int { - return xxx_messageInfo_SkipTaskResponse.Size(m) -} -func (m *SkipTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_SkipTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_SkipTaskResponse proto.InternalMessageInfo - -type 
RerunWorkflowResponse struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RerunWorkflowResponse) Reset() { *m = RerunWorkflowResponse{} } -func (m *RerunWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*RerunWorkflowResponse) ProtoMessage() {} -func (*RerunWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{17} -} -func (m *RerunWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RerunWorkflowResponse.Unmarshal(m, b) -} -func (m *RerunWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RerunWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *RerunWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RerunWorkflowResponse.Merge(dst, src) -} -func (m *RerunWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_RerunWorkflowResponse.Size(m) -} -func (m *RerunWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RerunWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RerunWorkflowResponse proto.InternalMessageInfo - -func (m *RerunWorkflowResponse) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type RestartWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RestartWorkflowRequest) Reset() { *m = RestartWorkflowRequest{} } -func (m *RestartWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*RestartWorkflowRequest) ProtoMessage() {} -func (*RestartWorkflowRequest) Descriptor() ([]byte, []int) { - 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{18} -} -func (m *RestartWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RestartWorkflowRequest.Unmarshal(m, b) -} -func (m *RestartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RestartWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *RestartWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RestartWorkflowRequest.Merge(dst, src) -} -func (m *RestartWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_RestartWorkflowRequest.Size(m) -} -func (m *RestartWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RestartWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RestartWorkflowRequest proto.InternalMessageInfo - -func (m *RestartWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type RestartWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RestartWorkflowResponse) Reset() { *m = RestartWorkflowResponse{} } -func (m *RestartWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*RestartWorkflowResponse) ProtoMessage() {} -func (*RestartWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{19} -} -func (m *RestartWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RestartWorkflowResponse.Unmarshal(m, b) -} -func (m *RestartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RestartWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *RestartWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RestartWorkflowResponse.Merge(dst, src) -} -func (m *RestartWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_RestartWorkflowResponse.Size(m) -} -func (m 
*RestartWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RestartWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RestartWorkflowResponse proto.InternalMessageInfo - -type RetryWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RetryWorkflowRequest) Reset() { *m = RetryWorkflowRequest{} } -func (m *RetryWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*RetryWorkflowRequest) ProtoMessage() {} -func (*RetryWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{20} -} -func (m *RetryWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RetryWorkflowRequest.Unmarshal(m, b) -} -func (m *RetryWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RetryWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *RetryWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RetryWorkflowRequest.Merge(dst, src) -} -func (m *RetryWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_RetryWorkflowRequest.Size(m) -} -func (m *RetryWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RetryWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RetryWorkflowRequest proto.InternalMessageInfo - -func (m *RetryWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type RetryWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RetryWorkflowResponse) Reset() { *m = RetryWorkflowResponse{} } -func (m *RetryWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*RetryWorkflowResponse) ProtoMessage() {} -func 
(*RetryWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{21} -} -func (m *RetryWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RetryWorkflowResponse.Unmarshal(m, b) -} -func (m *RetryWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RetryWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *RetryWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RetryWorkflowResponse.Merge(dst, src) -} -func (m *RetryWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_RetryWorkflowResponse.Size(m) -} -func (m *RetryWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RetryWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RetryWorkflowResponse proto.InternalMessageInfo - -type ResetWorkflowCallbacksRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ResetWorkflowCallbacksRequest) Reset() { *m = ResetWorkflowCallbacksRequest{} } -func (m *ResetWorkflowCallbacksRequest) String() string { return proto.CompactTextString(m) } -func (*ResetWorkflowCallbacksRequest) ProtoMessage() {} -func (*ResetWorkflowCallbacksRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{22} -} -func (m *ResetWorkflowCallbacksRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ResetWorkflowCallbacksRequest.Unmarshal(m, b) -} -func (m *ResetWorkflowCallbacksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ResetWorkflowCallbacksRequest.Marshal(b, m, deterministic) -} -func (dst *ResetWorkflowCallbacksRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_ResetWorkflowCallbacksRequest.Merge(dst, src) -} -func (m 
*ResetWorkflowCallbacksRequest) XXX_Size() int { - return xxx_messageInfo_ResetWorkflowCallbacksRequest.Size(m) -} -func (m *ResetWorkflowCallbacksRequest) XXX_DiscardUnknown() { - xxx_messageInfo_ResetWorkflowCallbacksRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_ResetWorkflowCallbacksRequest proto.InternalMessageInfo - -func (m *ResetWorkflowCallbacksRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type ResetWorkflowCallbacksResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ResetWorkflowCallbacksResponse) Reset() { *m = ResetWorkflowCallbacksResponse{} } -func (m *ResetWorkflowCallbacksResponse) String() string { return proto.CompactTextString(m) } -func (*ResetWorkflowCallbacksResponse) ProtoMessage() {} -func (*ResetWorkflowCallbacksResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{23} -} -func (m *ResetWorkflowCallbacksResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ResetWorkflowCallbacksResponse.Unmarshal(m, b) -} -func (m *ResetWorkflowCallbacksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ResetWorkflowCallbacksResponse.Marshal(b, m, deterministic) -} -func (dst *ResetWorkflowCallbacksResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_ResetWorkflowCallbacksResponse.Merge(dst, src) -} -func (m *ResetWorkflowCallbacksResponse) XXX_Size() int { - return xxx_messageInfo_ResetWorkflowCallbacksResponse.Size(m) -} -func (m *ResetWorkflowCallbacksResponse) XXX_DiscardUnknown() { - xxx_messageInfo_ResetWorkflowCallbacksResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_ResetWorkflowCallbacksResponse proto.InternalMessageInfo - -type TerminateWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - Reason 
string `protobuf:"bytes,2,opt,name=reason,proto3" json:"reason,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowRequest{} } -func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*TerminateWorkflowRequest) ProtoMessage() {} -func (*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{24} -} -func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) -} -func (m *TerminateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TerminateWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *TerminateWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_TerminateWorkflowRequest.Merge(dst, src) -} -func (m *TerminateWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_TerminateWorkflowRequest.Size(m) -} -func (m *TerminateWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_TerminateWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_TerminateWorkflowRequest proto.InternalMessageInfo - -func (m *TerminateWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *TerminateWorkflowRequest) GetReason() string { - if m != nil { - return m.Reason - } - return "" -} - -type TerminateWorkflowResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TerminateWorkflowResponse) Reset() { *m = TerminateWorkflowResponse{} } -func (m *TerminateWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*TerminateWorkflowResponse) ProtoMessage() {} -func (*TerminateWorkflowResponse) Descriptor() ([]byte, []int) { - 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{25} -} -func (m *TerminateWorkflowResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TerminateWorkflowResponse.Unmarshal(m, b) -} -func (m *TerminateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TerminateWorkflowResponse.Marshal(b, m, deterministic) -} -func (dst *TerminateWorkflowResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_TerminateWorkflowResponse.Merge(dst, src) -} -func (m *TerminateWorkflowResponse) XXX_Size() int { - return xxx_messageInfo_TerminateWorkflowResponse.Size(m) -} -func (m *TerminateWorkflowResponse) XXX_DiscardUnknown() { - xxx_messageInfo_TerminateWorkflowResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_TerminateWorkflowResponse proto.InternalMessageInfo - -type WorkflowSummarySearchResult struct { - TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits,proto3" json:"total_hits,omitempty"` - Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySearchResult{} } -func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } -func (*WorkflowSummarySearchResult) ProtoMessage() {} -func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{26} -} -func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) -} -func (m *WorkflowSummarySearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowSummarySearchResult.Marshal(b, m, deterministic) -} -func (dst *WorkflowSummarySearchResult) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_WorkflowSummarySearchResult.Merge(dst, src) -} -func (m *WorkflowSummarySearchResult) XXX_Size() int { - return xxx_messageInfo_WorkflowSummarySearchResult.Size(m) -} -func (m *WorkflowSummarySearchResult) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowSummarySearchResult.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowSummarySearchResult proto.InternalMessageInfo - -func (m *WorkflowSummarySearchResult) GetTotalHits() int64 { - if m != nil { - return m.TotalHits - } - return 0 -} - -func (m *WorkflowSummarySearchResult) GetResults() []*model.WorkflowSummary { - if m != nil { - return m.Results - } - return nil -} - -func init() { - proto.RegisterType((*StartWorkflowResponse)(nil), "conductor.grpc.workflows.StartWorkflowResponse") - proto.RegisterType((*GetWorkflowsRequest)(nil), "conductor.grpc.workflows.GetWorkflowsRequest") - proto.RegisterType((*GetWorkflowsResponse)(nil), "conductor.grpc.workflows.GetWorkflowsResponse") - proto.RegisterMapType((map[string]*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.WorkflowsByIdEntry") - proto.RegisterType((*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.Workflows") - proto.RegisterType((*GetWorkflowStatusRequest)(nil), "conductor.grpc.workflows.GetWorkflowStatusRequest") - proto.RegisterType((*GetWorkflowStatusResponse)(nil), "conductor.grpc.workflows.GetWorkflowStatusResponse") - proto.RegisterType((*RemoveWorkflowRequest)(nil), "conductor.grpc.workflows.RemoveWorkflowRequest") - proto.RegisterType((*RemoveWorkflowResponse)(nil), "conductor.grpc.workflows.RemoveWorkflowResponse") - proto.RegisterType((*GetRunningWorkflowsRequest)(nil), "conductor.grpc.workflows.GetRunningWorkflowsRequest") - proto.RegisterType((*GetRunningWorkflowsResponse)(nil), "conductor.grpc.workflows.GetRunningWorkflowsResponse") - proto.RegisterType((*DecideWorkflowRequest)(nil), "conductor.grpc.workflows.DecideWorkflowRequest") - 
proto.RegisterType((*DecideWorkflowResponse)(nil), "conductor.grpc.workflows.DecideWorkflowResponse") - proto.RegisterType((*PauseWorkflowRequest)(nil), "conductor.grpc.workflows.PauseWorkflowRequest") - proto.RegisterType((*PauseWorkflowResponse)(nil), "conductor.grpc.workflows.PauseWorkflowResponse") - proto.RegisterType((*ResumeWorkflowRequest)(nil), "conductor.grpc.workflows.ResumeWorkflowRequest") - proto.RegisterType((*ResumeWorkflowResponse)(nil), "conductor.grpc.workflows.ResumeWorkflowResponse") - proto.RegisterType((*SkipTaskRequest)(nil), "conductor.grpc.workflows.SkipTaskRequest") - proto.RegisterType((*SkipTaskResponse)(nil), "conductor.grpc.workflows.SkipTaskResponse") - proto.RegisterType((*RerunWorkflowResponse)(nil), "conductor.grpc.workflows.RerunWorkflowResponse") - proto.RegisterType((*RestartWorkflowRequest)(nil), "conductor.grpc.workflows.RestartWorkflowRequest") - proto.RegisterType((*RestartWorkflowResponse)(nil), "conductor.grpc.workflows.RestartWorkflowResponse") - proto.RegisterType((*RetryWorkflowRequest)(nil), "conductor.grpc.workflows.RetryWorkflowRequest") - proto.RegisterType((*RetryWorkflowResponse)(nil), "conductor.grpc.workflows.RetryWorkflowResponse") - proto.RegisterType((*ResetWorkflowCallbacksRequest)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksRequest") - proto.RegisterType((*ResetWorkflowCallbacksResponse)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksResponse") - proto.RegisterType((*TerminateWorkflowRequest)(nil), "conductor.grpc.workflows.TerminateWorkflowRequest") - proto.RegisterType((*TerminateWorkflowResponse)(nil), "conductor.grpc.workflows.TerminateWorkflowResponse") - proto.RegisterType((*WorkflowSummarySearchResult)(nil), "conductor.grpc.workflows.WorkflowSummarySearchResult") -} - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// WorkflowServiceClient is the client API for WorkflowService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type WorkflowServiceClient interface { - // POST / - StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*StartWorkflowResponse, error) - // GET /{name}/correlated/{correlationId} - GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) - // GET /{workflowId} - GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) - // DELETE /{workflodId}/remove - RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error) - // GET /running/{name} - GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) - // PUT /decide/{workflowId} - DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error) - // PUT /{workflowId}/pause - PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error) - // PUT /{workflowId}/pause - ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error) - // PUT /{workflowId}/skiptask/{taskReferenceName} - SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error) - // POST /{workflowId}/rerun - RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts 
...grpc.CallOption) (*RerunWorkflowResponse, error) - // POST /{workflowId}/restart - RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error) - // POST /{workflowId}retry - RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error) - // POST /{workflowId}/resetcallbacks - ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error) - // DELETE /{workflowId} - TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error) - // GET /search - Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) - SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) -} - -type workflowServiceClient struct { - cc *grpc.ClientConn -} - -func NewWorkflowServiceClient(cc *grpc.ClientConn) WorkflowServiceClient { - return &workflowServiceClient{cc} -} - -func (c *workflowServiceClient) StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*StartWorkflowResponse, error) { - out := new(StartWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/StartWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) { - out := new(GetWorkflowsResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflows", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) { - out := new(model.Workflow) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error) { - out := new(RemoveWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) { - out := new(GetRunningWorkflowsResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error) { - out := new(DecideWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error) { - out := new(PauseWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error) { - out := new(ResumeWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error) { - out := new(SkipTaskResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*RerunWorkflowResponse, error) { - out := new(RerunWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error) { - out := new(RestartWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error) { - out := new(RetryWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error) { - out := new(ResetWorkflowCallbacksResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error) { - out := new(TerminateWorkflowResponse) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { - out := new(WorkflowSummarySearchResult) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/Search", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { - out := new(WorkflowSummarySearchResult) - err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SearchByTasks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// WorkflowServiceServer is the server API for WorkflowService service. 
-type WorkflowServiceServer interface { - // POST / - StartWorkflow(context.Context, *model.StartWorkflowRequest) (*StartWorkflowResponse, error) - // GET /{name}/correlated/{correlationId} - GetWorkflows(context.Context, *GetWorkflowsRequest) (*GetWorkflowsResponse, error) - // GET /{workflowId} - GetWorkflowStatus(context.Context, *GetWorkflowStatusRequest) (*model.Workflow, error) - // DELETE /{workflodId}/remove - RemoveWorkflow(context.Context, *RemoveWorkflowRequest) (*RemoveWorkflowResponse, error) - // GET /running/{name} - GetRunningWorkflows(context.Context, *GetRunningWorkflowsRequest) (*GetRunningWorkflowsResponse, error) - // PUT /decide/{workflowId} - DecideWorkflow(context.Context, *DecideWorkflowRequest) (*DecideWorkflowResponse, error) - // PUT /{workflowId}/pause - PauseWorkflow(context.Context, *PauseWorkflowRequest) (*PauseWorkflowResponse, error) - // PUT /{workflowId}/pause - ResumeWorkflow(context.Context, *ResumeWorkflowRequest) (*ResumeWorkflowResponse, error) - // PUT /{workflowId}/skiptask/{taskReferenceName} - SkipTaskFromWorkflow(context.Context, *SkipTaskRequest) (*SkipTaskResponse, error) - // POST /{workflowId}/rerun - RerunWorkflow(context.Context, *model.RerunWorkflowRequest) (*RerunWorkflowResponse, error) - // POST /{workflowId}/restart - RestartWorkflow(context.Context, *RestartWorkflowRequest) (*RestartWorkflowResponse, error) - // POST /{workflowId}retry - RetryWorkflow(context.Context, *RetryWorkflowRequest) (*RetryWorkflowResponse, error) - // POST /{workflowId}/resetcallbacks - ResetWorkflowCallbacks(context.Context, *ResetWorkflowCallbacksRequest) (*ResetWorkflowCallbacksResponse, error) - // DELETE /{workflowId} - TerminateWorkflow(context.Context, *TerminateWorkflowRequest) (*TerminateWorkflowResponse, error) - // GET /search - Search(context.Context, *search.Request) (*WorkflowSummarySearchResult, error) - SearchByTasks(context.Context, *search.Request) (*WorkflowSummarySearchResult, error) -} - -func 
RegisterWorkflowServiceServer(s *grpc.Server, srv WorkflowServiceServer) { - s.RegisterService(&_WorkflowService_serviceDesc, srv) -} - -func _WorkflowService_StartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.StartWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).StartWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/StartWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).StartWorkflow(ctx, req.(*model.StartWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/GetWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetWorkflows(ctx, req.(*GetWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetWorkflowStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowStatusRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: 
"/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, req.(*GetWorkflowStatusRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RemoveWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RemoveWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, req.(*RemoveWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetRunningWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetRunningWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, req.(*GetRunningWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_DecideWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DecideWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - 
} - if interceptor == nil { - return srv.(WorkflowServiceServer).DecideWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).DecideWorkflow(ctx, req.(*DecideWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_PauseWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PauseWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).PauseWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).PauseWorkflow(ctx, req.(*PauseWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_ResumeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ResumeWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, req.(*ResumeWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_SkipTaskFromWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SkipTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, req.(*SkipTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RerunWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.RerunWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RerunWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RerunWorkflow(ctx, req.(*model.RerunWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RestartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RestartWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RestartWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RestartWorkflow(ctx, req.(*RestartWorkflowRequest)) - } - return interceptor(ctx, in, info, 
handler) -} - -func _WorkflowService_RetryWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RetryWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RetryWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RetryWorkflow(ctx, req.(*RetryWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_ResetWorkflowCallbacks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ResetWorkflowCallbacksRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, req.(*ResetWorkflowCallbacksRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_TerminateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TerminateWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", - } - handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, req.(*TerminateWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(search.Request) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).Search(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/Search", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).Search(ctx, req.(*search.Request)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_SearchByTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(search.Request) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).SearchByTasks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/conductor.grpc.workflows.WorkflowService/SearchByTasks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).SearchByTasks(ctx, req.(*search.Request)) - } - return interceptor(ctx, in, info, handler) -} - -var _WorkflowService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "conductor.grpc.workflows.WorkflowService", - HandlerType: (*WorkflowServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "StartWorkflow", - Handler: _WorkflowService_StartWorkflow_Handler, - }, - { - MethodName: "GetWorkflows", - Handler: _WorkflowService_GetWorkflows_Handler, - }, - { - MethodName: "GetWorkflowStatus", - Handler: 
_WorkflowService_GetWorkflowStatus_Handler, - }, - { - MethodName: "RemoveWorkflow", - Handler: _WorkflowService_RemoveWorkflow_Handler, - }, - { - MethodName: "GetRunningWorkflows", - Handler: _WorkflowService_GetRunningWorkflows_Handler, - }, - { - MethodName: "DecideWorkflow", - Handler: _WorkflowService_DecideWorkflow_Handler, - }, - { - MethodName: "PauseWorkflow", - Handler: _WorkflowService_PauseWorkflow_Handler, - }, - { - MethodName: "ResumeWorkflow", - Handler: _WorkflowService_ResumeWorkflow_Handler, - }, - { - MethodName: "SkipTaskFromWorkflow", - Handler: _WorkflowService_SkipTaskFromWorkflow_Handler, - }, - { - MethodName: "RerunWorkflow", - Handler: _WorkflowService_RerunWorkflow_Handler, - }, - { - MethodName: "RestartWorkflow", - Handler: _WorkflowService_RestartWorkflow_Handler, - }, - { - MethodName: "RetryWorkflow", - Handler: _WorkflowService_RetryWorkflow_Handler, - }, - { - MethodName: "ResetWorkflowCallbacks", - Handler: _WorkflowService_ResetWorkflowCallbacks_Handler, - }, - { - MethodName: "TerminateWorkflow", - Handler: _WorkflowService_TerminateWorkflow_Handler, - }, - { - MethodName: "Search", - Handler: _WorkflowService_Search_Handler, - }, - { - MethodName: "SearchByTasks", - Handler: _WorkflowService_SearchByTasks_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "grpc/workflow_service.proto", -} - -func init() { - proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_fc7b0bf1a282d9fc) -} - -var fileDescriptor_workflow_service_fc7b0bf1a282d9fc = []byte{ - // 1121 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xcd, 0x6e, 0xdb, 0x46, - 0x10, 0x86, 0xa4, 0xc4, 0xb6, 0xc6, 0x96, 0x7f, 0x36, 0xb6, 0x43, 0xd3, 0x48, 0xaa, 0xb2, 0x08, - 0xe0, 0x14, 0x28, 0xd5, 0x2a, 0x0d, 0xac, 0xe6, 0x94, 0xda, 0x69, 0x53, 0x5f, 0x82, 0x60, 0x65, - 0xa0, 0x40, 0x2f, 0x2c, 0x45, 0xae, 0x65, 0x42, 0xfc, 0x51, 0x77, 0x97, 0x72, 0x54, 0xf4, 0x54, - 0xf4, 
0xd6, 0x57, 0x28, 0xfa, 0x20, 0xbd, 0xf6, 0xc5, 0x0a, 0x2e, 0x97, 0x94, 0x48, 0xad, 0x18, - 0xc9, 0x40, 0x6f, 0xd2, 0xcc, 0x7c, 0x33, 0xb3, 0x33, 0xb3, 0xf3, 0xad, 0x04, 0xa7, 0x43, 0x3a, - 0x76, 0x3a, 0x77, 0x11, 0x1d, 0xdd, 0xf8, 0xd1, 0x9d, 0xc5, 0x08, 0x9d, 0x78, 0x0e, 0x31, 0xc7, - 0x34, 0xe2, 0x11, 0xd2, 0x9c, 0x28, 0x74, 0x63, 0x87, 0x47, 0xd4, 0x4c, 0xcc, 0xcc, 0xcc, 0x8c, - 0xe9, 0x07, 0x02, 0xc6, 0x88, 0x4d, 0x9d, 0xdb, 0xd4, 0x58, 0x3f, 0x0c, 0x22, 0x97, 0xf8, 0xb9, - 0x2b, 0x29, 0x3d, 0x2d, 0x4a, 0x59, 0x1c, 0x04, 0x36, 0x9d, 0x16, 0x95, 0x6c, 0xe4, 0x8d, 0xb9, - 0xcd, 0x46, 0x94, 0xfc, 0x12, 0x13, 0xc6, 0xa5, 0xb2, 0x2d, 0x95, 0xdc, 0xa6, 0x3c, 0x83, 0x2b, - 0x2d, 0x28, 0xa1, 0x71, 0xa8, 0xb4, 0x30, 0x7a, 0x70, 0xd4, 0x4f, 0xf0, 0x3f, 0x4a, 0x2d, 0x26, - 0x6c, 0x1c, 0x85, 0x8c, 0xa0, 0x4f, 0x60, 0x3b, 0x3f, 0xb3, 0xe7, 0x6a, 0xb5, 0x76, 0xed, 0xac, - 0x89, 0x21, 0x13, 0x5d, 0xb9, 0xc6, 0x5f, 0x35, 0x78, 0xf4, 0x96, 0xe4, 0x40, 0x86, 0x53, 0xbf, - 0x08, 0xc1, 0x83, 0xd0, 0x0e, 0x88, 0x44, 0x88, 0xcf, 0xe8, 0x19, 0xec, 0x3a, 0x11, 0xa5, 0xc4, - 0xb7, 0xb9, 0x17, 0x85, 0x89, 0xbf, 0x7a, 0xbb, 0x71, 0xd6, 0xc4, 0xad, 0x39, 0xe9, 0x95, 0x9b, - 0x98, 0x79, 0xa1, 0xe3, 0xc7, 0x2e, 0xb1, 0x1c, 0x3f, 0x62, 0xc4, 0xd5, 0x1a, 0xed, 0xda, 0xd9, - 0x16, 0x6e, 0x49, 0xe9, 0xa5, 0x10, 0xa2, 0xcf, 0x20, 0x13, 0x58, 0x49, 0x51, 0x98, 0xf6, 0x40, - 0x58, 0xed, 0x48, 0xe1, 0x75, 0x22, 0x33, 0xfe, 0xad, 0xc3, 0x61, 0x31, 0x3d, 0x79, 0x30, 0x0f, - 0xf6, 0xf2, 0x5a, 0x5b, 0x83, 0x69, 0x7a, 0xb8, 0xc6, 0xd9, 0x76, 0xf7, 0x5b, 0x73, 0x59, 0x33, - 0x4d, 0x95, 0x23, 0x33, 0x97, 0x5c, 0x4c, 0xaf, 0xdc, 0xef, 0x42, 0x4e, 0xa7, 0xb8, 0x75, 0x37, - 0x2f, 0xd3, 0xdf, 0x40, 0x33, 0x37, 0x42, 0xe7, 0xd0, 0xcc, 0xb5, 0x32, 0xe2, 0xc9, 0x5c, 0x44, - 0xd1, 0x8e, 0xdc, 0x27, 0x9e, 0xd9, 0xea, 0xbf, 0x02, 0x5a, 0x0c, 0x85, 0xf6, 0xa1, 0x31, 0x22, - 0x53, 0x59, 0xe5, 0xe4, 0x23, 0x7a, 0x07, 0x0f, 0x27, 0xb6, 0x1f, 0x13, 0xad, 0xde, 0xae, 0x9d, - 0x6d, 0x77, 0x7b, 0xf7, 0x3d, 0x0e, 0x4e, 
0xdd, 0xbc, 0xaa, 0xf7, 0x6a, 0xc6, 0xcf, 0xa0, 0xcd, - 0x19, 0xf7, 0xb9, 0xcd, 0xe3, 0xbc, 0xd1, 0x1f, 0x9b, 0x90, 0xc5, 0x3e, 0xd5, 0x15, 0x7d, 0xc2, - 0x70, 0xa2, 0x88, 0x20, 0x7b, 0xf5, 0x12, 0xb6, 0x32, 0x7f, 0xc2, 0x7f, 0x65, 0xc9, 0x72, 0x53, - 0xc3, 0x81, 0x23, 0x4c, 0x82, 0x68, 0x42, 0x66, 0x53, 0x5d, 0x4e, 0xd9, 0x5d, 0x4c, 0xd9, 0xbd, - 0x72, 0xd1, 0x73, 0xd8, 0x4f, 0x2e, 0xac, 0x37, 0x21, 0x56, 0x1e, 0x38, 0xcd, 0x7a, 0x4f, 0xca, - 0x33, 0x97, 0x86, 0x06, 0xc7, 0xe5, 0x20, 0x69, 0xd6, 0xc6, 0x1f, 0x35, 0xd0, 0xdf, 0x12, 0x8e, - 0xe3, 0x30, 0xf4, 0xc2, 0xe1, 0x4a, 0x17, 0x44, 0x83, 0xcd, 0x09, 0xa1, 0xcc, 0x8b, 0x42, 0x11, - 0xee, 0x21, 0xce, 0xbe, 0xa2, 0x27, 0x00, 0xe2, 0x82, 0x5b, 0xdc, 0x0b, 0x88, 0xb8, 0x0f, 0x0d, - 0xdc, 0x14, 0x92, 0x6b, 0x2f, 0x20, 0xe8, 0x04, 0xb6, 0x48, 0xe8, 0xa6, 0xca, 0x07, 0x42, 0xb9, - 0x49, 0x42, 0x37, 0x51, 0x19, 0xaf, 0xe1, 0x54, 0x99, 0x85, 0xac, 0xed, 0xa7, 0xb0, 0x33, 0xd7, - 0xbe, 0x74, 0x24, 0x9b, 0x78, 0x7b, 0xd6, 0x3f, 0x96, 0x2c, 0x87, 0x37, 0xc4, 0xf1, 0xdc, 0x8a, - 0x3a, 0x2e, 0x59, 0x0e, 0x1a, 0x1c, 0x97, 0x91, 0xb2, 0x38, 0xe7, 0x70, 0xf8, 0xde, 0x8e, 0xd9, - 0xfa, 0x2e, 0x1f, 0xc3, 0x51, 0x09, 0x28, 0x3d, 0xf6, 0x92, 0x6e, 0xb3, 0x38, 0xb8, 0x57, 0x96, - 0x65, 0xa4, 0xf4, 0xf9, 0x77, 0x0d, 0xf6, 0xfa, 0x23, 0x6f, 0x9c, 0xcc, 0xe8, 0xca, 0xf3, 0x6e, - 0xc2, 0xa3, 0x64, 0xce, 0x2d, 0x4a, 0x6e, 0x08, 0x25, 0xa1, 0x43, 0x2c, 0xd1, 0xe7, 0xba, 0x30, - 0x3c, 0xe0, 0xc2, 0x95, 0xd4, 0xbc, 0x4b, 0x9a, 0xfe, 0x0a, 0x36, 0xe5, 0x32, 0x16, 0x7d, 0xdd, - 0xee, 0xb6, 0x17, 0x86, 0xbb, 0x94, 0x03, 0xce, 0x00, 0x06, 0x82, 0xfd, 0x99, 0x6e, 0xbe, 0x10, - 0x34, 0x0e, 0xd7, 0xdf, 0xe5, 0xdf, 0x88, 0x42, 0x14, 0x79, 0x60, 0xc5, 0x1a, 0x9e, 0xc0, 0xe3, - 0x05, 0xe8, 0xac, 0xd5, 0x98, 0x70, 0x3a, 0xbd, 0x4f, 0xab, 0x4b, 0x40, 0xe9, 0xf1, 0x35, 0x3c, - 0xc1, 0x84, 0xcd, 0xd6, 0xc5, 0xa5, 0xed, 0xfb, 0x03, 0xdb, 0x19, 0xad, 0xbc, 0x93, 0x8c, 0x36, - 0x3c, 0x5d, 0xe6, 0x41, 0xc6, 0xe8, 0x83, 0x76, 0x4d, 0x68, 0xe0, 0x85, 0x36, 
0x5f, 0x7b, 0xa2, - 0xd0, 0x31, 0x6c, 0x50, 0x62, 0x33, 0x79, 0x8d, 0x9b, 0x58, 0x7e, 0x33, 0x4e, 0xe1, 0x44, 0xe1, - 0x54, 0x46, 0xfc, 0x00, 0xa7, 0xf9, 0xfe, 0x4b, 0xd9, 0xbf, 0x2f, 0x9e, 0x0d, 0xc9, 0x6c, 0xfa, - 0x3c, 0xd9, 0x00, 0x3c, 0xe2, 0xb6, 0x6f, 0xdd, 0x7a, 0x9c, 0x89, 0x98, 0x0d, 0xdc, 0x14, 0x92, - 0x1f, 0x3c, 0xce, 0xd2, 0x29, 0x4a, 0x0c, 0x99, 0x20, 0x55, 0xd5, 0x14, 0x95, 0xbc, 0xe3, 0x0c, - 0xd0, 0xfd, 0x67, 0x17, 0xf6, 0x72, 0x65, 0xfa, 0xb0, 0x41, 0x43, 0x68, 0x15, 0x5e, 0x04, 0xe8, - 0xd9, 0xe2, 0x54, 0x2a, 0x26, 0x45, 0xef, 0x2c, 0xe7, 0x1b, 0xf5, 0x0b, 0x23, 0x80, 0x9d, 0x79, - 0x22, 0x42, 0x5f, 0xac, 0x4a, 0x58, 0x69, 0x3c, 0x73, 0x3d, 0x7e, 0x43, 0x03, 0x38, 0x58, 0x20, - 0x1a, 0xd4, 0x5d, 0xc9, 0x49, 0x81, 0xf7, 0xf4, 0xe5, 0x14, 0x84, 0x18, 0xec, 0x16, 0x39, 0x01, - 0x55, 0x54, 0x45, 0x49, 0x51, 0xfa, 0x97, 0xab, 0x03, 0xe4, 0xc1, 0x7e, 0x4f, 0x1f, 0x62, 0xe5, - 0x45, 0x8f, 0xbe, 0xae, 0x3c, 0xdb, 0x12, 0x76, 0xd2, 0x5f, 0xae, 0x89, 0x92, 0x49, 0x30, 0xd8, - 0x2d, 0x2e, 0xfc, 0xaa, 0x93, 0x2b, 0x49, 0xa5, 0xea, 0xe4, 0x6a, 0x2e, 0x41, 0x63, 0x68, 0x15, - 0x28, 0x01, 0x55, 0xcc, 0x84, 0x8a, 0x74, 0xaa, 0x66, 0x56, 0xc9, 0x35, 0x69, 0x83, 0xe7, 0x19, - 0xa3, 0xba, 0xc1, 0x0a, 0x56, 0xaa, 0x6e, 0xb0, 0x8a, 0x8c, 0x50, 0x00, 0x87, 0xd9, 0xae, 0xff, - 0x9e, 0x46, 0x41, 0x1e, 0xfa, 0x79, 0xc5, 0x8d, 0x2b, 0xf2, 0x86, 0xfe, 0xf9, 0x2a, 0xa6, 0x32, - 0xdc, 0x10, 0x5a, 0x05, 0x1a, 0x51, 0x2c, 0x80, 0x12, 0xcd, 0x7c, 0xb4, 0x98, 0x6a, 0x5a, 0x9a, - 0xc0, 0x5e, 0x89, 0x3a, 0x50, 0x75, 0x71, 0x54, 0x6b, 0xe7, 0xab, 0x35, 0x10, 0xb3, 0xb1, 0x29, - 0xd0, 0x4b, 0xd5, 0xd8, 0xa8, 0x08, 0xac, 0xfa, 0xa4, 0x0a, 0xde, 0x42, 0x7f, 0xd6, 0x04, 0xc1, - 0x2a, 0x68, 0x07, 0x9d, 0x57, 0xe6, 0xbf, 0x9c, 0xea, 0xf4, 0xde, 0xfa, 0x40, 0x99, 0xcd, 0x6f, - 0x70, 0xb0, 0x40, 0x46, 0x55, 0x9b, 0x70, 0x19, 0x1d, 0xea, 0x2f, 0xd6, 0xc2, 0xc8, 0xe8, 0x16, - 0x6c, 0xa4, 0xf4, 0x86, 0x9e, 0x96, 0xe1, 0xf2, 0xd7, 0xf2, 0x0a, 0xab, 0xa8, 0x8a, 0x2f, 0x6f, - 0xa0, 0x95, 0x7e, 
0xbf, 0x98, 0x8a, 0x9f, 0x18, 0xff, 0x53, 0x9c, 0x0b, 0x0e, 0xba, 0x13, 0x05, - 0x66, 0x48, 0xf8, 0x8d, 0xef, 0x7d, 0x28, 0xf9, 0xb8, 0x38, 0x28, 0xf1, 0xea, 0xfb, 0xc1, 0x4f, - 0x97, 0x43, 0x8f, 0xdf, 0xc6, 0x03, 0xd3, 0x89, 0x82, 0x8e, 0x44, 0x75, 0x72, 0x54, 0xc7, 0xf1, - 0x3d, 0x12, 0xf2, 0xce, 0x30, 0x12, 0x7f, 0x1c, 0xcc, 0xe4, 0x85, 0xbf, 0x1f, 0xd8, 0x60, 0x43, - 0xdc, 0xc0, 0x17, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0x39, 0x12, 0x2a, 0x80, 0x97, 0x10, 0x00, - 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go deleted file mode 100644 index d47b9b6b6..000000000 --- a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go +++ /dev/null @@ -1,124 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/dynamicforkjointask.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type DynamicForkJoinTask struct { - TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` - WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName,proto3" json:"workflow_name,omitempty"` - ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DynamicForkJoinTask) Reset() { *m = DynamicForkJoinTask{} } -func (m *DynamicForkJoinTask) String() string { return proto.CompactTextString(m) } -func (*DynamicForkJoinTask) ProtoMessage() {} -func (*DynamicForkJoinTask) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointask_60f4ea3626679478, []int{0} -} -func (m *DynamicForkJoinTask) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DynamicForkJoinTask.Unmarshal(m, b) -} -func (m *DynamicForkJoinTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DynamicForkJoinTask.Marshal(b, m, deterministic) -} -func (dst *DynamicForkJoinTask) XXX_Merge(src proto.Message) { - xxx_messageInfo_DynamicForkJoinTask.Merge(dst, src) -} -func (m *DynamicForkJoinTask) XXX_Size() int { - return xxx_messageInfo_DynamicForkJoinTask.Size(m) -} -func (m *DynamicForkJoinTask) XXX_DiscardUnknown() { - xxx_messageInfo_DynamicForkJoinTask.DiscardUnknown(m) -} - -var xxx_messageInfo_DynamicForkJoinTask proto.InternalMessageInfo - -func (m *DynamicForkJoinTask) GetTaskName() string { - if m != nil { - return m.TaskName - } - return "" -} 
- -func (m *DynamicForkJoinTask) GetWorkflowName() string { - if m != nil { - return m.WorkflowName - } - return "" -} - -func (m *DynamicForkJoinTask) GetReferenceName() string { - if m != nil { - return m.ReferenceName - } - return "" -} - -func (m *DynamicForkJoinTask) GetInput() map[string]*_struct.Value { - if m != nil { - return m.Input - } - return nil -} - -func (m *DynamicForkJoinTask) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func init() { - proto.RegisterType((*DynamicForkJoinTask)(nil), "conductor.proto.DynamicForkJoinTask") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.DynamicForkJoinTask.InputEntry") -} - -func init() { - proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_60f4ea3626679478) -} - -var fileDescriptor_dynamicforkjointask_60f4ea3626679478 = []byte{ - // 325 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0x4f, 0x4b, 0x03, 0x31, - 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x55, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, - 0x50, 0x2f, 0xd2, 0x63, 0xb1, 0x82, 0x1e, 0xa4, 0x14, 0xf1, 0xe0, 0x45, 0x76, 0xd3, 0xd9, 0x35, - 0xee, 0x6e, 0x66, 0x49, 0x13, 0xeb, 0x7e, 0x26, 0xbf, 0xa4, 0x6c, 0xd2, 0x56, 0x29, 0xbd, 0x4d, - 0xde, 0xfc, 0xde, 0xe4, 0x65, 0x42, 0x2e, 0x32, 0x9c, 0x43, 0xca, 0xe7, 0x85, 0x0a, 0x32, 0x29, - 0x22, 0xd4, 0xc9, 0x27, 0x4a, 0x65, 0x82, 0x45, 0xc2, 0x72, 0x8d, 0x06, 0xe9, 0x91, 0x40, 0x35, - 0xb7, 0xc2, 0xa0, 0xf6, 0x42, 0xf7, 0x3c, 0x46, 0x8c, 0x53, 0xe0, 0xee, 0x14, 0xda, 0x88, 0x2f, - 0x8c, 0xb6, 0xc2, 0xf8, 0xee, 0xe5, 0x4f, 0x95, 0x9c, 0xdc, 0xfb, 0x61, 0x0f, 0xa8, 0x93, 0x27, - 0x94, 0xea, 0x25, 0x58, 0x24, 0xb4, 0x47, 0x9a, 0xe5, 0xd0, 0x77, 0x15, 0x64, 0xd0, 0xa9, 0xf4, - 0x2b, 0x83, 0xe6, 0x6c, 0xbf, 0x14, 0x9e, 0x83, 0x0c, 0xe8, 0x15, 0x69, 0x2f, 0x51, 0x27, 0x51, - 0x8a, 0x4b, 0x0f, 0x54, 0x1d, 0x70, 0xb0, 0x16, 0x1d, 0x74, 0x4d, 0x0e, 0x35, 0x44, 
0xa0, 0x41, - 0x09, 0xf0, 0x54, 0xcd, 0x51, 0xed, 0x8d, 0xea, 0xb0, 0x09, 0x69, 0x48, 0x95, 0x5b, 0xd3, 0xa9, - 0xf7, 0x6b, 0x83, 0xd6, 0x90, 0xb3, 0xad, 0xfc, 0x6c, 0x47, 0x3a, 0xf6, 0x58, 0x3a, 0x26, 0xca, - 0xe8, 0x62, 0xe6, 0xdd, 0x94, 0x92, 0xba, 0x29, 0x72, 0xe8, 0x34, 0xdc, 0x1d, 0xae, 0xee, 0x4e, - 0x09, 0xf9, 0x03, 0xe9, 0x31, 0xa9, 0x25, 0x50, 0xac, 0xde, 0x52, 0x96, 0xf4, 0x86, 0x34, 0xbe, - 0x82, 0xd4, 0xfa, 0xf8, 0xad, 0xe1, 0x19, 0xf3, 0x9b, 0x62, 0xeb, 0x4d, 0xb1, 0xd7, 0xb2, 0x3b, - 0xf3, 0xd0, 0xa8, 0x7a, 0x57, 0x19, 0xe7, 0xa4, 0x27, 0x30, 0x63, 0x0a, 0x4c, 0x94, 0xca, 0xef, - 0xed, 0xa8, 0xe3, 0xd3, 0x1d, 0x59, 0xa7, 0xe1, 0xdb, 0x28, 0x96, 0xe6, 0xc3, 0x86, 0x4c, 0x60, - 0xc6, 0x57, 0x56, 0xbe, 0xb1, 0x72, 0x91, 0x4a, 0x50, 0x86, 0xc7, 0x18, 0xeb, 0x5c, 0xfc, 0xd3, - 0xdd, 0x47, 0x87, 0x7b, 0x6e, 0xf2, 0xed, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6d, 0xe6, 0x21, - 0x30, 0xf8, 0x01, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go deleted file mode 100644 index 9650213be..000000000 --- a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go +++ /dev/null @@ -1,82 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/dynamicforkjointasklist.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type DynamicForkJoinTaskList struct { - DynamicTasks []*DynamicForkJoinTask `protobuf:"bytes,1,rep,name=dynamic_tasks,json=dynamicTasks,proto3" json:"dynamic_tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DynamicForkJoinTaskList) Reset() { *m = DynamicForkJoinTaskList{} } -func (m *DynamicForkJoinTaskList) String() string { return proto.CompactTextString(m) } -func (*DynamicForkJoinTaskList) ProtoMessage() {} -func (*DynamicForkJoinTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e, []int{0} -} -func (m *DynamicForkJoinTaskList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DynamicForkJoinTaskList.Unmarshal(m, b) -} -func (m *DynamicForkJoinTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DynamicForkJoinTaskList.Marshal(b, m, deterministic) -} -func (dst *DynamicForkJoinTaskList) XXX_Merge(src proto.Message) { - xxx_messageInfo_DynamicForkJoinTaskList.Merge(dst, src) -} -func (m *DynamicForkJoinTaskList) XXX_Size() int { - return xxx_messageInfo_DynamicForkJoinTaskList.Size(m) -} -func (m *DynamicForkJoinTaskList) XXX_DiscardUnknown() { - xxx_messageInfo_DynamicForkJoinTaskList.DiscardUnknown(m) -} - -var xxx_messageInfo_DynamicForkJoinTaskList proto.InternalMessageInfo - -func (m *DynamicForkJoinTaskList) GetDynamicTasks() []*DynamicForkJoinTask { - if m != nil { - return m.DynamicTasks - } - return nil -} - -func init() { - proto.RegisterType((*DynamicForkJoinTaskList)(nil), "conductor.proto.DynamicForkJoinTaskList") -} - -func init() { - proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e) -} - -var fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e = []byte{ - // 200 bytes of a gzipped FileDescriptorProto - 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xce, 0xcd, 0x4f, 0x49, - 0xcd, 0xd1, 0x4f, 0xa9, 0xcc, 0x4b, 0xcc, 0xcd, 0x4c, 0x4e, 0xcb, 0x2f, 0xca, 0xce, 0xca, 0xcf, - 0xcc, 0x2b, 0x49, 0x2c, 0xce, 0xce, 0xc9, 0x2c, 0x2e, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, - 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x48, 0xc9, 0xe3, 0xd4, - 0x05, 0x51, 0xa0, 0x94, 0xc2, 0x25, 0xee, 0x02, 0x91, 0x74, 0xcb, 0x2f, 0xca, 0xf6, 0xca, 0xcf, - 0xcc, 0x0b, 0x49, 0x2c, 0xce, 0xf6, 0xc9, 0x2c, 0x2e, 0x11, 0xf2, 0xe4, 0xe2, 0x85, 0xea, 0x8b, - 0x07, 0x69, 0x28, 0x96, 0x60, 0x54, 0x60, 0xd6, 0xe0, 0x36, 0x52, 0xd1, 0x43, 0xb3, 0x44, 0x0f, - 0x8b, 0x01, 0x41, 0x3c, 0x50, 0xad, 0x20, 0x4e, 0xb1, 0x53, 0x09, 0x97, 0x74, 0x72, 0x7e, 0xae, - 0x5e, 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x05, 0xba, 0x01, 0x4e, 0x92, 0x38, 0x9c, 0x10, 0x90, - 0x14, 0x65, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0xd5, 0xae, - 0x0f, 0xd7, 0xae, 0x9f, 0x9c, 0x93, 0x99, 0x9a, 0x57, 0xa2, 0x9f, 0x9e, 0x9f, 0x5e, 0x54, 0x90, - 0x8c, 0x24, 0x0e, 0xf6, 0x75, 0x12, 0x1b, 0xd8, 0x74, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, - 0x76, 0xa8, 0x2e, 0xed, 0x3b, 0x01, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go b/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go deleted file mode 100644 index ac1c2a8ca..000000000 --- a/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go +++ /dev/null @@ -1,185 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/eventexecution.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type EventExecution_Status int32 - -const ( - EventExecution_IN_PROGRESS EventExecution_Status = 0 - EventExecution_COMPLETED EventExecution_Status = 1 - EventExecution_FAILED EventExecution_Status = 2 - EventExecution_SKIPPED EventExecution_Status = 3 -) - -var EventExecution_Status_name = map[int32]string{ - 0: "IN_PROGRESS", - 1: "COMPLETED", - 2: "FAILED", - 3: "SKIPPED", -} -var EventExecution_Status_value = map[string]int32{ - "IN_PROGRESS": 0, - "COMPLETED": 1, - "FAILED": 2, - "SKIPPED": 3, -} - -func (x EventExecution_Status) String() string { - return proto.EnumName(EventExecution_Status_name, int32(x)) -} -func (EventExecution_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0, 0} -} - -type EventExecution struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - MessageId string `protobuf:"bytes,2,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` - Event string `protobuf:"bytes,4,opt,name=event,proto3" json:"event,omitempty"` - Created int64 `protobuf:"varint,5,opt,name=created,proto3" json:"created,omitempty"` - Status EventExecution_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.EventExecution_Status" json:"status,omitempty"` - Action EventHandler_Action_Type `protobuf:"varint,7,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` - Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output,proto3" 
json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EventExecution) Reset() { *m = EventExecution{} } -func (m *EventExecution) String() string { return proto.CompactTextString(m) } -func (*EventExecution) ProtoMessage() {} -func (*EventExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0} -} -func (m *EventExecution) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EventExecution.Unmarshal(m, b) -} -func (m *EventExecution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EventExecution.Marshal(b, m, deterministic) -} -func (dst *EventExecution) XXX_Merge(src proto.Message) { - xxx_messageInfo_EventExecution.Merge(dst, src) -} -func (m *EventExecution) XXX_Size() int { - return xxx_messageInfo_EventExecution.Size(m) -} -func (m *EventExecution) XXX_DiscardUnknown() { - xxx_messageInfo_EventExecution.DiscardUnknown(m) -} - -var xxx_messageInfo_EventExecution proto.InternalMessageInfo - -func (m *EventExecution) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *EventExecution) GetMessageId() string { - if m != nil { - return m.MessageId - } - return "" -} - -func (m *EventExecution) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *EventExecution) GetEvent() string { - if m != nil { - return m.Event - } - return "" -} - -func (m *EventExecution) GetCreated() int64 { - if m != nil { - return m.Created - } - return 0 -} - -func (m *EventExecution) GetStatus() EventExecution_Status { - if m != nil { - return m.Status - } - return EventExecution_IN_PROGRESS -} - -func (m *EventExecution) GetAction() EventHandler_Action_Type { - if m != nil { - return m.Action - } - return EventHandler_Action_START_WORKFLOW -} - -func (m 
*EventExecution) GetOutput() map[string]*_struct.Value { - if m != nil { - return m.Output - } - return nil -} - -func init() { - proto.RegisterType((*EventExecution)(nil), "conductor.proto.EventExecution") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventExecution.OutputEntry") - proto.RegisterEnum("conductor.proto.EventExecution_Status", EventExecution_Status_name, EventExecution_Status_value) -} - -func init() { - proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_680c67ac3fada8e2) -} - -var fileDescriptor_eventexecution_680c67ac3fada8e2 = []byte{ - // 434 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8b, 0xd3, 0x40, - 0x14, 0xc6, 0x4d, 0xb2, 0x4d, 0xed, 0x0b, 0x76, 0xc3, 0x20, 0x32, 0x54, 0x85, 0xb2, 0x07, 0xa9, - 0x28, 0x13, 0xa8, 0x17, 0xd9, 0x83, 0xd0, 0xdd, 0x46, 0x2d, 0xae, 0x36, 0xa6, 0x8b, 0x07, 0x2f, - 0x4b, 0x3a, 0x79, 0x9b, 0x0d, 0x9b, 0xcc, 0x94, 0x64, 0x66, 0xd9, 0xfe, 0xb9, 0xfe, 0x27, 0xd2, - 0x49, 0x22, 0xdd, 0x22, 0xec, 0x6d, 0xde, 0xf7, 0x7d, 0xbf, 0xe4, 0xbd, 0x37, 0x03, 0xa3, 0x52, - 0xa6, 0x58, 0x04, 0x78, 0x87, 0x42, 0xe1, 0x3d, 0x72, 0xad, 0x72, 0x29, 0xd8, 0xa6, 0x92, 0x4a, - 0x92, 0x63, 0x2e, 0x45, 0xaa, 0xb9, 0x92, 0x55, 0x23, 0x8c, 0xe8, 0x5e, 0xf8, 0x26, 0x11, 0x69, - 0x81, 0x9d, 0xf3, 0x2a, 0x93, 0x32, 0x2b, 0x30, 0x30, 0xd5, 0x5a, 0x5f, 0x07, 0xb5, 0xaa, 0x34, - 0x57, 0x8d, 0x7b, 0xf2, 0xc7, 0x81, 0x61, 0xb8, 0x83, 0xc2, 0xee, 0x0f, 0x64, 0x08, 0x76, 0x9e, - 0x52, 0x6b, 0x6c, 0x4d, 0x06, 0xb1, 0x9d, 0xa7, 0xe4, 0x35, 0x40, 0x89, 0x75, 0x9d, 0x64, 0x78, - 0x95, 0xa7, 0xd4, 0x36, 0xfa, 0xa0, 0x55, 0x16, 0x29, 0x21, 0x70, 0x24, 0x92, 0x12, 0xa9, 0x63, - 0x0c, 0x73, 0x26, 0xcf, 0xa1, 0x67, 0x3a, 0xa1, 0x47, 0x46, 0x6c, 0x0a, 0x42, 0xa1, 0xcf, 0x2b, - 0x4c, 0x14, 0xa6, 0xb4, 0x37, 0xb6, 0x26, 0x4e, 0xdc, 0x95, 0xe4, 0x13, 0xb8, 0xb5, 0x4a, 0x94, - 0xae, 0xa9, 0x3b, 0xb6, 0x26, 0xc3, 0xe9, 0x1b, 0x76, 0x30, 
0x1f, 0x7b, 0xd8, 0x23, 0x5b, 0x99, - 0x74, 0xdc, 0x52, 0x64, 0x06, 0x6e, 0xc2, 0x77, 0x06, 0xed, 0x1b, 0xfe, 0xed, 0xff, 0xf9, 0xaf, - 0xed, 0x62, 0x66, 0x26, 0xcb, 0x2e, 0xb7, 0x1b, 0x8c, 0x5b, 0x90, 0x9c, 0x83, 0x2b, 0xb5, 0xda, - 0x68, 0x45, 0x9f, 0x8e, 0x9d, 0x89, 0x37, 0x7d, 0xf7, 0x58, 0x0b, 0x4b, 0x93, 0x0e, 0x85, 0xaa, - 0xb6, 0x71, 0x8b, 0x8e, 0x7e, 0x82, 0xb7, 0x27, 0x13, 0x1f, 0x9c, 0x5b, 0xdc, 0xb6, 0xab, 0xdc, - 0x1d, 0xc9, 0x7b, 0xe8, 0xdd, 0x25, 0x85, 0x46, 0xb3, 0x46, 0x6f, 0xfa, 0x82, 0x35, 0x97, 0xc3, - 0xba, 0xcb, 0x61, 0xbf, 0x76, 0x6e, 0xdc, 0x84, 0x4e, 0xed, 0x8f, 0xd6, 0xc9, 0x0c, 0xdc, 0x66, - 0x58, 0x72, 0x0c, 0xde, 0xe2, 0xc7, 0x55, 0x14, 0x2f, 0xbf, 0xc4, 0xe1, 0x6a, 0xe5, 0x3f, 0x21, - 0xcf, 0x60, 0x70, 0xbe, 0xfc, 0x1e, 0x5d, 0x84, 0x97, 0xe1, 0xdc, 0xb7, 0x08, 0x80, 0xfb, 0x79, - 0xb6, 0xb8, 0x08, 0xe7, 0xbe, 0x4d, 0x3c, 0xe8, 0xaf, 0xbe, 0x2d, 0xa2, 0x28, 0x9c, 0xfb, 0xce, - 0xd9, 0x2d, 0xbc, 0xe4, 0xb2, 0x64, 0x02, 0xd5, 0x75, 0x91, 0xdf, 0x1f, 0xce, 0x75, 0xe6, 0x3f, - 0x1c, 0x2c, 0x5a, 0xff, 0x3e, 0xcd, 0x72, 0x75, 0xa3, 0xd7, 0x8c, 0xcb, 0x32, 0x68, 0xa9, 0xe0, - 0x1f, 0x15, 0xf0, 0x22, 0x47, 0xa1, 0x82, 0x4c, 0x66, 0xd5, 0x86, 0xef, 0xe9, 0xe6, 0x05, 0xae, - 0x5d, 0xf3, 0xd1, 0x0f, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x30, 0x90, 0x3d, 0xc6, 0xbe, 0x02, - 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go b/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go deleted file mode 100644 index def177112..000000000 --- a/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go +++ /dev/null @@ -1,379 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: model/eventhandler.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import any "github.com/golang/protobuf/ptypes/any" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type EventHandler_Action_Type int32 - -const ( - EventHandler_Action_START_WORKFLOW EventHandler_Action_Type = 0 - EventHandler_Action_COMPLETE_TASK EventHandler_Action_Type = 1 - EventHandler_Action_FAIL_TASK EventHandler_Action_Type = 2 -) - -var EventHandler_Action_Type_name = map[int32]string{ - 0: "START_WORKFLOW", - 1: "COMPLETE_TASK", - 2: "FAIL_TASK", -} -var EventHandler_Action_Type_value = map[string]int32{ - "START_WORKFLOW": 0, - "COMPLETE_TASK": 1, - "FAIL_TASK": 2, -} - -func (x EventHandler_Action_Type) String() string { - return proto.EnumName(EventHandler_Action_Type_name, int32(x)) -} -func (EventHandler_Action_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2, 0} -} - -type EventHandler struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Event string `protobuf:"bytes,2,opt,name=event,proto3" json:"event,omitempty"` - Condition string `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` - Actions []*EventHandler_Action `protobuf:"bytes,4,rep,name=actions,proto3" json:"actions,omitempty"` - Active bool `protobuf:"varint,5,opt,name=active,proto3" 
json:"active,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EventHandler) Reset() { *m = EventHandler{} } -func (m *EventHandler) String() string { return proto.CompactTextString(m) } -func (*EventHandler) ProtoMessage() {} -func (*EventHandler) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0} -} -func (m *EventHandler) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EventHandler.Unmarshal(m, b) -} -func (m *EventHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EventHandler.Marshal(b, m, deterministic) -} -func (dst *EventHandler) XXX_Merge(src proto.Message) { - xxx_messageInfo_EventHandler.Merge(dst, src) -} -func (m *EventHandler) XXX_Size() int { - return xxx_messageInfo_EventHandler.Size(m) -} -func (m *EventHandler) XXX_DiscardUnknown() { - xxx_messageInfo_EventHandler.DiscardUnknown(m) -} - -var xxx_messageInfo_EventHandler proto.InternalMessageInfo - -func (m *EventHandler) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *EventHandler) GetEvent() string { - if m != nil { - return m.Event - } - return "" -} - -func (m *EventHandler) GetCondition() string { - if m != nil { - return m.Condition - } - return "" -} - -func (m *EventHandler) GetActions() []*EventHandler_Action { - if m != nil { - return m.Actions - } - return nil -} - -func (m *EventHandler) GetActive() bool { - if m != nil { - return m.Active - } - return false -} - -type EventHandler_StartWorkflow struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" 
json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - InputMessage *any.Any `protobuf:"bytes,5,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EventHandler_StartWorkflow) Reset() { *m = EventHandler_StartWorkflow{} } -func (m *EventHandler_StartWorkflow) String() string { return proto.CompactTextString(m) } -func (*EventHandler_StartWorkflow) ProtoMessage() {} -func (*EventHandler_StartWorkflow) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 0} -} -func (m *EventHandler_StartWorkflow) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EventHandler_StartWorkflow.Unmarshal(m, b) -} -func (m *EventHandler_StartWorkflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EventHandler_StartWorkflow.Marshal(b, m, deterministic) -} -func (dst *EventHandler_StartWorkflow) XXX_Merge(src proto.Message) { - xxx_messageInfo_EventHandler_StartWorkflow.Merge(dst, src) -} -func (m *EventHandler_StartWorkflow) XXX_Size() int { - return xxx_messageInfo_EventHandler_StartWorkflow.Size(m) -} -func (m *EventHandler_StartWorkflow) XXX_DiscardUnknown() { - xxx_messageInfo_EventHandler_StartWorkflow.DiscardUnknown(m) -} - -var xxx_messageInfo_EventHandler_StartWorkflow proto.InternalMessageInfo - -func (m *EventHandler_StartWorkflow) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *EventHandler_StartWorkflow) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *EventHandler_StartWorkflow) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func (m *EventHandler_StartWorkflow) GetInput() map[string]*_struct.Value { - if m != nil { - return m.Input - } - return nil -} - -func (m 
*EventHandler_StartWorkflow) GetInputMessage() *any.Any { - if m != nil { - return m.InputMessage - } - return nil -} - -type EventHandler_TaskDetails struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"` - Output map[string]*_struct.Value `protobuf:"bytes,3,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - OutputMessage *any.Any `protobuf:"bytes,4,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EventHandler_TaskDetails) Reset() { *m = EventHandler_TaskDetails{} } -func (m *EventHandler_TaskDetails) String() string { return proto.CompactTextString(m) } -func (*EventHandler_TaskDetails) ProtoMessage() {} -func (*EventHandler_TaskDetails) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 1} -} -func (m *EventHandler_TaskDetails) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EventHandler_TaskDetails.Unmarshal(m, b) -} -func (m *EventHandler_TaskDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EventHandler_TaskDetails.Marshal(b, m, deterministic) -} -func (dst *EventHandler_TaskDetails) XXX_Merge(src proto.Message) { - xxx_messageInfo_EventHandler_TaskDetails.Merge(dst, src) -} -func (m *EventHandler_TaskDetails) XXX_Size() int { - return xxx_messageInfo_EventHandler_TaskDetails.Size(m) -} -func (m *EventHandler_TaskDetails) XXX_DiscardUnknown() { - xxx_messageInfo_EventHandler_TaskDetails.DiscardUnknown(m) -} - -var xxx_messageInfo_EventHandler_TaskDetails proto.InternalMessageInfo - -func (m *EventHandler_TaskDetails) GetWorkflowId() 
string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *EventHandler_TaskDetails) GetTaskRefName() string { - if m != nil { - return m.TaskRefName - } - return "" -} - -func (m *EventHandler_TaskDetails) GetOutput() map[string]*_struct.Value { - if m != nil { - return m.Output - } - return nil -} - -func (m *EventHandler_TaskDetails) GetOutputMessage() *any.Any { - if m != nil { - return m.OutputMessage - } - return nil -} - -type EventHandler_Action struct { - Action EventHandler_Action_Type `protobuf:"varint,1,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` - StartWorkflow *EventHandler_StartWorkflow `protobuf:"bytes,2,opt,name=start_workflow,json=startWorkflow,proto3" json:"start_workflow,omitempty"` - CompleteTask *EventHandler_TaskDetails `protobuf:"bytes,3,opt,name=complete_task,json=completeTask,proto3" json:"complete_task,omitempty"` - FailTask *EventHandler_TaskDetails `protobuf:"bytes,4,opt,name=fail_task,json=failTask,proto3" json:"fail_task,omitempty"` - ExpandInlineJson bool `protobuf:"varint,5,opt,name=expand_inline_json,json=expandInlineJson,proto3" json:"expand_inline_json,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EventHandler_Action) Reset() { *m = EventHandler_Action{} } -func (m *EventHandler_Action) String() string { return proto.CompactTextString(m) } -func (*EventHandler_Action) ProtoMessage() {} -func (*EventHandler_Action) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2} -} -func (m *EventHandler_Action) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EventHandler_Action.Unmarshal(m, b) -} -func (m *EventHandler_Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EventHandler_Action.Marshal(b, m, deterministic) -} -func (dst *EventHandler_Action) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_EventHandler_Action.Merge(dst, src) -} -func (m *EventHandler_Action) XXX_Size() int { - return xxx_messageInfo_EventHandler_Action.Size(m) -} -func (m *EventHandler_Action) XXX_DiscardUnknown() { - xxx_messageInfo_EventHandler_Action.DiscardUnknown(m) -} - -var xxx_messageInfo_EventHandler_Action proto.InternalMessageInfo - -func (m *EventHandler_Action) GetAction() EventHandler_Action_Type { - if m != nil { - return m.Action - } - return EventHandler_Action_START_WORKFLOW -} - -func (m *EventHandler_Action) GetStartWorkflow() *EventHandler_StartWorkflow { - if m != nil { - return m.StartWorkflow - } - return nil -} - -func (m *EventHandler_Action) GetCompleteTask() *EventHandler_TaskDetails { - if m != nil { - return m.CompleteTask - } - return nil -} - -func (m *EventHandler_Action) GetFailTask() *EventHandler_TaskDetails { - if m != nil { - return m.FailTask - } - return nil -} - -func (m *EventHandler_Action) GetExpandInlineJson() bool { - if m != nil { - return m.ExpandInlineJson - } - return false -} - -func init() { - proto.RegisterType((*EventHandler)(nil), "conductor.proto.EventHandler") - proto.RegisterType((*EventHandler_StartWorkflow)(nil), "conductor.proto.EventHandler.StartWorkflow") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventHandler.StartWorkflow.InputEntry") - proto.RegisterType((*EventHandler_TaskDetails)(nil), "conductor.proto.EventHandler.TaskDetails") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventHandler.TaskDetails.OutputEntry") - proto.RegisterType((*EventHandler_Action)(nil), "conductor.proto.EventHandler.Action") - proto.RegisterEnum("conductor.proto.EventHandler_Action_Type", EventHandler_Action_Type_name, EventHandler_Action_Type_value) -} - -func init() { - proto.RegisterFile("model/eventhandler.proto", fileDescriptor_eventhandler_d75293086a3c9db8) -} - -var fileDescriptor_eventhandler_d75293086a3c9db8 = []byte{ - // 665 bytes of a gzipped 
FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, - 0x18, 0x77, 0x7f, 0x61, 0x4f, 0xe9, 0x9c, 0x17, 0x42, 0xea, 0x24, 0x91, 0x10, 0x4d, 0x30, 0x92, - 0x36, 0x99, 0xd1, 0x28, 0x1a, 0x93, 0xa1, 0x23, 0x4e, 0x06, 0xc3, 0x63, 0x91, 0xc4, 0x37, 0xcd, - 0xad, 0xbd, 0x8d, 0xba, 0xee, 0x6e, 0x69, 0xaf, 0x83, 0x7d, 0x1e, 0x3f, 0x81, 0x9f, 0xc0, 0xf7, - 0x7e, 0x2a, 0x73, 0x77, 0x2d, 0x14, 0x34, 0x28, 0x89, 0xef, 0x9e, 0xbf, 0xbf, 0xe7, 0x77, 0xbf, - 0xe7, 0x69, 0xc1, 0x9a, 0x72, 0x9f, 0x86, 0x0e, 0x9d, 0x53, 0x26, 0x4e, 0x09, 0xf3, 0x43, 0x1a, - 0xd9, 0xb3, 0x88, 0x0b, 0x8e, 0xee, 0x7a, 0x9c, 0xf9, 0x89, 0x27, 0x78, 0x1a, 0x68, 0xae, 0x8f, - 0x39, 0x1f, 0x87, 0xd4, 0x51, 0xde, 0x30, 0x19, 0x39, 0xb1, 0x88, 0x12, 0x4f, 0xa4, 0xd9, 0xfb, - 0xd7, 0xb3, 0x84, 0x2d, 0x74, 0x6a, 0xf3, 0x67, 0x0d, 0x56, 0x3a, 0x72, 0xc0, 0x07, 0x3d, 0x00, - 0x21, 0x28, 0x33, 0x32, 0xa5, 0x56, 0x61, 0xa3, 0xb0, 0x55, 0xc3, 0xca, 0x46, 0xab, 0x50, 0x51, - 0x24, 0xac, 0xa2, 0x0a, 0x6a, 0x07, 0xad, 0x43, 0x4d, 0xd2, 0x08, 0x44, 0xc0, 0x99, 0x55, 0x52, - 0x99, 0xcb, 0x00, 0x7a, 0x0b, 0x4b, 0xc4, 0x93, 0x56, 0x6c, 0x95, 0x37, 0x4a, 0x5b, 0x46, 0xeb, - 0x91, 0x7d, 0x8d, 0xb4, 0x9d, 0x9f, 0x6b, 0xb7, 0x55, 0x31, 0xce, 0x9a, 0xd0, 0x1a, 0x54, 0xa5, - 0x39, 0xa7, 0x56, 0x65, 0xa3, 0xb0, 0xb5, 0x8c, 0x53, 0xaf, 0xf9, 0xa3, 0x08, 0xe6, 0xb1, 0x20, - 0x91, 0x38, 0xe1, 0xd1, 0x64, 0x14, 0xf2, 0xb3, 0x3f, 0x32, 0xb6, 0x60, 0x69, 0x4e, 0xa3, 0x58, - 0x32, 0x93, 0x9c, 0x2b, 0x38, 0x73, 0xd1, 0x63, 0xa8, 0x7b, 0x3c, 0x8a, 0x68, 0x48, 0xe4, 0x1c, - 0x37, 0xf0, 0x53, 0xea, 0x66, 0x2e, 0xda, 0xf5, 0x51, 0x0f, 0x2a, 0x01, 0x9b, 0x25, 0x22, 0x25, - 0xff, 0xe2, 0x66, 0xf2, 0x57, 0x08, 0xd9, 0x5d, 0xd9, 0xd8, 0x61, 0x22, 0x5a, 0x60, 0x0d, 0x82, - 0x5e, 0x81, 0xa9, 0x0c, 0x77, 0x4a, 0xe3, 0x98, 0x8c, 0xf5, 0x9b, 0x8c, 0xd6, 0xaa, 0xad, 0x17, - 0x63, 0x67, 0x8b, 0xb1, 0xdb, 0x6c, 0x81, 0x57, 0x54, 0xe9, 0x81, 0xae, 0x6c, 0x1e, 0x01, 0x5c, - 0xe2, 0xa1, 0x06, 
0x94, 0x26, 0x74, 0x91, 0x3e, 0x55, 0x9a, 0x68, 0x1b, 0x2a, 0x73, 0x12, 0x26, - 0x54, 0xbd, 0xd3, 0x68, 0xad, 0xfd, 0x06, 0xf9, 0x59, 0x66, 0xb1, 0x2e, 0xda, 0x29, 0xbe, 0x2c, - 0x34, 0xbf, 0x17, 0xc1, 0x18, 0x90, 0x78, 0xf2, 0x9e, 0x0a, 0x12, 0x84, 0x31, 0x7a, 0x08, 0xc6, - 0x59, 0x4a, 0x5d, 0xca, 0xa1, 0xb1, 0x21, 0x0b, 0x75, 0x7d, 0xb4, 0x09, 0xa6, 0x20, 0xf1, 0xc4, - 0x8d, 0xe8, 0xc8, 0x55, 0x4a, 0xeb, 0x33, 0x30, 0x64, 0x10, 0xd3, 0xd1, 0xa1, 0x14, 0xfc, 0x00, - 0xaa, 0x3c, 0x11, 0x52, 0xb0, 0x92, 0x12, 0xec, 0xf9, 0xcd, 0x82, 0xe5, 0xe6, 0xdb, 0x7d, 0xd5, - 0xa7, 0xf5, 0x4a, 0x41, 0xd0, 0x6b, 0xa8, 0x6b, 0xeb, 0x42, 0xb1, 0xf2, 0x0d, 0x8a, 0x99, 0xba, - 0x36, 0x93, 0xec, 0x13, 0x18, 0x39, 0xcc, 0xff, 0xa2, 0xd9, 0xb7, 0x12, 0x54, 0xf5, 0x85, 0xa2, - 0xb6, 0x3e, 0x4c, 0xce, 0x14, 0x62, 0xbd, 0xf5, 0xe4, 0x5f, 0xee, 0xda, 0x1e, 0x2c, 0x66, 0x14, - 0xa7, 0x8d, 0x08, 0x43, 0x3d, 0x96, 0x17, 0xe3, 0x66, 0x22, 0xa7, 0x44, 0x9e, 0xde, 0xe2, 0xca, - 0xb0, 0x19, 0x5f, 0xf9, 0x0a, 0x0e, 0xc1, 0xf4, 0xf8, 0x74, 0x16, 0x52, 0x41, 0x5d, 0xb9, 0x18, - 0x75, 0xd6, 0xc6, 0xdf, 0xd8, 0xe5, 0xf6, 0x80, 0x57, 0xb2, 0x7e, 0x19, 0x44, 0x7b, 0x50, 0x1b, - 0x91, 0x20, 0xd4, 0x58, 0xe5, 0xdb, 0x62, 0x2d, 0xcb, 0x5e, 0x85, 0xb3, 0x0d, 0x88, 0x9e, 0xcf, - 0x08, 0xf3, 0xdd, 0x80, 0x85, 0x01, 0xa3, 0xee, 0xd7, 0x98, 0xb3, 0xf4, 0x9b, 0x6e, 0xe8, 0x4c, - 0x57, 0x25, 0x3e, 0xc6, 0x9c, 0x6d, 0xbe, 0x81, 0xb2, 0x54, 0x0a, 0x21, 0xa8, 0x1f, 0x0f, 0xda, - 0x78, 0xe0, 0x9e, 0xf4, 0xf1, 0xfe, 0x5e, 0xaf, 0x7f, 0xd2, 0xb8, 0x83, 0xee, 0x81, 0xf9, 0xae, - 0x7f, 0x70, 0xd4, 0xeb, 0x0c, 0x3a, 0xee, 0xa0, 0x7d, 0xbc, 0xdf, 0x28, 0x20, 0x13, 0x6a, 0x7b, - 0xed, 0x6e, 0x4f, 0xbb, 0xc5, 0xdd, 0x00, 0x1e, 0x78, 0x7c, 0x6a, 0x33, 0x2a, 0x46, 0x61, 0x70, - 0x7e, 0x9d, 0xed, 0x6e, 0x3d, 0x4f, 0xf7, 0x68, 0xf8, 0x65, 0x67, 0x1c, 0x88, 0xd3, 0x64, 0x68, - 0x7b, 0x7c, 0xea, 0xa4, 0x3d, 0xce, 0x45, 0x8f, 0xe3, 0x85, 0x01, 0x65, 0xc2, 0x19, 0xf3, 0x71, - 0x34, 0xf3, 0x72, 0x71, 0xf5, 0x53, 0x1e, 0x56, 0x15, 
0xe4, 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, - 0xff, 0x15, 0xb8, 0xa4, 0xd6, 0xa4, 0x05, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/polldata.pb.go b/polyglot-clients/gogrpc/conductor/model/polldata.pb.go deleted file mode 100644 index b2ba7ff6f..000000000 --- a/polyglot-clients/gogrpc/conductor/model/polldata.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/polldata.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type PollData struct { - QueueName string `protobuf:"bytes,1,opt,name=queue_name,json=queueName,proto3" json:"queue_name,omitempty"` - Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` - WorkerId string `protobuf:"bytes,3,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` - LastPollTime int64 `protobuf:"varint,4,opt,name=last_poll_time,json=lastPollTime,proto3" json:"last_poll_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PollData) Reset() { *m = PollData{} } -func (m *PollData) String() string { return proto.CompactTextString(m) } -func (*PollData) ProtoMessage() {} -func (*PollData) Descriptor() ([]byte, []int) { - return fileDescriptor_polldata_17cab9e308fb8d52, []int{0} -} -func (m *PollData) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PollData.Unmarshal(m, b) -} -func (m *PollData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PollData.Marshal(b, m, deterministic) -} -func (dst *PollData) XXX_Merge(src proto.Message) { - xxx_messageInfo_PollData.Merge(dst, src) -} -func (m *PollData) XXX_Size() int { - return xxx_messageInfo_PollData.Size(m) -} -func (m *PollData) XXX_DiscardUnknown() { - xxx_messageInfo_PollData.DiscardUnknown(m) -} - -var xxx_messageInfo_PollData proto.InternalMessageInfo - -func (m *PollData) GetQueueName() string { - if m != nil { - return m.QueueName - } - return "" -} - -func (m *PollData) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -func (m *PollData) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *PollData) GetLastPollTime() int64 { - if m != nil { - return m.LastPollTime - } - return 0 -} - -func init() { - proto.RegisterType((*PollData)(nil), "conductor.proto.PollData") -} - -func 
init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_17cab9e308fb8d52) } - -var fileDescriptor_polldata_17cab9e308fb8d52 = []byte{ - // 229 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x90, 0xc1, 0x4a, 0x03, 0x31, - 0x10, 0x86, 0x59, 0x2b, 0xa5, 0x3b, 0x88, 0x42, 0x10, 0x59, 0x28, 0x42, 0x11, 0x0f, 0x3d, 0x6d, - 0x0e, 0xde, 0x3c, 0x16, 0x2f, 0x5e, 0xa4, 0x14, 0x4f, 0x5e, 0x96, 0x6c, 0x32, 0xae, 0xc1, 0x4c, - 0x66, 0x8d, 0x13, 0xf4, 0x01, 0x7c, 0x70, 0xd9, 0xb4, 0x8a, 0x78, 0x9c, 0xef, 0xff, 0x99, 0xf9, - 0x18, 0x38, 0x27, 0x76, 0x18, 0xf4, 0xc8, 0x21, 0x38, 0x23, 0xa6, 0x1d, 0x13, 0x0b, 0xab, 0x33, - 0xcb, 0xd1, 0x65, 0x2b, 0x9c, 0xf6, 0xe0, 0xea, 0xab, 0x82, 0xc5, 0x96, 0x43, 0xb8, 0x33, 0x62, - 0xd4, 0x25, 0xc0, 0x5b, 0xc6, 0x8c, 0x5d, 0x34, 0x84, 0x4d, 0xb5, 0xaa, 0xd6, 0xf5, 0xae, 0x2e, - 0xe4, 0xc1, 0x10, 0xaa, 0x0b, 0x98, 0x3b, 0x26, 0xe3, 0x63, 0x73, 0x54, 0xa2, 0xc3, 0xa4, 0x96, - 0x50, 0x7f, 0x70, 0x7a, 0xc5, 0xd4, 0x79, 0xd7, 0xcc, 0x4a, 0xb4, 0xd8, 0x83, 0x7b, 0xa7, 0xae, - 0xe1, 0x34, 0x98, 0x77, 0xe9, 0x26, 0x91, 0x4e, 0x3c, 0x61, 0x73, 0xbc, 0xaa, 0xd6, 0xb3, 0xdd, - 0xc9, 0x44, 0xa7, 0xcb, 0x8f, 0x9e, 0x70, 0x83, 0xb0, 0xb4, 0x4c, 0x6d, 0x44, 0x79, 0x0e, 0xfe, - 0xb3, 0xfd, 0x67, 0xb9, 0x81, 0x1f, 0xc5, 0x6d, 0xff, 0x74, 0x3b, 0x78, 0x79, 0xc9, 0x7d, 0x6b, - 0x99, 0xf4, 0xa1, 0xaf, 0x7f, 0xfb, 0xda, 0x06, 0x8f, 0x51, 0xf4, 0xc0, 0x43, 0x1a, 0xed, 0x1f, - 0x5e, 0xbe, 0xd1, 0xcf, 0xcb, 0xba, 0x9b, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x37, 0x71, - 0xb0, 0x1d, 0x01, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go deleted file mode 100644 index 5268688d5..000000000 --- a/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go +++ /dev/null @@ -1,128 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: model/rerunworkflowrequest.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type RerunWorkflowRequest struct { - ReRunFromWorkflowId string `protobuf:"bytes,1,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` - WorkflowInput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=workflow_input,json=workflowInput,proto3" json:"workflow_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - ReRunFromTaskId string `protobuf:"bytes,3,opt,name=re_run_from_task_id,json=reRunFromTaskId,proto3" json:"re_run_from_task_id,omitempty"` - TaskInput map[string]*_struct.Value `protobuf:"bytes,4,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - CorrelationId string `protobuf:"bytes,5,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RerunWorkflowRequest) Reset() { *m = RerunWorkflowRequest{} } -func (m *RerunWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*RerunWorkflowRequest) ProtoMessage() {} -func 
(*RerunWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8, []int{0} -} -func (m *RerunWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RerunWorkflowRequest.Unmarshal(m, b) -} -func (m *RerunWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RerunWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *RerunWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RerunWorkflowRequest.Merge(dst, src) -} -func (m *RerunWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_RerunWorkflowRequest.Size(m) -} -func (m *RerunWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RerunWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RerunWorkflowRequest proto.InternalMessageInfo - -func (m *RerunWorkflowRequest) GetReRunFromWorkflowId() string { - if m != nil { - return m.ReRunFromWorkflowId - } - return "" -} - -func (m *RerunWorkflowRequest) GetWorkflowInput() map[string]*_struct.Value { - if m != nil { - return m.WorkflowInput - } - return nil -} - -func (m *RerunWorkflowRequest) GetReRunFromTaskId() string { - if m != nil { - return m.ReRunFromTaskId - } - return "" -} - -func (m *RerunWorkflowRequest) GetTaskInput() map[string]*_struct.Value { - if m != nil { - return m.TaskInput - } - return nil -} - -func (m *RerunWorkflowRequest) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func init() { - proto.RegisterType((*RerunWorkflowRequest)(nil), "conductor.proto.RerunWorkflowRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.TaskInputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.WorkflowInputEntry") -} - -func init() { - proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8) -} - -var 
fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8 = []byte{ - // 369 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4f, 0xef, 0xd2, 0x30, - 0x18, 0xc7, 0x33, 0xf8, 0x61, 0x42, 0x09, 0x60, 0x8a, 0x41, 0x82, 0x1e, 0x16, 0x13, 0x13, 0x0e, - 0xa4, 0x4b, 0x90, 0x03, 0xe1, 0x48, 0xa2, 0x09, 0x37, 0x33, 0x89, 0x1a, 0x2f, 0xcb, 0xd6, 0x75, - 0x73, 0xd9, 0xd6, 0x07, 0x9e, 0xb5, 0x22, 0xaf, 0xc0, 0xb7, 0x6d, 0xd6, 0x6d, 0x30, 0x91, 0x83, - 0x07, 0x6f, 0xeb, 0xb3, 0xef, 0x9f, 0xcf, 0x9e, 0x8e, 0xd8, 0x39, 0x84, 0x22, 0x73, 0x50, 0xa0, - 0x96, 0x67, 0xc0, 0x34, 0xca, 0xe0, 0x8c, 0xe2, 0xa4, 0x45, 0xa1, 0xd8, 0x11, 0x41, 0x01, 0x1d, - 0x73, 0x90, 0xa1, 0xe6, 0x0a, 0xb0, 0x1a, 0xcc, 0x5f, 0xc7, 0x00, 0x71, 0x26, 0x1c, 0x73, 0x0a, - 0x74, 0xe4, 0x14, 0x0a, 0x35, 0xaf, 0xe5, 0x6f, 0x7e, 0x3d, 0x91, 0x17, 0x6e, 0x99, 0xf6, 0xa5, - 0x4e, 0x73, 0xab, 0x34, 0xba, 0x26, 0x2f, 0x51, 0x78, 0xa8, 0xa5, 0x17, 0x21, 0xe4, 0x5e, 0x53, - 0xe6, 0x25, 0xe1, 0xcc, 0xb2, 0xad, 0x45, 0xdf, 0x9d, 0xa0, 0x70, 0xb5, 0xfc, 0x80, 0x90, 0x37, - 0xd6, 0x7d, 0x48, 0x3d, 0x32, 0xba, 0x29, 0xe5, 0x51, 0xab, 0x59, 0xc7, 0xee, 0x2e, 0x06, 0xab, - 0x0d, 0xbb, 0xc3, 0x62, 0x8f, 0x4a, 0xd9, 0x35, 0xa9, 0xb4, 0xbe, 0x97, 0x0a, 0x2f, 0xee, 0xf0, - 0xdc, 0x9e, 0xd1, 0x25, 0x99, 0xb4, 0xb1, 0x94, 0x5f, 0xa4, 0x25, 0x52, 0xd7, 0x20, 0x8d, 0xaf, - 0x48, 0x07, 0xbf, 0x48, 0xf7, 0x21, 0xfd, 0x44, 0x48, 0xa5, 0x30, 0x28, 0x4f, 0x06, 0x65, 0xfd, - 0x6f, 0x28, 0x26, 0xe1, 0x86, 0xd1, 0x57, 0xcd, 0x99, 0xbe, 0x25, 0x23, 0x0e, 0x88, 0x22, 0xf3, - 0x55, 0x02, 0xb2, 0x6c, 0xef, 0x99, 0xf6, 0x61, 0x6b, 0xba, 0x0f, 0xe7, 0x5f, 0x09, 0xfd, 0xfb, - 0x73, 0xe8, 0x73, 0xd2, 0x4d, 0xc5, 0xa5, 0x5e, 0x61, 0xf9, 0x48, 0x97, 0xa4, 0xf7, 0xc3, 0xcf, - 0xb4, 0x98, 0x75, 0x6c, 0x6b, 0x31, 0x58, 0x4d, 0x59, 0x75, 0x5f, 0xac, 0xb9, 0x2f, 0xf6, 0xb9, - 0x7c, 0xeb, 0x56, 0xa2, 0x6d, 0x67, 0x63, 0xcd, 0x0f, 0x64, 0xf4, 0x27, 0xdd, 0xff, 0x48, 0xdd, - 0x9d, 0xc8, 0x2b, 0x0e, 
0x39, 0x93, 0x42, 0x45, 0x59, 0xf2, 0xf3, 0x7e, 0x49, 0xbb, 0xe9, 0xa3, - 0x2d, 0x7d, 0x0c, 0xbe, 0x6d, 0xe3, 0x44, 0x7d, 0xd7, 0x01, 0xe3, 0x90, 0x3b, 0xb5, 0xd7, 0xb9, - 0x7a, 0x1d, 0x9e, 0x25, 0x42, 0x2a, 0x27, 0x86, 0x18, 0x8f, 0xbc, 0x35, 0x37, 0xbf, 0x71, 0xf0, - 0xcc, 0x44, 0xbf, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xce, 0x3a, 0x9b, 0x51, 0xd6, 0x02, 0x00, - 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go deleted file mode 100644 index 9d1e094d8..000000000 --- a/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go +++ /dev/null @@ -1,119 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/skiptaskrequest.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import any "github.com/golang/protobuf/ptypes/any" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type SkipTaskRequest struct { - TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput,proto3" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - TaskInputMessage *any.Any `protobuf:"bytes,3,opt,name=task_input_message,json=taskInputMessage,proto3" json:"task_input_message,omitempty"` - TaskOutputMessage *any.Any `protobuf:"bytes,4,opt,name=task_output_message,json=taskOutputMessage,proto3" json:"task_output_message,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } -func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } -func (*SkipTaskRequest) ProtoMessage() {} -func (*SkipTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_skiptaskrequest_fb745ec89a45d156, []int{0} -} -func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) -} -func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) -} -func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SkipTaskRequest.Merge(dst, src) -} -func (m *SkipTaskRequest) XXX_Size() int { - return xxx_messageInfo_SkipTaskRequest.Size(m) -} -func (m *SkipTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo - -func (m *SkipTaskRequest) GetTaskInput() map[string]*_struct.Value { - if 
m != nil { - return m.TaskInput - } - return nil -} - -func (m *SkipTaskRequest) GetTaskOutput() map[string]*_struct.Value { - if m != nil { - return m.TaskOutput - } - return nil -} - -func (m *SkipTaskRequest) GetTaskInputMessage() *any.Any { - if m != nil { - return m.TaskInputMessage - } - return nil -} - -func (m *SkipTaskRequest) GetTaskOutputMessage() *any.Any { - if m != nil { - return m.TaskOutputMessage - } - return nil -} - -func init() { - proto.RegisterType((*SkipTaskRequest)(nil), "conductor.proto.SkipTaskRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskInputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskOutputEntry") -} - -func init() { - proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_fb745ec89a45d156) -} - -var fileDescriptor_skiptaskrequest_fb745ec89a45d156 = []byte{ - // 348 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xbd, 0x4e, 0xc3, 0x30, - 0x14, 0x85, 0x95, 0x06, 0x90, 0xea, 0x4a, 0xb4, 0x35, 0x08, 0x85, 0x94, 0xa1, 0x62, 0xea, 0x80, - 0x6c, 0x54, 0x16, 0xd4, 0x8d, 0x0a, 0x06, 0x06, 0xfe, 0x42, 0x61, 0x60, 0xa9, 0x12, 0xd7, 0x0d, - 0x51, 0x12, 0x3b, 0xc4, 0x36, 0x22, 0xcf, 0xcc, 0x4b, 0xa0, 0xd8, 0xfd, 0xc3, 0x48, 0x88, 0x81, - 0xcd, 0xbe, 0xf7, 0x9e, 0xef, 0x9e, 0x23, 0x1b, 0xf4, 0x72, 0x3e, 0xa3, 0x19, 0x16, 0x69, 0x52, - 0xc8, 0x50, 0xa4, 0x25, 0x7d, 0x53, 0x54, 0x48, 0x54, 0x94, 0x5c, 0x72, 0xd8, 0x26, 0x9c, 0xcd, - 0x14, 0x91, 0xbc, 0x34, 0x05, 0xff, 0x28, 0xe6, 0x3c, 0xce, 0x28, 0xd6, 0xb7, 0x48, 0xcd, 0xb1, - 0x90, 0xa5, 0x22, 0x8b, 0x71, 0xff, 0xd0, 0xee, 0x86, 0xac, 0x32, 0xad, 0xe3, 0x4f, 0x17, 0xb4, - 0x1f, 0xd3, 0xa4, 0x98, 0x84, 0x22, 0x0d, 0xcc, 0x0e, 0x78, 0x0b, 0x40, 0xbd, 0x72, 0x9a, 0xb0, - 0x42, 0x49, 0xcf, 0xe9, 0xbb, 0x83, 0xd6, 0x10, 0x23, 0x6b, 0x25, 0xb2, 0x54, 0xa8, 0x3e, 0x5f, - 0xd7, 0x8a, 0x2b, 0x26, 
0xcb, 0x2a, 0x68, 0xca, 0xe5, 0x1d, 0x3e, 0x80, 0x96, 0xe6, 0x71, 0x25, - 0x6b, 0x60, 0x43, 0x03, 0x4f, 0xff, 0x04, 0xbc, 0xd3, 0x12, 0x43, 0xd4, 0xa6, 0x4c, 0x01, 0x8e, - 0x01, 0x5c, 0x5b, 0x9c, 0xe6, 0x54, 0x88, 0x30, 0xa6, 0x9e, 0xdb, 0x77, 0x06, 0xad, 0xe1, 0x3e, - 0x32, 0x71, 0xd1, 0x32, 0x2e, 0xba, 0x60, 0x55, 0xd0, 0x59, 0xf9, 0xb9, 0x31, 0xd3, 0xf0, 0x12, - 0xec, 0x6d, 0xd8, 0x5a, 0x41, 0xb6, 0x7e, 0x81, 0x74, 0xd7, 0x16, 0x16, 0x14, 0x7f, 0x02, 0x76, - 0xbf, 0x27, 0x87, 0x1d, 0xe0, 0xa6, 0xb4, 0xf2, 0x9c, 0xbe, 0x33, 0x68, 0x06, 0xf5, 0x11, 0x9e, - 0x80, 0xed, 0xf7, 0x30, 0x53, 0xd4, 0x6b, 0x68, 0xf6, 0xc1, 0x0f, 0xf6, 0x73, 0xdd, 0x0d, 0xcc, - 0xd0, 0xa8, 0x71, 0xee, 0xf8, 0x4f, 0xa0, 0x6d, 0xc5, 0xff, 0x0f, 0xec, 0x38, 0x03, 0x3d, 0xc2, - 0x73, 0xc4, 0xa8, 0x9c, 0x67, 0xc9, 0x87, 0xfd, 0x02, 0xe3, 0xae, 0xf5, 0x04, 0xf7, 0xd1, 0xcb, - 0x28, 0x4e, 0xe4, 0xab, 0x8a, 0x10, 0xe1, 0x39, 0x5e, 0xc8, 0xf0, 0x4a, 0x86, 0x49, 0x96, 0x50, - 0x26, 0x71, 0xcc, 0xe3, 0xb2, 0x20, 0x1b, 0x75, 0xfd, 0x77, 0xa3, 0x1d, 0x4d, 0x3d, 0xfb, 0x0a, - 0x00, 0x00, 0xff, 0xff, 0xcb, 0xb6, 0xee, 0xfd, 0xcb, 0x02, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go deleted file mode 100644 index 76ef27809..000000000 --- a/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go +++ /dev/null @@ -1,137 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/startworkflowrequest.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type StartWorkflowRequest struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - TaskToDomain map[string]string `protobuf:"bytes,5,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - WorkflowDef *WorkflowDef `protobuf:"bytes,6,opt,name=workflow_def,json=workflowDef,proto3" json:"workflow_def,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StartWorkflowRequest) Reset() { *m = StartWorkflowRequest{} } -func (m *StartWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*StartWorkflowRequest) ProtoMessage() {} -func (*StartWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_startworkflowrequest_3ab5c2434a152277, []int{0} -} -func (m *StartWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StartWorkflowRequest.Unmarshal(m, b) -} -func (m *StartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StartWorkflowRequest.Marshal(b, m, 
deterministic) -} -func (dst *StartWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_StartWorkflowRequest.Merge(dst, src) -} -func (m *StartWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_StartWorkflowRequest.Size(m) -} -func (m *StartWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_StartWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_StartWorkflowRequest proto.InternalMessageInfo - -func (m *StartWorkflowRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *StartWorkflowRequest) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *StartWorkflowRequest) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func (m *StartWorkflowRequest) GetInput() map[string]*_struct.Value { - if m != nil { - return m.Input - } - return nil -} - -func (m *StartWorkflowRequest) GetTaskToDomain() map[string]string { - if m != nil { - return m.TaskToDomain - } - return nil -} - -func (m *StartWorkflowRequest) GetWorkflowDef() *WorkflowDef { - if m != nil { - return m.WorkflowDef - } - return nil -} - -func init() { - proto.RegisterType((*StartWorkflowRequest)(nil), "conductor.proto.StartWorkflowRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.StartWorkflowRequest.InputEntry") - proto.RegisterMapType((map[string]string)(nil), "conductor.proto.StartWorkflowRequest.TaskToDomainEntry") -} - -func init() { - proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_3ab5c2434a152277) -} - -var fileDescriptor_startworkflowrequest_3ab5c2434a152277 = []byte{ - // 396 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x51, 0xab, 0xd3, 0x30, - 0x14, 0x80, 0xe9, 0xed, 0xed, 0x95, 0x9b, 0x5e, 0xaf, 0x1a, 0x2e, 0xd7, 0x32, 0xf7, 0x50, 0x04, - 0xa1, 0x0f, 0x92, 0xca, 0x7c, 0x50, 0xf6, 0x32, 0x18, 0x53, 0xd8, 0xdb, 
0xa8, 0x43, 0x41, 0x90, - 0xd2, 0xa6, 0x69, 0x0d, 0x6d, 0x73, 0xb6, 0x34, 0xdd, 0xdc, 0x1f, 0xf6, 0x77, 0x48, 0xd3, 0xd6, - 0x95, 0x6d, 0x0f, 0xf7, 0x2d, 0xe7, 0x24, 0xdf, 0x97, 0x93, 0x73, 0x82, 0xdc, 0x12, 0x12, 0x56, - 0xf8, 0x95, 0x8a, 0xa4, 0xda, 0x83, 0xcc, 0xd3, 0x02, 0xf6, 0x92, 0x6d, 0x6b, 0x56, 0x29, 0xb2, - 0x91, 0xa0, 0x00, 0xbf, 0xa0, 0x20, 0x92, 0x9a, 0x2a, 0x90, 0x6d, 0x62, 0xf4, 0xba, 0x45, 0xfa, - 0xd3, 0x09, 0x4b, 0xbb, 0x8d, 0x71, 0x06, 0x90, 0x15, 0xcc, 0xd7, 0x51, 0x5c, 0xa7, 0x7e, 0xa5, - 0x64, 0x4d, 0x3b, 0xcf, 0xdb, 0xbf, 0x26, 0x7a, 0xf8, 0xd6, 0x5c, 0xf3, 0xa3, 0x03, 0x83, 0xf6, - 0x1a, 0x8c, 0xd1, 0xb5, 0x88, 0x4a, 0xe6, 0x18, 0xae, 0xe1, 0xdd, 0x06, 0x7a, 0x8d, 0x1d, 0xf4, - 0x6c, 0xc7, 0x64, 0xc5, 0x41, 0x38, 0x57, 0xae, 0xe1, 0x59, 0x41, 0x1f, 0xe2, 0x77, 0xe8, 0x9e, - 0x82, 0x94, 0xac, 0x88, 0x14, 0x07, 0x11, 0xf2, 0xc4, 0x31, 0x35, 0xf7, 0x7c, 0x90, 0x5d, 0x26, - 0xf8, 0x2b, 0xb2, 0xb8, 0xd8, 0xd4, 0xca, 0xb9, 0x76, 0x4d, 0xcf, 0x9e, 0x7c, 0x20, 0x27, 0xaf, - 0x20, 0x97, 0x4a, 0x21, 0xcb, 0x06, 0xf9, 0x22, 0x94, 0x3c, 0x04, 0x2d, 0x8e, 0x7f, 0xa1, 0x7b, - 0x15, 0x55, 0x79, 0xa8, 0x20, 0x4c, 0xa0, 0x8c, 0xb8, 0x70, 0x2c, 0x2d, 0xfc, 0xf4, 0x34, 0xe1, - 0x3a, 0xaa, 0xf2, 0x35, 0x2c, 0x34, 0xd9, 0x7a, 0xef, 0xd4, 0x20, 0x85, 0x67, 0xe8, 0xae, 0xef, - 0x63, 0x98, 0xb0, 0xd4, 0xb9, 0x71, 0x0d, 0xcf, 0x9e, 0x8c, 0xcf, 0xe4, 0xbd, 0x77, 0xc1, 0xd2, - 0xc0, 0xde, 0x1f, 0x83, 0xd1, 0x0a, 0xa1, 0x63, 0xd1, 0xf8, 0x25, 0x32, 0x73, 0x76, 0xe8, 0x3a, - 0xd9, 0x2c, 0xf1, 0x7b, 0x64, 0xed, 0xa2, 0xa2, 0x66, 0xba, 0x8d, 0xf6, 0xe4, 0x91, 0xb4, 0x33, - 0x22, 0xfd, 0x8c, 0xc8, 0xf7, 0x66, 0x37, 0x68, 0x0f, 0x4d, 0xaf, 0x3e, 0x1b, 0xa3, 0x19, 0x7a, - 0x75, 0x56, 0xf5, 0x05, 0xf1, 0xc3, 0x50, 0x7c, 0x3b, 0x10, 0xcc, 0xb7, 0xe8, 0x0d, 0x85, 0x92, - 0x08, 0xa6, 0xd2, 0x82, 0xff, 0x39, 0x7d, 0xca, 0xfc, 0xf1, 0x52, 0xa3, 0x56, 0xf1, 0xcf, 0x69, - 0xc6, 0xd5, 0xef, 0x3a, 0x26, 0x14, 0x4a, 0xbf, 0x63, 0xfd, 0xff, 0xac, 0x4f, 0x0b, 0xce, 0x84, - 0xf2, 0x33, 
0xc8, 0xe4, 0x86, 0x0e, 0xf2, 0xfa, 0x2f, 0xc6, 0x37, 0x5a, 0xfd, 0xf1, 0x5f, 0x00, - 0x00, 0x00, 0xff, 0xff, 0x26, 0x3f, 0xa7, 0x2d, 0xce, 0x02, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go b/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go deleted file mode 100644 index 3a331c4ff..000000000 --- a/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/subworkflowparams.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type SubWorkflowParams struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SubWorkflowParams) Reset() { *m = SubWorkflowParams{} } -func (m *SubWorkflowParams) String() string { return proto.CompactTextString(m) } -func (*SubWorkflowParams) ProtoMessage() {} -func (*SubWorkflowParams) Descriptor() ([]byte, []int) { - return fileDescriptor_subworkflowparams_247aeccdfb62062e, []int{0} -} -func (m *SubWorkflowParams) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SubWorkflowParams.Unmarshal(m, b) -} -func (m *SubWorkflowParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SubWorkflowParams.Marshal(b, m, deterministic) -} -func (dst *SubWorkflowParams) XXX_Merge(src proto.Message) { - xxx_messageInfo_SubWorkflowParams.Merge(dst, src) -} -func (m *SubWorkflowParams) XXX_Size() int { - return xxx_messageInfo_SubWorkflowParams.Size(m) -} -func (m *SubWorkflowParams) XXX_DiscardUnknown() { - xxx_messageInfo_SubWorkflowParams.DiscardUnknown(m) -} - -var xxx_messageInfo_SubWorkflowParams proto.InternalMessageInfo - -func (m *SubWorkflowParams) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *SubWorkflowParams) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func init() { - proto.RegisterType((*SubWorkflowParams)(nil), "conductor.proto.SubWorkflowParams") -} - -func init() { - proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_247aeccdfb62062e) -} - -var fileDescriptor_subworkflowparams_247aeccdfb62062e = []byte{ - // 183 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0xe2, 0x92, 0xcd, 0xcd, 0x4f, 0x49, - 0xcd, 0xd1, 0x2f, 0x2e, 0x4d, 0x2a, 0xcf, 0x2f, 0xca, 0x4e, 0xcb, 0xc9, 0x2f, 0x2f, 0x48, 0x2c, - 0x4a, 0xcc, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, - 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x28, 0x39, 0x72, 0x09, 0x06, 0x97, 0x26, 0x85, 0x43, 0xd5, - 0x06, 0x80, 0xd5, 0x0a, 0x09, 0x71, 0xb1, 0xe4, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a, - 0x70, 0x06, 0x81, 0xd9, 0x42, 0x12, 0x5c, 0xec, 0x65, 0xa9, 0x45, 0xc5, 0x99, 0xf9, 0x79, 0x12, - 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x30, 0xae, 0x53, 0x1e, 0x97, 0x74, 0x72, 0x7e, 0xae, 0x5e, - 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x85, 0x1e, 0x9a, 0x0d, 0x4e, 0xc2, 0x18, 0xe6, 0x07, 0x24, - 0x45, 0x59, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0x35, 0xea, - 0xc3, 0x35, 0xea, 0x27, 0xe7, 0x64, 0xa6, 0xe6, 0x95, 0xe8, 0xa7, 0xe7, 0xa7, 0x17, 0x15, 0x24, - 0x23, 0x89, 0x83, 0xfd, 0x96, 0xc4, 0x06, 0x36, 0xd7, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x52, - 0x9c, 0xc5, 0x01, 0xeb, 0x00, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/task.pb.go b/polyglot-clients/gogrpc/conductor/model/task.pb.go deleted file mode 100644 index 5dc07ae9d..000000000 --- a/polyglot-clients/gogrpc/conductor/model/task.pb.go +++ /dev/null @@ -1,422 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/task.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import any "github.com/golang/protobuf/ptypes/any" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. 
-// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type Task_Status int32 - -const ( - Task_IN_PROGRESS Task_Status = 0 - Task_CANCELED Task_Status = 1 - Task_FAILED Task_Status = 2 - Task_FAILED_WITH_TERMINAL_ERROR Task_Status = 3 - Task_COMPLETED Task_Status = 4 - Task_COMPLETED_WITH_ERRORS Task_Status = 5 - Task_SCHEDULED Task_Status = 6 - Task_TIMED_OUT Task_Status = 7 - Task_READY_FOR_RERUN Task_Status = 8 - Task_SKIPPED Task_Status = 9 -) - -var Task_Status_name = map[int32]string{ - 0: "IN_PROGRESS", - 1: "CANCELED", - 2: "FAILED", - 3: "FAILED_WITH_TERMINAL_ERROR", - 4: "COMPLETED", - 5: "COMPLETED_WITH_ERRORS", - 6: "SCHEDULED", - 7: "TIMED_OUT", - 8: "READY_FOR_RERUN", - 9: "SKIPPED", -} -var Task_Status_value = map[string]int32{ - "IN_PROGRESS": 0, - "CANCELED": 1, - "FAILED": 2, - "FAILED_WITH_TERMINAL_ERROR": 3, - "COMPLETED": 4, - "COMPLETED_WITH_ERRORS": 5, - "SCHEDULED": 6, - "TIMED_OUT": 7, - "READY_FOR_RERUN": 8, - "SKIPPED": 9, -} - -func (x Task_Status) String() string { - return proto.EnumName(Task_Status_name, int32(x)) -} -func (Task_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_task_0f54bf88f0e3aec0, []int{0, 0} -} - -type Task struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - Status Task_Status `protobuf:"varint,2,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` - InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData,proto3" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName,proto3" json:"reference_task_name,omitempty"` - RetryCount int32 
`protobuf:"varint,5,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` - Seq int32 `protobuf:"varint,6,opt,name=seq,proto3" json:"seq,omitempty"` - CorrelationId string `protobuf:"bytes,7,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - PollCount int32 `protobuf:"varint,8,opt,name=poll_count,json=pollCount,proto3" json:"poll_count,omitempty"` - TaskDefName string `protobuf:"bytes,9,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` - ScheduledTime int64 `protobuf:"varint,10,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` - StartTime int64 `protobuf:"varint,11,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` - EndTime int64 `protobuf:"varint,12,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - UpdateTime int64 `protobuf:"varint,13,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` - StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds,proto3" json:"start_delay_in_seconds,omitempty"` - RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId,proto3" json:"retried_task_id,omitempty"` - Retried bool `protobuf:"varint,16,opt,name=retried,proto3" json:"retried,omitempty"` - Executed bool `protobuf:"varint,17,opt,name=executed,proto3" json:"executed,omitempty"` - CallbackFromWorker bool `protobuf:"varint,18,opt,name=callback_from_worker,json=callbackFromWorker,proto3" json:"callback_from_worker,omitempty"` - ResponseTimeoutSeconds int32 `protobuf:"varint,19,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` - WorkflowInstanceId string `protobuf:"bytes,20,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` - WorkflowType string `protobuf:"bytes,21,opt,name=workflow_type,json=workflowType,proto3" 
json:"workflow_type,omitempty"` - TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,23,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` - CallbackAfterSeconds int64 `protobuf:"varint,24,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` - WorkerId string `protobuf:"bytes,25,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` - OutputData map[string]*_struct.Value `protobuf:"bytes,26,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - WorkflowTask *WorkflowTask `protobuf:"bytes,27,opt,name=workflow_task,json=workflowTask,proto3" json:"workflow_task,omitempty"` - Domain string `protobuf:"bytes,28,opt,name=domain,proto3" json:"domain,omitempty"` - InputMessage *any.Any `protobuf:"bytes,29,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` - OutputMessage *any.Any `protobuf:"bytes,30,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` - RateLimitPerSecond int32 `protobuf:"varint,31,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Task) Reset() { *m = Task{} } -func (m *Task) String() string { return proto.CompactTextString(m) } -func (*Task) ProtoMessage() {} -func (*Task) Descriptor() ([]byte, []int) { - return fileDescriptor_task_0f54bf88f0e3aec0, []int{0} -} -func (m *Task) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Task.Unmarshal(m, b) -} -func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Task.Marshal(b, m, deterministic) -} 
-func (dst *Task) XXX_Merge(src proto.Message) { - xxx_messageInfo_Task.Merge(dst, src) -} -func (m *Task) XXX_Size() int { - return xxx_messageInfo_Task.Size(m) -} -func (m *Task) XXX_DiscardUnknown() { - xxx_messageInfo_Task.DiscardUnknown(m) -} - -var xxx_messageInfo_Task proto.InternalMessageInfo - -func (m *Task) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *Task) GetStatus() Task_Status { - if m != nil { - return m.Status - } - return Task_IN_PROGRESS -} - -func (m *Task) GetInputData() map[string]*_struct.Value { - if m != nil { - return m.InputData - } - return nil -} - -func (m *Task) GetReferenceTaskName() string { - if m != nil { - return m.ReferenceTaskName - } - return "" -} - -func (m *Task) GetRetryCount() int32 { - if m != nil { - return m.RetryCount - } - return 0 -} - -func (m *Task) GetSeq() int32 { - if m != nil { - return m.Seq - } - return 0 -} - -func (m *Task) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func (m *Task) GetPollCount() int32 { - if m != nil { - return m.PollCount - } - return 0 -} - -func (m *Task) GetTaskDefName() string { - if m != nil { - return m.TaskDefName - } - return "" -} - -func (m *Task) GetScheduledTime() int64 { - if m != nil { - return m.ScheduledTime - } - return 0 -} - -func (m *Task) GetStartTime() int64 { - if m != nil { - return m.StartTime - } - return 0 -} - -func (m *Task) GetEndTime() int64 { - if m != nil { - return m.EndTime - } - return 0 -} - -func (m *Task) GetUpdateTime() int64 { - if m != nil { - return m.UpdateTime - } - return 0 -} - -func (m *Task) GetStartDelayInSeconds() int32 { - if m != nil { - return m.StartDelayInSeconds - } - return 0 -} - -func (m *Task) GetRetriedTaskId() string { - if m != nil { - return m.RetriedTaskId - } - return "" -} - -func (m *Task) GetRetried() bool { - if m != nil { - return m.Retried - } - return false -} - -func (m *Task) GetExecuted() bool { - if m != nil { - return 
m.Executed - } - return false -} - -func (m *Task) GetCallbackFromWorker() bool { - if m != nil { - return m.CallbackFromWorker - } - return false -} - -func (m *Task) GetResponseTimeoutSeconds() int32 { - if m != nil { - return m.ResponseTimeoutSeconds - } - return 0 -} - -func (m *Task) GetWorkflowInstanceId() string { - if m != nil { - return m.WorkflowInstanceId - } - return "" -} - -func (m *Task) GetWorkflowType() string { - if m != nil { - return m.WorkflowType - } - return "" -} - -func (m *Task) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *Task) GetReasonForIncompletion() string { - if m != nil { - return m.ReasonForIncompletion - } - return "" -} - -func (m *Task) GetCallbackAfterSeconds() int64 { - if m != nil { - return m.CallbackAfterSeconds - } - return 0 -} - -func (m *Task) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *Task) GetOutputData() map[string]*_struct.Value { - if m != nil { - return m.OutputData - } - return nil -} - -func (m *Task) GetWorkflowTask() *WorkflowTask { - if m != nil { - return m.WorkflowTask - } - return nil -} - -func (m *Task) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -func (m *Task) GetInputMessage() *any.Any { - if m != nil { - return m.InputMessage - } - return nil -} - -func (m *Task) GetOutputMessage() *any.Any { - if m != nil { - return m.OutputMessage - } - return nil -} - -func (m *Task) GetRateLimitPerSecond() int32 { - if m != nil { - return m.RateLimitPerSecond - } - return 0 -} - -func init() { - proto.RegisterType((*Task)(nil), "conductor.proto.Task") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.InputDataEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.OutputDataEntry") - proto.RegisterEnum("conductor.proto.Task_Status", Task_Status_name, Task_Status_value) -} - -func init() { proto.RegisterFile("model/task.proto", 
fileDescriptor_task_0f54bf88f0e3aec0) } - -var fileDescriptor_task_0f54bf88f0e3aec0 = []byte{ - // 1004 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x73, 0xda, 0x46, - 0x14, 0x2d, 0xb1, 0xcd, 0xc7, 0xc5, 0x80, 0xb2, 0xb6, 0xf1, 0x1a, 0xdb, 0x31, 0xe3, 0xd6, 0x1d, - 0x1e, 0x3a, 0x90, 0x3a, 0x99, 0x4e, 0x9a, 0x3e, 0x61, 0x90, 0x1b, 0x4d, 0x6d, 0xc3, 0x08, 0x5c, - 0x4f, 0xfb, 0xb2, 0xb3, 0x96, 0x16, 0xa2, 0x41, 0xd2, 0xd2, 0xd5, 0xaa, 0x09, 0xbf, 0xa7, 0xbf, - 0xa3, 0xff, 0xad, 0xb3, 0xbb, 0x48, 0xa1, 0x4e, 0xa6, 0x4f, 0x7d, 0xdb, 0x3d, 0xe7, 0xdc, 0xcb, - 0xbd, 0x47, 0x7b, 0x2f, 0x60, 0x45, 0xdc, 0x67, 0x61, 0x4f, 0xd2, 0x64, 0xd1, 0x5d, 0x0a, 0x2e, - 0x39, 0x6a, 0x78, 0x3c, 0xf6, 0x53, 0x4f, 0x72, 0x61, 0x80, 0x16, 0x36, 0x92, 0x0f, 0x5c, 0x2c, - 0x66, 0x21, 0xff, 0xf0, 0x49, 0xda, 0x3a, 0x99, 0x73, 0x3e, 0x0f, 0x59, 0x4f, 0xdf, 0x1e, 0xd3, - 0x59, 0x2f, 0x91, 0x22, 0xf5, 0xe4, 0x9a, 0x3d, 0x7a, 0xca, 0xd2, 0x78, 0x65, 0xa8, 0xf3, 0xbf, - 0x6a, 0xb0, 0x3d, 0xa5, 0xc9, 0x02, 0x1d, 0x43, 0x45, 0xe5, 0x23, 0x72, 0xb5, 0x64, 0xb8, 0xd0, - 0x2e, 0x74, 0x2a, 0x6e, 0x59, 0x01, 0xd3, 0xd5, 0x92, 0xa1, 0xd7, 0x50, 0x4c, 0x24, 0x95, 0x69, - 0x82, 0x9f, 0xb5, 0x0b, 0x9d, 0xfa, 0xe5, 0x49, 0xf7, 0x49, 0x69, 0x5d, 0x95, 0xa3, 0x3b, 0xd1, - 0x1a, 0x77, 0xad, 0x45, 0x03, 0x80, 0x20, 0x5e, 0xa6, 0x92, 0xf8, 0x54, 0x52, 0xbc, 0xd5, 0xde, - 0xea, 0x54, 0x2f, 0xbf, 0xf9, 0x72, 0xa4, 0xa3, 0x74, 0x43, 0x2a, 0xa9, 0x1d, 0x4b, 0xb1, 0x72, - 0x2b, 0x41, 0x76, 0x47, 0x5d, 0xd8, 0x13, 0x6c, 0xc6, 0x04, 0x8b, 0x3d, 0x46, 0x74, 0x85, 0x31, - 0x8d, 0x18, 0xde, 0xd6, 0x15, 0x3e, 0xcf, 0x29, 0x95, 0xe5, 0x8e, 0x46, 0x0c, 0x9d, 0x41, 0x55, - 0x30, 0x29, 0x56, 0xc4, 0xe3, 0x69, 0x2c, 0xf1, 0x4e, 0xbb, 0xd0, 0xd9, 0x71, 0x41, 0x43, 0x03, - 0x85, 0x20, 0x0b, 0xb6, 0x12, 0xf6, 0x07, 0x2e, 0x6a, 0x42, 0x1d, 0xd1, 0x05, 0xd4, 0x3d, 0x2e, - 0x04, 0x0b, 0xa9, 0x0c, 0x78, 0x4c, 0x02, 0x1f, 0x97, 0x74, 0xf6, 0xda, 0x06, 0xea, 0xf8, 
0xe8, - 0x14, 0x60, 0xc9, 0xc3, 0x70, 0x9d, 0xb8, 0xac, 0xe3, 0x2b, 0x0a, 0x31, 0x79, 0xcf, 0xa1, 0xa6, - 0xcb, 0xf3, 0xd9, 0xcc, 0x94, 0x58, 0xd1, 0x49, 0xaa, 0x0a, 0x1c, 0xb2, 0x99, 0x2e, 0xee, 0x02, - 0xea, 0x89, 0xf7, 0x9e, 0xf9, 0x69, 0xc8, 0x7c, 0x22, 0x83, 0x88, 0x61, 0x68, 0x17, 0x3a, 0x5b, - 0x6e, 0x2d, 0x47, 0xa7, 0x41, 0xc4, 0xd4, 0x2f, 0x25, 0x92, 0x0a, 0x69, 0x24, 0x55, 0x2d, 0xa9, - 0x68, 0x44, 0xd3, 0x47, 0x50, 0x66, 0xf1, 0x3a, 0x7e, 0x57, 0x93, 0x25, 0x16, 0x9b, 0xc8, 0x33, - 0xa8, 0xa6, 0x4b, 0x9f, 0x4a, 0x66, 0xd8, 0x9a, 0x66, 0xc1, 0x40, 0x5a, 0xf0, 0x0a, 0x9a, 0x26, - 0xb5, 0xcf, 0x42, 0xba, 0x22, 0x41, 0x4c, 0x12, 0xa6, 0xbe, 0x48, 0x82, 0xeb, 0xba, 0xa1, 0x3d, - 0xcd, 0x0e, 0x15, 0xe9, 0xc4, 0x13, 0x43, 0xa1, 0x6f, 0xa1, 0xa1, 0x0c, 0x0c, 0x54, 0xd1, 0xaa, - 0xc5, 0xc0, 0xc7, 0x0d, 0xe3, 0xd0, 0x1a, 0x56, 0xee, 0x3b, 0x3e, 0xc2, 0x50, 0x5a, 0x03, 0xd8, - 0x6a, 0x17, 0x3a, 0x65, 0x37, 0xbb, 0xa2, 0x16, 0x94, 0xd9, 0x47, 0xe6, 0xa5, 0x92, 0xf9, 0xf8, - 0xb9, 0xa6, 0xf2, 0x3b, 0x7a, 0x09, 0xfb, 0x1e, 0x0d, 0xc3, 0x47, 0xea, 0x2d, 0xc8, 0x4c, 0xf0, - 0x88, 0xa8, 0xf7, 0xcd, 0x04, 0x46, 0x5a, 0x87, 0x32, 0xee, 0x5a, 0xf0, 0xe8, 0x41, 0x33, 0xe8, - 0x0d, 0x60, 0xc1, 0x92, 0x25, 0x8f, 0x13, 0xd3, 0x27, 0x4f, 0x65, 0xde, 0xc6, 0x9e, 0x6e, 0xa3, - 0x99, 0xf1, 0x53, 0x43, 0x67, 0x9d, 0xbc, 0x84, 0xfd, 0x6c, 0x7a, 0x48, 0x10, 0x27, 0x92, 0xaa, - 0x57, 0x15, 0xf8, 0x78, 0x5f, 0xb7, 0x83, 0x32, 0xce, 0x59, 0x53, 0x8e, 0x8f, 0xbe, 0x86, 0x5a, - 0x1e, 0xa1, 0x67, 0xe3, 0x40, 0x4b, 0x77, 0x33, 0x50, 0xcf, 0xc7, 0x21, 0x94, 0x32, 0x63, 0x9a, - 0x9a, 0x2e, 0x4a, 0xe3, 0xc8, 0x0f, 0x70, 0x28, 0x18, 0x4d, 0x78, 0x4c, 0x66, 0x5c, 0x90, 0x20, - 0xf6, 0x78, 0xb4, 0x0c, 0x99, 0x7a, 0x50, 0xf8, 0x50, 0x0b, 0x0f, 0x0c, 0x7d, 0xcd, 0x85, 0xb3, - 0x41, 0xa2, 0xd7, 0xd0, 0xcc, 0x3d, 0xa1, 0x33, 0xc9, 0x44, 0xde, 0x1f, 0xd6, 0x9f, 0x34, 0x77, - 0xac, 0xaf, 0xc8, 0xac, 0xbb, 0x63, 0xa8, 0x18, 0xef, 0x54, 0x21, 0x47, 0x66, 0x86, 0x0d, 0xe0, - 0xf8, 0xe8, 0x1a, 0xaa, 0x3c, 
0x95, 0xf9, 0x38, 0xb6, 0xf4, 0x38, 0x5e, 0x7c, 0x79, 0x1c, 0x47, - 0x5a, 0xf8, 0x69, 0x1e, 0x81, 0xe7, 0x00, 0xba, 0xda, 0x34, 0x84, 0x26, 0x0b, 0x7c, 0xdc, 0x2e, - 0x74, 0xaa, 0x97, 0xa7, 0x9f, 0x65, 0x7a, 0xc8, 0x1c, 0xa2, 0xc9, 0x62, 0xc3, 0x2f, 0xb5, 0x6c, - 0x9a, 0x50, 0xf4, 0x79, 0x44, 0x83, 0x18, 0x9f, 0x18, 0xbb, 0xcc, 0x0d, 0xfd, 0x08, 0x35, 0xb3, - 0x31, 0x22, 0x96, 0x24, 0x74, 0xce, 0xf0, 0xa9, 0xce, 0xbd, 0xdf, 0x35, 0x0b, 0xac, 0x9b, 0x2d, - 0xb0, 0x6e, 0x3f, 0x5e, 0xb9, 0xbb, 0x5a, 0x7a, 0x6b, 0x94, 0xe8, 0x27, 0xa8, 0xaf, 0xdb, 0xcb, - 0x62, 0x5f, 0xfc, 0x47, 0x6c, 0xcd, 0x68, 0xb3, 0xe0, 0xef, 0xe1, 0x40, 0xa8, 0xa1, 0x09, 0x83, - 0x28, 0x90, 0x64, 0x99, 0xdb, 0x8d, 0xcf, 0xf4, 0x6b, 0x42, 0x8a, 0xbc, 0x51, 0xdc, 0x38, 0x33, - 0xbb, 0x35, 0x85, 0xfa, 0xbf, 0x97, 0x96, 0x5a, 0x2c, 0x0b, 0xb6, 0x5a, 0xef, 0x4e, 0x75, 0x44, - 0xdf, 0xc1, 0xce, 0x9f, 0x34, 0x4c, 0x99, 0xde, 0x9a, 0xd5, 0xcb, 0xe6, 0x67, 0xa5, 0xfc, 0xaa, - 0x58, 0xd7, 0x88, 0xde, 0x3e, 0x7b, 0x53, 0x68, 0xdd, 0x43, 0xe3, 0x89, 0xf7, 0xff, 0x47, 0xda, - 0xf3, 0xbf, 0x0b, 0x50, 0x34, 0xcb, 0x19, 0x35, 0xa0, 0xea, 0xdc, 0x91, 0xb1, 0x3b, 0xfa, 0xd9, - 0xb5, 0x27, 0x13, 0xeb, 0x2b, 0xb4, 0x0b, 0xe5, 0x41, 0xff, 0x6e, 0x60, 0xdf, 0xd8, 0x43, 0xab, - 0x80, 0x00, 0x8a, 0xd7, 0x7d, 0x47, 0x9d, 0x9f, 0xa1, 0x17, 0xd0, 0x32, 0x67, 0xf2, 0xe0, 0x4c, - 0xdf, 0x91, 0xa9, 0xed, 0xde, 0x3a, 0x77, 0xfd, 0x1b, 0x62, 0xbb, 0xee, 0xc8, 0xb5, 0xb6, 0x50, - 0x0d, 0x2a, 0x83, 0xd1, 0xed, 0xf8, 0xc6, 0x9e, 0xda, 0x43, 0x6b, 0x1b, 0x1d, 0xc1, 0x41, 0x7e, - 0x35, 0x11, 0x5a, 0x38, 0xb1, 0x76, 0x94, 0x72, 0x32, 0x78, 0x67, 0x0f, 0xef, 0x55, 0xe2, 0xa2, - 0xba, 0x4e, 0x9d, 0x5b, 0x7b, 0x48, 0x46, 0xf7, 0x53, 0xab, 0x84, 0xf6, 0xa0, 0xe1, 0xda, 0xfd, - 0xe1, 0x6f, 0xe4, 0x7a, 0xe4, 0x12, 0xd7, 0x76, 0xef, 0xef, 0xac, 0x32, 0xaa, 0x42, 0x69, 0xf2, - 0x8b, 0x33, 0x1e, 0xdb, 0x43, 0xab, 0x72, 0x45, 0xe1, 0xd8, 0xe3, 0x51, 0x37, 0x66, 0x72, 0x16, - 0x06, 0x1f, 0x9f, 0xbe, 0xb4, 0xab, 0xa2, 0x7a, 0x54, 0xe3, 0xc7, 
0xdf, 0xdf, 0xce, 0x03, 0xf9, - 0x3e, 0x7d, 0xec, 0x7a, 0x3c, 0xea, 0xad, 0xb5, 0xbd, 0x5c, 0xdb, 0xf3, 0xc2, 0x80, 0xc5, 0xb2, - 0x37, 0xe7, 0x73, 0xb1, 0xf4, 0x36, 0x70, 0xfd, 0xa7, 0xfa, 0x58, 0xd4, 0xa9, 0x5e, 0xfd, 0x13, - 0x00, 0x00, 0xff, 0xff, 0xe9, 0xb6, 0x2c, 0x87, 0x87, 0x07, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go b/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go deleted file mode 100644 index a51239e06..000000000 --- a/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go +++ /dev/null @@ -1,254 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/taskdef.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type TaskDef_RetryLogic int32 - -const ( - TaskDef_FIXED TaskDef_RetryLogic = 0 - TaskDef_EXPONENTIAL_BACKOFF TaskDef_RetryLogic = 1 -) - -var TaskDef_RetryLogic_name = map[int32]string{ - 0: "FIXED", - 1: "EXPONENTIAL_BACKOFF", -} -var TaskDef_RetryLogic_value = map[string]int32{ - "FIXED": 0, - "EXPONENTIAL_BACKOFF": 1, -} - -func (x TaskDef_RetryLogic) String() string { - return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) -} -func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0, 0} -} - -type TaskDef_TimeoutPolicy int32 - -const ( - TaskDef_RETRY TaskDef_TimeoutPolicy = 0 - TaskDef_TIME_OUT_WF TaskDef_TimeoutPolicy = 1 - TaskDef_ALERT_ONLY TaskDef_TimeoutPolicy = 2 -) - -var TaskDef_TimeoutPolicy_name = map[int32]string{ - 0: "RETRY", - 1: "TIME_OUT_WF", - 2: "ALERT_ONLY", -} -var TaskDef_TimeoutPolicy_value = map[string]int32{ - "RETRY": 0, - "TIME_OUT_WF": 1, - "ALERT_ONLY": 2, -} - -func (x TaskDef_TimeoutPolicy) String() string { - return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) -} -func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0, 1} -} - -type TaskDef struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` - RetryCount int32 `protobuf:"varint,3,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` - TimeoutSeconds int64 `protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` - InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys,proto3" json:"input_keys,omitempty"` - OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys,proto3" json:"output_keys,omitempty"` - TimeoutPolicy 
TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,proto3,enum=conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` - RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,proto3,enum=conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` - RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds,proto3" json:"retry_delay_seconds,omitempty"` - ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` - ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit,proto3" json:"concurrent_exec_limit,omitempty"` - InputTemplate map[string]*_struct.Value `protobuf:"bytes,12,rep,name=input_template,json=inputTemplate,proto3" json:"input_template,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - RateLimitPerSecond int32 `protobuf:"varint,13,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskDef) Reset() { *m = TaskDef{} } -func (m *TaskDef) String() string { return proto.CompactTextString(m) } -func (*TaskDef) ProtoMessage() {} -func (*TaskDef) Descriptor() ([]byte, []int) { - return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0} -} -func (m *TaskDef) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskDef.Unmarshal(m, b) -} -func (m *TaskDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskDef.Marshal(b, m, deterministic) -} -func (dst *TaskDef) XXX_Merge(src proto.Message) { - xxx_messageInfo_TaskDef.Merge(dst, src) -} -func (m *TaskDef) XXX_Size() int { - return xxx_messageInfo_TaskDef.Size(m) -} -func (m 
*TaskDef) XXX_DiscardUnknown() { - xxx_messageInfo_TaskDef.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskDef proto.InternalMessageInfo - -func (m *TaskDef) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *TaskDef) GetDescription() string { - if m != nil { - return m.Description - } - return "" -} - -func (m *TaskDef) GetRetryCount() int32 { - if m != nil { - return m.RetryCount - } - return 0 -} - -func (m *TaskDef) GetTimeoutSeconds() int64 { - if m != nil { - return m.TimeoutSeconds - } - return 0 -} - -func (m *TaskDef) GetInputKeys() []string { - if m != nil { - return m.InputKeys - } - return nil -} - -func (m *TaskDef) GetOutputKeys() []string { - if m != nil { - return m.OutputKeys - } - return nil -} - -func (m *TaskDef) GetTimeoutPolicy() TaskDef_TimeoutPolicy { - if m != nil { - return m.TimeoutPolicy - } - return TaskDef_RETRY -} - -func (m *TaskDef) GetRetryLogic() TaskDef_RetryLogic { - if m != nil { - return m.RetryLogic - } - return TaskDef_FIXED -} - -func (m *TaskDef) GetRetryDelaySeconds() int32 { - if m != nil { - return m.RetryDelaySeconds - } - return 0 -} - -func (m *TaskDef) GetResponseTimeoutSeconds() int32 { - if m != nil { - return m.ResponseTimeoutSeconds - } - return 0 -} - -func (m *TaskDef) GetConcurrentExecLimit() int32 { - if m != nil { - return m.ConcurrentExecLimit - } - return 0 -} - -func (m *TaskDef) GetInputTemplate() map[string]*_struct.Value { - if m != nil { - return m.InputTemplate - } - return nil -} - -func (m *TaskDef) GetRateLimitPerSecond() int32 { - if m != nil { - return m.RateLimitPerSecond - } - return 0 -} - -func init() { - proto.RegisterType((*TaskDef)(nil), "conductor.proto.TaskDef") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskDef.InputTemplateEntry") - proto.RegisterEnum("conductor.proto.TaskDef_RetryLogic", TaskDef_RetryLogic_name, TaskDef_RetryLogic_value) - proto.RegisterEnum("conductor.proto.TaskDef_TimeoutPolicy", 
TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) -} - -func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_dbc1866a3715b3e8) } - -var fileDescriptor_taskdef_dbc1866a3715b3e8 = []byte{ - // 593 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x53, 0x51, 0x6f, 0xd3, 0x4c, - 0x10, 0xac, 0x9b, 0xa6, 0xfd, 0xb2, 0xf9, 0x92, 0x86, 0x8b, 0x5a, 0xac, 0x02, 0xc2, 0x2a, 0x12, - 0x44, 0x02, 0xd9, 0x10, 0x5e, 0xaa, 0xf2, 0xd4, 0x36, 0x8e, 0x14, 0x35, 0x6d, 0x22, 0x63, 0xa0, - 0xe5, 0xc5, 0x72, 0x2e, 0x1b, 0x63, 0xc5, 0xf6, 0x59, 0xe7, 0x33, 0xaa, 0xff, 0x23, 0x3f, 0x0a, - 0xdd, 0xd9, 0x69, 0xd3, 0xa0, 0xbe, 0xdd, 0xcd, 0xcc, 0xcd, 0xee, 0x8e, 0xd7, 0xd0, 0x8d, 0xd9, - 0x1c, 0x23, 0x4b, 0xf8, 0xd9, 0x72, 0x8e, 0x0b, 0x33, 0xe5, 0x4c, 0x30, 0xb2, 0x4f, 0x59, 0x32, - 0xcf, 0xa9, 0x60, 0xbc, 0x04, 0x8e, 0x5e, 0x06, 0x8c, 0x05, 0x11, 0x5a, 0xea, 0x36, 0xcb, 0x17, - 0x56, 0x26, 0x78, 0x4e, 0x45, 0xc9, 0x1e, 0xff, 0xd9, 0x85, 0x3d, 0xd7, 0xcf, 0x96, 0x03, 0x5c, - 0x10, 0x02, 0x3b, 0x89, 0x1f, 0xa3, 0xae, 0x19, 0x5a, 0xaf, 0xe1, 0xa8, 0x33, 0x31, 0xa0, 0x39, - 0xc7, 0x8c, 0xf2, 0x30, 0x15, 0x21, 0x4b, 0xf4, 0x6d, 0x45, 0xad, 0x43, 0xe4, 0x35, 0x34, 0x39, - 0x0a, 0x5e, 0x78, 0x94, 0xe5, 0x89, 0xd0, 0x6b, 0x86, 0xd6, 0xab, 0x3b, 0xa0, 0xa0, 0x0b, 0x89, - 0x90, 0x77, 0xb0, 0x2f, 0xc2, 0x18, 0x59, 0x2e, 0xbc, 0x0c, 0x65, 0x77, 0x99, 0xbe, 0x63, 0x68, - 0xbd, 0x9a, 0xd3, 0xae, 0xe0, 0xaf, 0x25, 0x4a, 0x5e, 0x01, 0x84, 0x49, 0x9a, 0x0b, 0x6f, 0x89, - 0x45, 0xa6, 0xd7, 0x8d, 0x5a, 0xaf, 0xe1, 0x34, 0x14, 0x72, 0x89, 0x45, 0x26, 0x0b, 0xb1, 0x5c, - 0xdc, 0xf3, 0xbb, 0x8a, 0x87, 0x12, 0x52, 0x82, 0x2b, 0x58, 0x39, 0x7a, 0x29, 0x8b, 0x42, 0x5a, - 0xe8, 0x7b, 0x86, 0xd6, 0x6b, 0xf7, 0xdf, 0x9a, 0x1b, 0x99, 0x98, 0xd5, 0xc4, 0xa6, 0x5b, 0xca, - 0xa7, 0x4a, 0xed, 0xb4, 0xc4, 0xfa, 0x95, 0x0c, 0x56, 0x83, 0x45, 0x2c, 0x08, 0xa9, 0xfe, 0x9f, - 0xf2, 0x7a, 0xf3, 0xa4, 0x97, 0x23, 0xb5, 0x63, 0x29, 0xad, 
0xa6, 0x57, 0x67, 0x62, 0x42, 0xb7, - 0x74, 0x99, 0x63, 0xe4, 0x17, 0xf7, 0x09, 0x34, 0x54, 0x4c, 0xcf, 0x14, 0x35, 0x90, 0xcc, 0x2a, - 0x84, 0x13, 0xd0, 0x39, 0x66, 0x29, 0x4b, 0x32, 0xf4, 0x36, 0x63, 0x03, 0xf5, 0xe8, 0x70, 0xc5, - 0xbb, 0x8f, 0xe3, 0xeb, 0xc3, 0x01, 0x65, 0x09, 0xcd, 0x39, 0xc7, 0x44, 0x78, 0x78, 0x87, 0xd4, - 0x8b, 0xc2, 0x38, 0x14, 0x7a, 0x53, 0x3d, 0xeb, 0x3e, 0x90, 0xf6, 0x1d, 0xd2, 0xb1, 0xa4, 0x88, - 0x03, 0xed, 0x32, 0x72, 0x81, 0x71, 0x1a, 0xf9, 0x02, 0xf5, 0xff, 0x8d, 0x5a, 0xaf, 0xd9, 0x7f, - 0xff, 0xe4, 0x98, 0x23, 0x29, 0x77, 0x2b, 0xb5, 0x9d, 0x08, 0x5e, 0x38, 0xad, 0x70, 0x1d, 0x23, - 0x9f, 0xe0, 0x80, 0xfb, 0x02, 0xcb, 0xe2, 0x5e, 0x8a, 0xbc, 0xea, 0x5f, 0x6f, 0xa9, 0x3e, 0x88, - 0x24, 0x55, 0xf5, 0x29, 0xf2, 0xb2, 0xf7, 0xa3, 0x1b, 0x20, 0xff, 0xfa, 0x92, 0x0e, 0xd4, 0x96, - 0x58, 0x54, 0xeb, 0x28, 0x8f, 0xe4, 0x03, 0xd4, 0x7f, 0xfb, 0x51, 0x8e, 0x6a, 0x0f, 0x9b, 0xfd, - 0x43, 0xb3, 0xdc, 0x6d, 0x73, 0xb5, 0xdb, 0xe6, 0x77, 0xc9, 0x3a, 0xa5, 0xe8, 0x74, 0xfb, 0x44, - 0x3b, 0xfe, 0x08, 0xf0, 0xf0, 0x61, 0x48, 0x03, 0xea, 0xc3, 0xd1, 0x8d, 0x3d, 0xe8, 0x6c, 0x91, - 0xe7, 0xd0, 0xb5, 0x6f, 0xa6, 0x93, 0x6b, 0xfb, 0xda, 0x1d, 0x9d, 0x8d, 0xbd, 0xf3, 0xb3, 0x8b, - 0xcb, 0xc9, 0x70, 0xd8, 0xd1, 0x8e, 0xbf, 0x40, 0xeb, 0xd1, 0x5a, 0xc8, 0x47, 0x8e, 0xed, 0x3a, - 0xb7, 0x9d, 0x2d, 0xb2, 0x0f, 0x4d, 0x77, 0x74, 0x65, 0x7b, 0x93, 0x6f, 0xae, 0xf7, 0x63, 0xd8, - 0xd1, 0x48, 0x1b, 0xe0, 0x6c, 0x6c, 0x3b, 0xae, 0x37, 0xb9, 0x1e, 0xdf, 0x76, 0xb6, 0xcf, 0xe7, - 0xf0, 0x82, 0xb2, 0xd8, 0x4c, 0x50, 0x2c, 0xa2, 0xf0, 0x6e, 0x33, 0xc4, 0xf3, 0x46, 0x95, 0xe2, - 0x74, 0xf6, 0xf3, 0x34, 0x08, 0xc5, 0xaf, 0x7c, 0x66, 0x52, 0x16, 0x5b, 0x95, 0xdc, 0xba, 0x97, - 0x5b, 0x34, 0x0a, 0x31, 0x11, 0x56, 0xc0, 0x02, 0x9e, 0xd2, 0x35, 0x5c, 0xfd, 0xf1, 0xb3, 0x5d, - 0xe5, 0xf6, 0xf9, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc2, 0xd8, 0x2b, 0x35, 0x01, 0x04, 0x00, - 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go 
b/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go deleted file mode 100644 index f8c999074..000000000 --- a/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go +++ /dev/null @@ -1,98 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/taskexeclog.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type TaskExecLog struct { - Log string `protobuf:"bytes,1,opt,name=log,proto3" json:"log,omitempty"` - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - CreatedTime int64 `protobuf:"varint,3,opt,name=created_time,json=createdTime,proto3" json:"created_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskExecLog) Reset() { *m = TaskExecLog{} } -func (m *TaskExecLog) String() string { return proto.CompactTextString(m) } -func (*TaskExecLog) ProtoMessage() {} -func (*TaskExecLog) Descriptor() ([]byte, []int) { - return fileDescriptor_taskexeclog_e9c8274b44d54689, []int{0} -} -func (m *TaskExecLog) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskExecLog.Unmarshal(m, b) -} -func (m *TaskExecLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskExecLog.Marshal(b, m, deterministic) -} -func (dst *TaskExecLog) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_TaskExecLog.Merge(dst, src) -} -func (m *TaskExecLog) XXX_Size() int { - return xxx_messageInfo_TaskExecLog.Size(m) -} -func (m *TaskExecLog) XXX_DiscardUnknown() { - xxx_messageInfo_TaskExecLog.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskExecLog proto.InternalMessageInfo - -func (m *TaskExecLog) GetLog() string { - if m != nil { - return m.Log - } - return "" -} - -func (m *TaskExecLog) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *TaskExecLog) GetCreatedTime() int64 { - if m != nil { - return m.CreatedTime - } - return 0 -} - -func init() { - proto.RegisterType((*TaskExecLog)(nil), "conductor.proto.TaskExecLog") -} - -func init() { - proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_e9c8274b44d54689) -} - -var fileDescriptor_taskexeclog_e9c8274b44d54689 = []byte{ - // 205 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0x31, 0x4b, 0xc4, 0x40, - 0x10, 0x85, 0x89, 0x81, 0x13, 0xf7, 0x14, 0x65, 0x9b, 0x0b, 0xd8, 0x9c, 0x56, 0x57, 0xed, 0x16, - 0x76, 0x96, 0x07, 0x16, 0x82, 0x85, 0x84, 0x54, 0x5a, 0x84, 0x64, 0x76, 0xdc, 0x2c, 0xd9, 0xcd, - 0x84, 0xcd, 0x04, 0xf2, 0xf3, 0x25, 0x31, 0x48, 0xb8, 0x6e, 0xe6, 0x83, 0xf7, 0x3e, 0x9e, 0x38, - 0x04, 0x32, 0xe8, 0x35, 0x57, 0x43, 0x8b, 0x13, 0x82, 0x27, 0xab, 0xfa, 0x48, 0x4c, 0xf2, 0x1e, - 0xa8, 0x33, 0x23, 0x30, 0xc5, 0x3f, 0xf0, 0xfc, 0x2d, 0xf6, 0x45, 0x35, 0xb4, 0x6f, 0x13, 0xc2, - 0x07, 0x59, 0xf9, 0x20, 0x52, 0x4f, 0x36, 0x4b, 0x8e, 0xc9, 0xe9, 0x26, 0x9f, 0x4f, 0x79, 0x10, - 0xd7, 0x73, 0x4d, 0xe9, 0x4c, 0x76, 0xb5, 0xd0, 0xdd, 0xfc, 0xbe, 0x1b, 0xf9, 0x24, 0x6e, 0x21, - 0x62, 0xc5, 0x68, 0x4a, 0x76, 0x01, 0xb3, 0xf4, 0x98, 0x9c, 0xd2, 0x7c, 0xbf, 0xb2, 0xc2, 0x05, - 0x3c, 0x37, 0xe2, 0x11, 0x28, 0xa8, 0x0e, 0xf9, 0xc7, 0xbb, 0x49, 0x5d, 0xb8, 0xcf, 0x77, 0x1b, - 0xf3, 0x67, 0xfd, 0xf5, 0x6a, 0x1d, 0x37, 0x63, 0xad, 0x80, 0x82, 0x5e, 0x23, 0xfa, 0x3f, 0xa2, - 0xc1, 0x3b, 
0xec, 0x58, 0x5b, 0xb2, 0xb1, 0x87, 0x0d, 0x5f, 0x96, 0xd6, 0xbb, 0xa5, 0xf1, 0xe5, - 0x37, 0x00, 0x00, 0xff, 0xff, 0x78, 0x61, 0x87, 0x8e, 0xf9, 0x00, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go b/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go deleted file mode 100644 index 26eb4017f..000000000 --- a/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go +++ /dev/null @@ -1,192 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/taskresult.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import any "github.com/golang/protobuf/ptypes/any" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type TaskResult_Status int32 - -const ( - TaskResult_IN_PROGRESS TaskResult_Status = 0 - TaskResult_FAILED TaskResult_Status = 1 - TaskResult_FAILED_WITH_TERMINAL_ERROR TaskResult_Status = 2 - TaskResult_COMPLETED TaskResult_Status = 3 - TaskResult_SCHEDULED TaskResult_Status = 4 -) - -var TaskResult_Status_name = map[int32]string{ - 0: "IN_PROGRESS", - 1: "FAILED", - 2: "FAILED_WITH_TERMINAL_ERROR", - 3: "COMPLETED", - 4: "SCHEDULED", -} -var TaskResult_Status_value = map[string]int32{ - "IN_PROGRESS": 0, - "FAILED": 1, - "FAILED_WITH_TERMINAL_ERROR": 2, - "COMPLETED": 3, - "SCHEDULED": 4, -} - -func (x TaskResult_Status) String() string { - return proto.EnumName(TaskResult_Status_name, int32(x)) -} -func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0, 0} -} - -type TaskResult struct { - WorkflowInstanceId string `protobuf:"bytes,1,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` - CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` - WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` - Status TaskResult_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.TaskResult_Status" json:"status,omitempty"` - OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - OutputMessage *any.Any 
`protobuf:"bytes,8,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskResult) Reset() { *m = TaskResult{} } -func (m *TaskResult) String() string { return proto.CompactTextString(m) } -func (*TaskResult) ProtoMessage() {} -func (*TaskResult) Descriptor() ([]byte, []int) { - return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0} -} -func (m *TaskResult) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskResult.Unmarshal(m, b) -} -func (m *TaskResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskResult.Marshal(b, m, deterministic) -} -func (dst *TaskResult) XXX_Merge(src proto.Message) { - xxx_messageInfo_TaskResult.Merge(dst, src) -} -func (m *TaskResult) XXX_Size() int { - return xxx_messageInfo_TaskResult.Size(m) -} -func (m *TaskResult) XXX_DiscardUnknown() { - xxx_messageInfo_TaskResult.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskResult proto.InternalMessageInfo - -func (m *TaskResult) GetWorkflowInstanceId() string { - if m != nil { - return m.WorkflowInstanceId - } - return "" -} - -func (m *TaskResult) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *TaskResult) GetReasonForIncompletion() string { - if m != nil { - return m.ReasonForIncompletion - } - return "" -} - -func (m *TaskResult) GetCallbackAfterSeconds() int64 { - if m != nil { - return m.CallbackAfterSeconds - } - return 0 -} - -func (m *TaskResult) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *TaskResult) GetStatus() TaskResult_Status { - if m != nil { - return m.Status - } - return TaskResult_IN_PROGRESS -} - -func (m *TaskResult) GetOutputData() map[string]*_struct.Value { - if m != nil { - return m.OutputData - } - return nil -} - -func (m *TaskResult) GetOutputMessage() *any.Any { - if m != 
nil { - return m.OutputMessage - } - return nil -} - -func init() { - proto.RegisterType((*TaskResult)(nil), "conductor.proto.TaskResult") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskResult.OutputDataEntry") - proto.RegisterEnum("conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) -} - -func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_ccaec941f8ac2f31) } - -var fileDescriptor_taskresult_ccaec941f8ac2f31 = []byte{ - // 517 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xdf, 0x6e, 0xda, 0x30, - 0x14, 0xc6, 0x17, 0xa0, 0x69, 0x39, 0xac, 0x05, 0x59, 0x8c, 0x66, 0x74, 0x9a, 0x10, 0x57, 0x48, - 0x9b, 0x92, 0x89, 0x4d, 0xd3, 0xc4, 0xae, 0x68, 0x49, 0xd7, 0x48, 0x50, 0x90, 0xa1, 0x9b, 0xb4, - 0x9b, 0xc8, 0x38, 0x26, 0x8b, 0x08, 0x31, 0xb2, 0x9d, 0x75, 0x3c, 0xf0, 0xde, 0x63, 0x4a, 0x1c, - 0xda, 0x8a, 0x49, 0xbd, 0xf3, 0x39, 0xbf, 0xef, 0x7c, 0x3a, 0x7f, 0x64, 0x68, 0x6d, 0x78, 0xc0, - 0x62, 0x47, 0x11, 0xb9, 0x16, 0x4c, 0xa6, 0xb1, 0xb2, 0xb7, 0x82, 0x2b, 0x8e, 0xea, 0x94, 0x27, - 0x41, 0x4a, 0x15, 0x17, 0x3a, 0xd1, 0x7e, 0x13, 0x72, 0x1e, 0xc6, 0xcc, 0xc9, 0xa3, 0x65, 0xba, - 0x72, 0xa4, 0x12, 0x29, 0x2d, 0xe4, 0xed, 0xd7, 0x87, 0x94, 0x24, 0x3b, 0x8d, 0xba, 0x7f, 0x2b, - 0x00, 0x0b, 0x22, 0xd7, 0x38, 0xb7, 0x47, 0x1f, 0xa0, 0x79, 0xcf, 0xc5, 0x7a, 0x15, 0xf3, 0x7b, - 0x3f, 0x4a, 0xa4, 0x22, 0x09, 0x65, 0x7e, 0x14, 0x58, 0x46, 0xc7, 0xe8, 0x55, 0x31, 0xda, 0x33, - 0xaf, 0x40, 0x5e, 0x80, 0xce, 0xe1, 0x38, 0x6b, 0x2f, 0x13, 0x95, 0x72, 0x91, 0x99, 0x85, 0x5e, - 0x80, 0x3e, 0xc3, 0xb9, 0x60, 0x44, 0xf2, 0xc4, 0x5f, 0x71, 0xe1, 0x47, 0x09, 0xe5, 0x9b, 0x6d, - 0xcc, 0x54, 0xc4, 0x13, 0xab, 0x9c, 0x0b, 0x5f, 0x69, 0x7c, 0xcd, 0x85, 0xf7, 0x04, 0xa2, 0x4f, - 0xd0, 0xa2, 0x24, 0x8e, 0x97, 0x84, 0xae, 0x7d, 0xb2, 0x52, 0x4c, 0xf8, 0x92, 0x65, 0xe3, 0x4a, - 0xab, 0xd2, 0x31, 0x7a, 0x65, 0xdc, 0xdc, 0xd3, 0x61, 
0x06, 0xe7, 0x9a, 0xa1, 0x0b, 0xa8, 0x66, - 0xcd, 0x31, 0x91, 0x35, 0x72, 0x94, 0xfb, 0x9f, 0xe8, 0x84, 0x17, 0xa0, 0x01, 0x98, 0x52, 0x11, - 0x95, 0x4a, 0xcb, 0xec, 0x18, 0xbd, 0xb3, 0x7e, 0xd7, 0x3e, 0xd8, 0x9f, 0xfd, 0xb8, 0x02, 0x7b, - 0x9e, 0x2b, 0x71, 0x51, 0x81, 0xc6, 0x50, 0xe3, 0xa9, 0xda, 0xa6, 0xca, 0x0f, 0x88, 0x22, 0xd6, - 0x71, 0xa7, 0xdc, 0xab, 0xf5, 0xdf, 0x3d, 0x67, 0x30, 0xcd, 0xe5, 0x23, 0xa2, 0x88, 0x9b, 0x28, - 0xb1, 0xc3, 0xc0, 0x1f, 0x12, 0xe8, 0x2b, 0x9c, 0x15, 0x6e, 0x1b, 0x26, 0x25, 0x09, 0x99, 0x75, - 0xd2, 0x31, 0x7a, 0xb5, 0x7e, 0xd3, 0xd6, 0x27, 0xb2, 0xf7, 0x27, 0xb2, 0x87, 0xc9, 0x0e, 0x9f, - 0x6a, 0xed, 0x44, 0x4b, 0xdb, 0x77, 0x50, 0x3f, 0xf0, 0x46, 0x0d, 0x28, 0xaf, 0xd9, 0xae, 0x38, - 0x4f, 0xf6, 0x44, 0xef, 0xe1, 0xe8, 0x37, 0x89, 0x53, 0x96, 0x5f, 0xa3, 0xd6, 0x6f, 0xfd, 0x67, - 0xfc, 0x3d, 0xa3, 0x58, 0x8b, 0x06, 0xa5, 0x2f, 0x46, 0x97, 0x82, 0xa9, 0x67, 0x46, 0x75, 0xa8, - 0x79, 0xb7, 0xfe, 0x0c, 0x4f, 0xbf, 0x61, 0x77, 0x3e, 0x6f, 0xbc, 0x40, 0x00, 0xe6, 0xf5, 0xd0, - 0x1b, 0xbb, 0xa3, 0x86, 0x81, 0xde, 0x42, 0x5b, 0xbf, 0xfd, 0x1f, 0xde, 0xe2, 0xc6, 0x5f, 0xb8, - 0x78, 0xe2, 0xdd, 0x0e, 0xc7, 0xbe, 0x8b, 0xf1, 0x14, 0x37, 0x4a, 0xe8, 0x14, 0xaa, 0x57, 0xd3, - 0xc9, 0x6c, 0xec, 0x2e, 0xdc, 0x51, 0xa3, 0x9c, 0x85, 0xf3, 0xab, 0x1b, 0x77, 0x74, 0x97, 0x55, - 0x57, 0x2e, 0x43, 0xb8, 0xa0, 0x7c, 0x63, 0x27, 0x4c, 0xad, 0xe2, 0xe8, 0xcf, 0xe1, 0xfa, 0x2e, - 0x5f, 0x3e, 0xee, 0x6f, 0xb6, 0xfc, 0x39, 0x08, 0x23, 0xf5, 0x2b, 0x5d, 0xda, 0x94, 0x6f, 0x9c, - 0xa2, 0xc2, 0x79, 0xa8, 0x70, 0x68, 0x1c, 0xb1, 0x44, 0x39, 0x21, 0x0f, 0xc5, 0x96, 0x3e, 0xc9, - 0xe7, 0x7f, 0x65, 0x69, 0xe6, 0x86, 0x1f, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xff, 0xd5, 0x82, - 0xee, 0x3b, 0x03, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go b/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go deleted file mode 100644 index 0127dd8f7..000000000 --- a/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go +++ 
/dev/null @@ -1,217 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/tasksummary.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type TaskSummary struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - WorkflowType string `protobuf:"bytes,2,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` - CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - ScheduledTime string `protobuf:"bytes,4,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` - StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` - UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` - EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - Status Task_Status `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` - ExecutionTime int64 `protobuf:"varint,10,opt,name=execution_time,json=executionTime,proto3" 
json:"execution_time,omitempty"` - QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime,proto3" json:"queue_wait_time,omitempty"` - TaskDefName string `protobuf:"bytes,12,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` - TaskType string `protobuf:"bytes,13,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` - Input string `protobuf:"bytes,14,opt,name=input,proto3" json:"input,omitempty"` - Output string `protobuf:"bytes,15,opt,name=output,proto3" json:"output,omitempty"` - TaskId string `protobuf:"bytes,16,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskSummary) Reset() { *m = TaskSummary{} } -func (m *TaskSummary) String() string { return proto.CompactTextString(m) } -func (*TaskSummary) ProtoMessage() {} -func (*TaskSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_tasksummary_ab439d130c50da04, []int{0} -} -func (m *TaskSummary) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskSummary.Unmarshal(m, b) -} -func (m *TaskSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskSummary.Marshal(b, m, deterministic) -} -func (dst *TaskSummary) XXX_Merge(src proto.Message) { - xxx_messageInfo_TaskSummary.Merge(dst, src) -} -func (m *TaskSummary) XXX_Size() int { - return xxx_messageInfo_TaskSummary.Size(m) -} -func (m *TaskSummary) XXX_DiscardUnknown() { - xxx_messageInfo_TaskSummary.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskSummary proto.InternalMessageInfo - -func (m *TaskSummary) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *TaskSummary) GetWorkflowType() string { - if m != nil { - return m.WorkflowType - } - return "" -} - -func (m *TaskSummary) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - 
return "" -} - -func (m *TaskSummary) GetScheduledTime() string { - if m != nil { - return m.ScheduledTime - } - return "" -} - -func (m *TaskSummary) GetStartTime() string { - if m != nil { - return m.StartTime - } - return "" -} - -func (m *TaskSummary) GetUpdateTime() string { - if m != nil { - return m.UpdateTime - } - return "" -} - -func (m *TaskSummary) GetEndTime() string { - if m != nil { - return m.EndTime - } - return "" -} - -func (m *TaskSummary) GetStatus() Task_Status { - if m != nil { - return m.Status - } - return Task_IN_PROGRESS -} - -func (m *TaskSummary) GetReasonForIncompletion() string { - if m != nil { - return m.ReasonForIncompletion - } - return "" -} - -func (m *TaskSummary) GetExecutionTime() int64 { - if m != nil { - return m.ExecutionTime - } - return 0 -} - -func (m *TaskSummary) GetQueueWaitTime() int64 { - if m != nil { - return m.QueueWaitTime - } - return 0 -} - -func (m *TaskSummary) GetTaskDefName() string { - if m != nil { - return m.TaskDefName - } - return "" -} - -func (m *TaskSummary) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *TaskSummary) GetInput() string { - if m != nil { - return m.Input - } - return "" -} - -func (m *TaskSummary) GetOutput() string { - if m != nil { - return m.Output - } - return "" -} - -func (m *TaskSummary) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func init() { - proto.RegisterType((*TaskSummary)(nil), "conductor.proto.TaskSummary") -} - -func init() { - proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab439d130c50da04) -} - -var fileDescriptor_tasksummary_ab439d130c50da04 = []byte{ - // 446 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xcf, 0x8b, 0x13, 0x31, - 0x14, 0xc7, 0xa9, 0xbb, 0xdb, 0x1f, 0xaf, 0x3b, 0xed, 0x32, 0xa8, 0x1d, 0x5d, 0x65, 0xcb, 0x8a, - 0xd2, 0xd3, 0x14, 0x54, 0x3c, 0x78, 0x5c, 0x44, 0xe8, 0x45, 0xa4, 0x5b, 
0x10, 0xbc, 0x0c, 0x69, - 0xf2, 0xa6, 0x0d, 0x9d, 0x24, 0x63, 0x26, 0xa1, 0xdb, 0x3f, 0xcf, 0xff, 0x4c, 0xf2, 0x32, 0x5b, - 0xcb, 0x1e, 0xf3, 0xf9, 0x7e, 0xf2, 0x92, 0xf7, 0x12, 0x98, 0x28, 0x23, 0xb0, 0x9a, 0x3b, 0xd6, - 0xec, 0x1a, 0xaf, 0x14, 0xb3, 0x87, 0xbc, 0xb6, 0xc6, 0x99, 0x74, 0xcc, 0x8d, 0x16, 0x9e, 0x3b, - 0x63, 0x23, 0x78, 0x7d, 0xf5, 0xdf, 0x8c, 0xe4, 0xf6, 0xef, 0x39, 0x0c, 0x57, 0xac, 0xd9, 0xdd, - 0xc7, 0x8d, 0xe9, 0x0d, 0x0c, 0xf7, 0xc6, 0xee, 0xca, 0xca, 0xec, 0x0b, 0x29, 0xb2, 0xce, 0xb4, - 0x33, 0x1b, 0x2c, 0xe1, 0x11, 0x2d, 0x44, 0xfa, 0x0e, 0x92, 0xa3, 0xe0, 0x0e, 0x35, 0x66, 0xcf, - 0x48, 0xb9, 0x7c, 0x84, 0xab, 0x43, 0x8d, 0xe9, 0x7b, 0x18, 0x71, 0x63, 0x2d, 0x56, 0xcc, 0x49, - 0xa3, 0x43, 0xa1, 0x33, 0xb2, 0x92, 0x13, 0xba, 0x10, 0x41, 0x6b, 0xf8, 0x16, 0x85, 0xaf, 0x50, - 0x14, 0x4e, 0x2a, 0xcc, 0xce, 0xa3, 0x76, 0xa4, 0x2b, 0xa9, 0x30, 0x7d, 0x0b, 0xd0, 0x38, 0x66, - 0x5d, 0x54, 0x2e, 0x48, 0x19, 0x10, 0xa1, 0xf8, 0x06, 0x86, 0xbe, 0x16, 0xcc, 0x61, 0xcc, 0xbb, - 0xf1, 0xca, 0x11, 0x91, 0xf0, 0x0a, 0xfa, 0xa8, 0xdb, 0x03, 0x7a, 0x94, 0xf6, 0x50, 0xc7, 0xd2, - 0x9f, 0xa1, 0xdb, 0x38, 0xe6, 0x7c, 0x93, 0xf5, 0xa7, 0x9d, 0xd9, 0xe8, 0xe3, 0x9b, 0xfc, 0xc9, - 0xc8, 0xf2, 0x30, 0x9c, 0xfc, 0x9e, 0x9c, 0x65, 0xeb, 0xa6, 0x5f, 0x60, 0x62, 0x91, 0x35, 0x46, - 0x17, 0xa5, 0xb1, 0x85, 0xd4, 0xdc, 0xa8, 0xba, 0xc2, 0xd0, 0x54, 0x36, 0xa0, 0xfa, 0x2f, 0x62, - 0xfc, 0xdd, 0xd8, 0xc5, 0x49, 0x18, 0xfa, 0xc5, 0x07, 0xe4, 0x9e, 0x86, 0x42, 0xd7, 0x81, 0x69, - 0x67, 0x76, 0xb6, 0x4c, 0x8e, 0x94, 0x2e, 0xf5, 0x01, 0xc6, 0x7f, 0x3c, 0x7a, 0x2c, 0xf6, 0x4c, - 0xb6, 0x4d, 0x0f, 0xa3, 0x47, 0xf8, 0x17, 0x93, 0xb1, 0xf1, 0x5b, 0x48, 0xc2, 0x4b, 0x16, 0x02, - 0xcb, 0x42, 0x33, 0x85, 0xd9, 0x25, 0x1d, 0x3e, 0x0c, 0xf0, 0x1b, 0x96, 0x3f, 0x98, 0xc2, 0xf4, - 0x1a, 0x06, 0xe4, 0xd0, 0x53, 0x25, 0x94, 0xf7, 0x03, 0xa0, 0x67, 0x7a, 0x0e, 0x17, 0x52, 0xd7, - 0xde, 0x65, 0x23, 0x0a, 0xe2, 0x22, 0x7d, 0x09, 0x5d, 0xe3, 0x5d, 0xc0, 0x63, 0xc2, 0xed, 0x2a, - 0x9d, 0x40, 
0x8f, 0x4a, 0x49, 0x91, 0x5d, 0xc5, 0x20, 0x2c, 0x17, 0xe2, 0x6e, 0x0b, 0xd7, 0xdc, - 0xa8, 0x5c, 0xa3, 0x2b, 0x2b, 0xf9, 0xf0, 0x74, 0x82, 0x77, 0xc9, 0xc9, 0xff, 0xfa, 0xb9, 0xfe, - 0xfd, 0x75, 0x23, 0xdd, 0xd6, 0xaf, 0x73, 0x6e, 0xd4, 0xbc, 0xdd, 0x32, 0x3f, 0x6e, 0x99, 0xf3, - 0x4a, 0xa2, 0x76, 0xf3, 0x8d, 0xd9, 0xd8, 0x9a, 0x9f, 0x70, 0xfa, 0xb8, 0xeb, 0x2e, 0x55, 0xfc, - 0xf4, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x32, 0xdb, 0x34, 0x28, 0xf2, 0x02, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/workflow.pb.go b/polyglot-clients/gogrpc/conductor/model/workflow.pb.go deleted file mode 100644 index d0c963b5a..000000000 --- a/polyglot-clients/gogrpc/conductor/model/workflow.pb.go +++ /dev/null @@ -1,289 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/workflow.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type Workflow_WorkflowStatus int32 - -const ( - Workflow_RUNNING Workflow_WorkflowStatus = 0 - Workflow_COMPLETED Workflow_WorkflowStatus = 1 - Workflow_FAILED Workflow_WorkflowStatus = 2 - Workflow_TIMED_OUT Workflow_WorkflowStatus = 3 - Workflow_TERMINATED Workflow_WorkflowStatus = 4 - Workflow_PAUSED Workflow_WorkflowStatus = 5 -) - -var Workflow_WorkflowStatus_name = map[int32]string{ - 0: "RUNNING", - 1: "COMPLETED", - 2: "FAILED", - 3: "TIMED_OUT", - 4: "TERMINATED", - 5: "PAUSED", -} -var Workflow_WorkflowStatus_value = map[string]int32{ - "RUNNING": 0, - "COMPLETED": 1, - "FAILED": 2, - "TIMED_OUT": 3, - "TERMINATED": 4, - "PAUSED": 5, -} - -func (x Workflow_WorkflowStatus) String() string { - return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) -} -func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflow_d126dd6e3df866dc, []int{0, 0} -} - -type Workflow struct { - Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` - EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId,proto3" json:"parent_workflow_id,omitempty"` - ParentWorkflowTaskId string `protobuf:"bytes,5,opt,name=parent_workflow_task_id,json=parentWorkflowTaskId,proto3" json:"parent_workflow_task_id,omitempty"` - Tasks []*Task `protobuf:"bytes,6,rep,name=tasks,proto3" json:"tasks,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,8,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Output map[string]*_struct.Value 
`protobuf:"bytes,9,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - WorkflowType string `protobuf:"bytes,10,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` - Version int32 `protobuf:"varint,11,opt,name=version,proto3" json:"version,omitempty"` - CorrelationId string `protobuf:"bytes,12,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - ReRunFromWorkflowId string `protobuf:"bytes,13,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,14,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` - SchemaVersion int32 `protobuf:"varint,15,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` - Event string `protobuf:"bytes,16,opt,name=event,proto3" json:"event,omitempty"` - TaskToDomain map[string]string `protobuf:"bytes,17,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - FailedReferenceTaskNames []string `protobuf:"bytes,18,rep,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` - WorkflowDefinition *WorkflowDef `protobuf:"bytes,19,opt,name=workflow_definition,json=workflowDefinition,proto3" json:"workflow_definition,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Workflow) Reset() { *m = Workflow{} } -func (m *Workflow) String() string { return proto.CompactTextString(m) } -func (*Workflow) ProtoMessage() {} -func (*Workflow) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_d126dd6e3df866dc, []int{0} -} -func (m *Workflow) XXX_Unmarshal(b 
[]byte) error { - return xxx_messageInfo_Workflow.Unmarshal(m, b) -} -func (m *Workflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Workflow.Marshal(b, m, deterministic) -} -func (dst *Workflow) XXX_Merge(src proto.Message) { - xxx_messageInfo_Workflow.Merge(dst, src) -} -func (m *Workflow) XXX_Size() int { - return xxx_messageInfo_Workflow.Size(m) -} -func (m *Workflow) XXX_DiscardUnknown() { - xxx_messageInfo_Workflow.DiscardUnknown(m) -} - -var xxx_messageInfo_Workflow proto.InternalMessageInfo - -func (m *Workflow) GetStatus() Workflow_WorkflowStatus { - if m != nil { - return m.Status - } - return Workflow_RUNNING -} - -func (m *Workflow) GetEndTime() int64 { - if m != nil { - return m.EndTime - } - return 0 -} - -func (m *Workflow) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *Workflow) GetParentWorkflowId() string { - if m != nil { - return m.ParentWorkflowId - } - return "" -} - -func (m *Workflow) GetParentWorkflowTaskId() string { - if m != nil { - return m.ParentWorkflowTaskId - } - return "" -} - -func (m *Workflow) GetTasks() []*Task { - if m != nil { - return m.Tasks - } - return nil -} - -func (m *Workflow) GetInput() map[string]*_struct.Value { - if m != nil { - return m.Input - } - return nil -} - -func (m *Workflow) GetOutput() map[string]*_struct.Value { - if m != nil { - return m.Output - } - return nil -} - -func (m *Workflow) GetWorkflowType() string { - if m != nil { - return m.WorkflowType - } - return "" -} - -func (m *Workflow) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *Workflow) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func (m *Workflow) GetReRunFromWorkflowId() string { - if m != nil { - return m.ReRunFromWorkflowId - } - return "" -} - -func (m *Workflow) GetReasonForIncompletion() string { - if m != nil { - return m.ReasonForIncompletion - } - return "" -} 
- -func (m *Workflow) GetSchemaVersion() int32 { - if m != nil { - return m.SchemaVersion - } - return 0 -} - -func (m *Workflow) GetEvent() string { - if m != nil { - return m.Event - } - return "" -} - -func (m *Workflow) GetTaskToDomain() map[string]string { - if m != nil { - return m.TaskToDomain - } - return nil -} - -func (m *Workflow) GetFailedReferenceTaskNames() []string { - if m != nil { - return m.FailedReferenceTaskNames - } - return nil -} - -func (m *Workflow) GetWorkflowDefinition() *WorkflowDef { - if m != nil { - return m.WorkflowDefinition - } - return nil -} - -func init() { - proto.RegisterType((*Workflow)(nil), "conductor.proto.Workflow") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.InputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.OutputEntry") - proto.RegisterMapType((map[string]string)(nil), "conductor.proto.Workflow.TaskToDomainEntry") - proto.RegisterEnum("conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) -} - -func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_d126dd6e3df866dc) } - -var fileDescriptor_workflow_d126dd6e3df866dc = []byte{ - // 727 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0x5f, 0x4f, 0xe3, 0x46, - 0x14, 0xc5, 0x6b, 0xb2, 0x09, 0xe4, 0x86, 0x64, 0xbd, 0x43, 0xb6, 0xb8, 0xec, 0x4a, 0x8d, 0xb6, - 0x5d, 0xc9, 0xd2, 0x22, 0x47, 0x4a, 0xff, 0xa8, 0x42, 0x42, 0x2d, 0x34, 0xa1, 0xb2, 0x44, 0x42, - 0x30, 0x06, 0xa4, 0xbe, 0x58, 0x8e, 0x3d, 0x0e, 0x56, 0xec, 0x99, 0x68, 0x3c, 0x86, 0xe6, 0x6b, - 0xf6, 0x13, 0x55, 0x33, 0x63, 0x13, 0x07, 0x9a, 0xb7, 0x7d, 0xcb, 0xdc, 0xfb, 0x3b, 0x27, 0x77, - 0xce, 0xcc, 0x18, 0xba, 0x29, 0x0d, 0x71, 0xd2, 0x7f, 0xa2, 0x6c, 0x11, 0x25, 0xf4, 0xc9, 0x5a, - 0x32, 0xca, 0x29, 0x7a, 0x1b, 0x50, 0x12, 0xe6, 0x01, 0xa7, 0x4c, 0x15, 0x8e, 0x0e, 0x37, 0xb1, - 0x10, 0x47, 
0x45, 0x43, 0x57, 0x0d, 0xee, 0x67, 0x8b, 0xa2, 0xf2, 0x71, 0x4e, 0xe9, 0x3c, 0xc1, - 0x7d, 0xb9, 0x9a, 0xe5, 0x51, 0x3f, 0xe3, 0x2c, 0x0f, 0xb8, 0xea, 0x7e, 0xfa, 0xb7, 0x09, 0x7b, - 0xf7, 0x85, 0x0b, 0xfa, 0x03, 0x1a, 0x19, 0xf7, 0x79, 0x9e, 0x19, 0x5a, 0x4f, 0x33, 0x3b, 0x03, - 0xd3, 0x7a, 0xf1, 0xbf, 0x56, 0x89, 0x3e, 0xff, 0xb8, 0x91, 0xbc, 0x53, 0xe8, 0xd0, 0x77, 0xb0, - 0x87, 0x49, 0xe8, 0xf1, 0x38, 0xc5, 0xc6, 0x4e, 0x4f, 0x33, 0x6b, 0xce, 0x2e, 0x26, 0xa1, 0x1b, - 0xa7, 0x18, 0x7d, 0x0f, 0xad, 0x72, 0x5c, 0x2f, 0x0e, 0x8d, 0x5a, 0x4f, 0x33, 0x9b, 0x0e, 0x94, - 0x25, 0x3b, 0x44, 0xc7, 0x80, 0x96, 0x3e, 0xc3, 0x84, 0x7b, 0x55, 0xee, 0x8d, 0xe4, 0x74, 0xd5, - 0xb9, 0x5f, 0xd3, 0xbf, 0xc0, 0xe1, 0x4b, 0x5a, 0x6c, 0x5a, 0x48, 0xea, 0x52, 0xd2, 0xdd, 0x94, - 0xb8, 0x7e, 0xb6, 0xb0, 0x43, 0xf4, 0x05, 0xea, 0x02, 0xcb, 0x8c, 0x46, 0xaf, 0x66, 0xb6, 0x06, - 0xef, 0x5f, 0xed, 0x50, 0x70, 0x8e, 0x62, 0xd0, 0x09, 0xd4, 0x63, 0xb2, 0xcc, 0xb9, 0xb1, 0x27, - 0xe1, 0x1f, 0xb7, 0xc7, 0x61, 0x0b, 0x6c, 0x44, 0x38, 0x5b, 0x39, 0x4a, 0x82, 0x4e, 0xa1, 0x41, - 0x73, 0x2e, 0xc4, 0x4d, 0x29, 0xfe, 0xbc, 0x5d, 0x7c, 0x25, 0x39, 0xa5, 0x2e, 0x44, 0xe8, 0x07, - 0x68, 0xaf, 0xf7, 0xb5, 0x5a, 0x62, 0x03, 0xe4, 0xa6, 0xf6, 0xcb, 0xa2, 0xbb, 0x5a, 0x62, 0x64, - 0xc0, 0xee, 0x23, 0x66, 0x59, 0x4c, 0x89, 0xd1, 0xea, 0x69, 0x66, 0xdd, 0x29, 0x97, 0xe8, 0x33, - 0x74, 0x02, 0xca, 0x18, 0x4e, 0x7c, 0x1e, 0x53, 0x22, 0x42, 0xd9, 0x97, 0xfa, 0x76, 0xa5, 0x6a, - 0x87, 0xe8, 0x67, 0x38, 0x64, 0xd8, 0x63, 0x39, 0xf1, 0x22, 0x46, 0xd3, 0x8d, 0xdc, 0xdb, 0x92, - 0x3f, 0x60, 0xd8, 0xc9, 0xc9, 0x05, 0xa3, 0x69, 0x25, 0xfa, 0x5f, 0x85, 0xca, 0xcf, 0x28, 0xf1, - 0x22, 0xca, 0xbc, 0x98, 0x04, 0x34, 0x5d, 0x26, 0x58, 0x58, 0x1a, 0x1d, 0xa9, 0x7a, 0xaf, 0xda, - 0x17, 0x94, 0xd9, 0x95, 0xa6, 0x18, 0x2a, 0x0b, 0x1e, 0x70, 0xea, 0x7b, 0xe5, 0xd4, 0x6f, 0xe5, - 0xd4, 0x6d, 0x55, 0xbd, 0x2b, 0x66, 0xef, 0x42, 0x1d, 0x3f, 0x62, 0xc2, 0x0d, 0x5d, 0x9a, 0xa9, - 0x05, 0xba, 0x86, 0x8e, 0x3c, 0x5f, 0x4e, 0xbd, 
0x90, 0xa6, 0x7e, 0x4c, 0x8c, 0x77, 0x32, 0xd7, - 0x2f, 0xdb, 0x73, 0x15, 0x47, 0xe9, 0xd2, 0xa1, 0xa4, 0x55, 0xba, 0xfb, 0xbc, 0x52, 0x42, 0xa7, - 0xf0, 0x21, 0xf2, 0xe3, 0x04, 0x87, 0x1e, 0xc3, 0x11, 0x66, 0x98, 0x04, 0x58, 0xdd, 0x21, 0xe2, - 0xa7, 0x38, 0x33, 0x50, 0xaf, 0x66, 0x36, 0x1d, 0x43, 0x21, 0x4e, 0x49, 0x08, 0xd3, 0x89, 0xe8, - 0xa3, 0x31, 0x1c, 0x3c, 0x07, 0x16, 0xe2, 0x28, 0x26, 0xb1, 0x8c, 0xe0, 0xa0, 0xa7, 0x99, 0xad, - 0xc1, 0xc7, 0xad, 0x63, 0x0d, 0x71, 0xe4, 0xa0, 0xa7, 0xf5, 0xa2, 0xd0, 0x1d, 0x4d, 0x01, 0xd6, - 0xb7, 0x08, 0xe9, 0x50, 0x5b, 0xe0, 0x95, 0x7c, 0x87, 0x4d, 0x47, 0xfc, 0x44, 0xc7, 0x50, 0x7f, - 0xf4, 0x93, 0x5c, 0xbd, 0xab, 0xd6, 0xe0, 0x5b, 0x4b, 0xbd, 0x6b, 0xab, 0x7c, 0xd7, 0xd6, 0x9d, - 0xe8, 0x3a, 0x0a, 0x3a, 0xd9, 0xf9, 0x4d, 0x3b, 0xba, 0x86, 0x56, 0xe5, 0x6a, 0x7d, 0x15, 0xcb, - 0xdf, 0xe1, 0xdd, 0xab, 0x54, 0xff, 0xc7, 0xb8, 0x5b, 0x35, 0x6e, 0x56, 0x0c, 0x3e, 0x05, 0xd0, - 0xd9, 0xfc, 0x74, 0xa0, 0x16, 0xec, 0x3a, 0xb7, 0x93, 0x89, 0x3d, 0xf9, 0x4b, 0xff, 0x06, 0xb5, - 0xa1, 0xf9, 0xe7, 0xd5, 0x78, 0x7a, 0x39, 0x72, 0x47, 0x43, 0x5d, 0x43, 0x00, 0x8d, 0x8b, 0x33, - 0xfb, 0x72, 0x34, 0xd4, 0x77, 0x44, 0xcb, 0xb5, 0xc7, 0xa3, 0xa1, 0x77, 0x75, 0xeb, 0xea, 0x35, - 0xd4, 0x01, 0x70, 0x47, 0xce, 0xd8, 0x9e, 0x9c, 0x09, 0xf4, 0x8d, 0x40, 0xa7, 0x67, 0xb7, 0x37, - 0xa3, 0xa1, 0x5e, 0x3f, 0xc7, 0xf0, 0x21, 0xa0, 0xa9, 0x45, 0x30, 0x8f, 0x92, 0xf8, 0x9f, 0x97, - 0x27, 0x71, 0x0e, 0xe5, 0x04, 0xd3, 0xd9, 0xdf, 0x27, 0xf3, 0x98, 0x3f, 0xe4, 0x33, 0x2b, 0xa0, - 0x69, 0xbf, 0xe0, 0xfb, 0xcf, 0x7c, 0x3f, 0x48, 0x62, 0x4c, 0x78, 0x7f, 0x4e, 0xe7, 0x6c, 0x19, - 0x54, 0xea, 0xf2, 0x23, 0x3b, 0x6b, 0x48, 0xbb, 0x9f, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1f, - 0xe2, 0x8d, 0x7f, 0xb4, 0x05, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go deleted file mode 100644 index ed8cfe2a7..000000000 --- 
a/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go +++ /dev/null @@ -1,161 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/workflowdef.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type WorkflowDef struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` - Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` - Tasks []*WorkflowTask `protobuf:"bytes,4,rep,name=tasks,proto3" json:"tasks,omitempty"` - InputParameters []string `protobuf:"bytes,5,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty"` - OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters,proto3" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow,proto3" json:"failure_workflow,omitempty"` - SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` - Restartable bool `protobuf:"varint,9,opt,name=restartable,proto3" json:"restartable,omitempty"` 
- XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowDef) Reset() { *m = WorkflowDef{} } -func (m *WorkflowDef) String() string { return proto.CompactTextString(m) } -func (*WorkflowDef) ProtoMessage() {} -func (*WorkflowDef) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowdef_3a04d4bf8b36be23, []int{0} -} -func (m *WorkflowDef) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowDef.Unmarshal(m, b) -} -func (m *WorkflowDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowDef.Marshal(b, m, deterministic) -} -func (dst *WorkflowDef) XXX_Merge(src proto.Message) { - xxx_messageInfo_WorkflowDef.Merge(dst, src) -} -func (m *WorkflowDef) XXX_Size() int { - return xxx_messageInfo_WorkflowDef.Size(m) -} -func (m *WorkflowDef) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowDef.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowDef proto.InternalMessageInfo - -func (m *WorkflowDef) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *WorkflowDef) GetDescription() string { - if m != nil { - return m.Description - } - return "" -} - -func (m *WorkflowDef) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *WorkflowDef) GetTasks() []*WorkflowTask { - if m != nil { - return m.Tasks - } - return nil -} - -func (m *WorkflowDef) GetInputParameters() []string { - if m != nil { - return m.InputParameters - } - return nil -} - -func (m *WorkflowDef) GetOutputParameters() map[string]*_struct.Value { - if m != nil { - return m.OutputParameters - } - return nil -} - -func (m *WorkflowDef) GetFailureWorkflow() string { - if m != nil { - return m.FailureWorkflow - } - return "" -} - -func (m *WorkflowDef) GetSchemaVersion() int32 { - if m != nil { - return m.SchemaVersion - } - return 0 -} - -func (m *WorkflowDef) GetRestartable() bool { - if m != nil { - return 
m.Restartable - } - return false -} - -func init() { - proto.RegisterType((*WorkflowDef)(nil), "conductor.proto.WorkflowDef") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowDef.OutputParametersEntry") -} - -func init() { - proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_3a04d4bf8b36be23) -} - -var fileDescriptor_workflowdef_3a04d4bf8b36be23 = []byte{ - // 404 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xdf, 0x6b, 0xd5, 0x30, - 0x14, 0xc7, 0xe9, 0xba, 0x6e, 0xbb, 0x29, 0xf3, 0x5e, 0x03, 0x6a, 0x98, 0x0a, 0x45, 0x10, 0x2a, - 0x48, 0x0a, 0x77, 0x2f, 0xb2, 0xc7, 0x31, 0x9f, 0x1d, 0x45, 0x26, 0xe8, 0x43, 0x49, 0xd3, 0xd3, - 0xde, 0xd2, 0xb4, 0x29, 0xf9, 0xb1, 0xb9, 0xbf, 0xda, 0x7f, 0x41, 0x9a, 0xb6, 0xb3, 0x16, 0x7d, - 0xcb, 0xf9, 0x7c, 0xcf, 0x2f, 0xbe, 0x27, 0xe8, 0x55, 0x2b, 0x0b, 0x10, 0xc9, 0x83, 0x54, 0x4d, - 0x29, 0xe4, 0x43, 0x01, 0x25, 0xed, 0x95, 0x34, 0x12, 0x6f, 0xb9, 0xec, 0x0a, 0xcb, 0x8d, 0x54, - 0x23, 0xb8, 0x20, 0x7f, 0x67, 0x1a, 0xa6, 0x9b, 0x49, 0x79, 0x53, 0x49, 0x59, 0x09, 0x48, 0x5c, - 0x94, 0xdb, 0x32, 0xd1, 0x46, 0x59, 0x6e, 0x46, 0xf5, 0xdd, 0x2f, 0x1f, 0x85, 0xdf, 0xa6, 0xa2, - 0x1b, 0x28, 0x31, 0x46, 0xc7, 0x1d, 0x6b, 0x81, 0x78, 0x91, 0x17, 0x6f, 0x52, 0xf7, 0xc6, 0x11, - 0x0a, 0x0b, 0xd0, 0x5c, 0xd5, 0xbd, 0xa9, 0x65, 0x47, 0x8e, 0x9c, 0xb4, 0x44, 0x98, 0xa0, 0xd3, - 0x7b, 0x50, 0x7a, 0x50, 0xfd, 0xc8, 0x8b, 0x83, 0x74, 0x0e, 0xf1, 0x25, 0x0a, 0x86, 0x5d, 0x34, - 0x39, 0x8e, 0xfc, 0x38, 0xdc, 0xbf, 0xa5, 0xab, 0xc5, 0xe9, 0x3c, 0xfc, 0x2b, 0xd3, 0x4d, 0x3a, - 0xe6, 0xe2, 0x0f, 0x68, 0x57, 0x77, 0xbd, 0x35, 0x59, 0xcf, 0x14, 0x6b, 0xc1, 0x80, 0xd2, 0x24, - 0x88, 0xfc, 0x78, 0x93, 0x6e, 0x1d, 0xbf, 0x7d, 0xc2, 0x38, 0x43, 0xcf, 0xa5, 0x35, 0xab, 0xdc, - 0x13, 0x37, 0x6b, 0xff, 0xdf, 0x59, 0x37, 0x50, 0xd2, 0x2f, 0xae, 0xea, 0x4f, 0xa7, 0xcf, 0x9d, - 0x51, 0x8f, 0xe9, 0x4e, 0xae, 0xf0, 0xb0, 0x4b, 0xc9, 0x6a, 0x61, 
0x15, 0x64, 0xb3, 0xb9, 0xe4, - 0xd4, 0x39, 0xb0, 0x9d, 0xf8, 0xdc, 0x15, 0xbf, 0x47, 0xcf, 0x34, 0x3f, 0x40, 0xcb, 0xb2, 0xd9, - 0x8c, 0x33, 0x67, 0xc6, 0xf9, 0x48, 0xef, 0x26, 0x4b, 0x22, 0x14, 0x2a, 0xd0, 0x86, 0x29, 0xc3, - 0x72, 0x01, 0x64, 0x13, 0x79, 0xf1, 0x59, 0xba, 0x44, 0x17, 0x3f, 0xd0, 0x8b, 0x7f, 0xae, 0x87, - 0x77, 0xc8, 0x6f, 0xe0, 0x71, 0x3a, 0xce, 0xf0, 0xc4, 0x1f, 0x51, 0x70, 0xcf, 0x84, 0x05, 0x77, - 0x95, 0x70, 0xff, 0x92, 0x8e, 0xd7, 0xa6, 0xf3, 0xb5, 0xe9, 0xdd, 0xa0, 0xa6, 0x63, 0xd2, 0xd5, - 0xd1, 0x27, 0xef, 0xfa, 0x80, 0x5e, 0x73, 0xd9, 0xd2, 0x0e, 0x4c, 0x29, 0xea, 0x9f, 0x6b, 0x8f, - 0xae, 0xcf, 0x17, 0x26, 0xdd, 0xe6, 0xdf, 0xaf, 0xaa, 0xda, 0x1c, 0x6c, 0x4e, 0xb9, 0x6c, 0x93, - 0xa9, 0x24, 0x79, 0x2a, 0x49, 0xb8, 0xa8, 0xa1, 0x33, 0x49, 0x25, 0x2b, 0xd5, 0xf3, 0x05, 0x77, - 0x9f, 0x31, 0x3f, 0x71, 0x1d, 0x2f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x70, 0x4f, 0x0f, 0xeb, - 0xc6, 0x02, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go deleted file mode 100644 index 06837d555..000000000 --- a/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go +++ /dev/null @@ -1,200 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: model/workflowsummary.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type WorkflowSummary struct { - WorkflowType string `protobuf:"bytes,1,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - CorrelationId string `protobuf:"bytes,4,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` - StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` - UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` - EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` - Input string `protobuf:"bytes,9,opt,name=input,proto3" json:"input,omitempty"` - Output string `protobuf:"bytes,10,opt,name=output,proto3" json:"output,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` - ExecutionTime int64 `protobuf:"varint,12,opt,name=execution_time,json=executionTime,proto3" json:"execution_time,omitempty"` - Event string `protobuf:"bytes,13,opt,name=event,proto3" json:"event,omitempty"` - FailedReferenceTaskNames string `protobuf:"bytes,14,opt,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowSummary) Reset() { *m = WorkflowSummary{} } -func (m *WorkflowSummary) String() string { return 
proto.CompactTextString(m) } -func (*WorkflowSummary) ProtoMessage() {} -func (*WorkflowSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowsummary_3f8ed40c0bd9261f, []int{0} -} -func (m *WorkflowSummary) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowSummary.Unmarshal(m, b) -} -func (m *WorkflowSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowSummary.Marshal(b, m, deterministic) -} -func (dst *WorkflowSummary) XXX_Merge(src proto.Message) { - xxx_messageInfo_WorkflowSummary.Merge(dst, src) -} -func (m *WorkflowSummary) XXX_Size() int { - return xxx_messageInfo_WorkflowSummary.Size(m) -} -func (m *WorkflowSummary) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowSummary.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowSummary proto.InternalMessageInfo - -func (m *WorkflowSummary) GetWorkflowType() string { - if m != nil { - return m.WorkflowType - } - return "" -} - -func (m *WorkflowSummary) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -func (m *WorkflowSummary) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *WorkflowSummary) GetCorrelationId() string { - if m != nil { - return m.CorrelationId - } - return "" -} - -func (m *WorkflowSummary) GetStartTime() string { - if m != nil { - return m.StartTime - } - return "" -} - -func (m *WorkflowSummary) GetUpdateTime() string { - if m != nil { - return m.UpdateTime - } - return "" -} - -func (m *WorkflowSummary) GetEndTime() string { - if m != nil { - return m.EndTime - } - return "" -} - -func (m *WorkflowSummary) GetStatus() Workflow_WorkflowStatus { - if m != nil { - return m.Status - } - return Workflow_RUNNING -} - -func (m *WorkflowSummary) GetInput() string { - if m != nil { - return m.Input - } - return "" -} - -func (m *WorkflowSummary) GetOutput() string { - if m != nil { - return m.Output - } - return "" -} - -func (m *WorkflowSummary) 
GetReasonForIncompletion() string { - if m != nil { - return m.ReasonForIncompletion - } - return "" -} - -func (m *WorkflowSummary) GetExecutionTime() int64 { - if m != nil { - return m.ExecutionTime - } - return 0 -} - -func (m *WorkflowSummary) GetEvent() string { - if m != nil { - return m.Event - } - return "" -} - -func (m *WorkflowSummary) GetFailedReferenceTaskNames() string { - if m != nil { - return m.FailedReferenceTaskNames - } - return "" -} - -func init() { - proto.RegisterType((*WorkflowSummary)(nil), "conductor.proto.WorkflowSummary") -} - -func init() { - proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_3f8ed40c0bd9261f) -} - -var fileDescriptor_workflowsummary_3f8ed40c0bd9261f = []byte{ - // 428 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0x51, 0x8b, 0xd3, 0x40, - 0x10, 0xc7, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x1e, 0x2e, 0x55, 0x57, 0x8b, 0x58, 0x14, 0x21, - 0x4f, 0x09, 0x28, 0xf8, 0x20, 0x08, 0x72, 0x0f, 0x42, 0x5f, 0x44, 0x72, 0x05, 0xc1, 0x97, 0xb0, - 0xdd, 0x4c, 0xea, 0xd2, 0x64, 0x27, 0xec, 0x6e, 0xee, 0xda, 0x4f, 0xe2, 0xd7, 0x95, 0xcc, 0x36, - 0xa5, 0xd7, 0xb7, 0xcc, 0xef, 0xff, 0x9b, 0xdd, 0xcc, 0xb0, 0x6c, 0x5e, 0x63, 0x01, 0x55, 0xfa, - 0x88, 0x76, 0x5b, 0x56, 0xf8, 0xe8, 0xda, 0xba, 0x96, 0x76, 0x9f, 0x34, 0x16, 0x3d, 0xf2, 0x5b, - 0x85, 0xa6, 0x68, 0x95, 0x47, 0x1b, 0xc0, 0x9b, 0xd9, 0x53, 0x3b, 0xd0, 0xf7, 0xff, 0x2e, 0xd9, - 0xed, 0xef, 0x03, 0xba, 0x0f, 0x07, 0xf0, 0x0f, 0x6c, 0xd2, 0x5b, 0xb9, 0xdf, 0x37, 0x20, 0xa2, - 0x45, 0x14, 0x8f, 0xb3, 0x9b, 0x1e, 0xae, 0xf6, 0x0d, 0x70, 0xc1, 0x46, 0x0f, 0x60, 0x9d, 0x46, - 0x23, 0x9e, 0x2d, 0xa2, 0x78, 0x90, 0xf5, 0x25, 0x7f, 0xc7, 0xae, 0x8f, 0xed, 0xba, 0x10, 0x17, - 0xd4, 0xcc, 0x7a, 0xb4, 0x2c, 0xf8, 0x47, 0x36, 0x55, 0x68, 0x2d, 0x54, 0xd2, 0x6b, 0x34, 0x9d, - 0x73, 0x49, 0xce, 0xe4, 0x84, 0x2e, 0x0b, 0xfe, 0x96, 0x31, 0xe7, 0xa5, 0xf5, 0xb9, 0xd7, 0x35, - 0x88, 0x01, 0x29, 0x63, 
0x22, 0x2b, 0x5d, 0x43, 0x77, 0x4d, 0xdb, 0x14, 0xd2, 0x43, 0xc8, 0x87, - 0xe1, 0x9a, 0x80, 0x48, 0x78, 0xcd, 0xae, 0xc0, 0x14, 0x21, 0x1d, 0x51, 0x3a, 0x02, 0x53, 0x50, - 0xf4, 0x9d, 0x0d, 0x9d, 0x97, 0xbe, 0x75, 0xe2, 0x6a, 0x11, 0xc5, 0xd3, 0x4f, 0x71, 0x72, 0xb6, - 0xad, 0xa4, 0xdf, 0xc9, 0xf1, 0xe3, 0x9e, 0xfc, 0xec, 0xd0, 0xc7, 0x67, 0x6c, 0xa0, 0x4d, 0xd3, - 0x7a, 0x31, 0xa6, 0x93, 0x43, 0xc1, 0x5f, 0xb2, 0x21, 0xb6, 0xbe, 0xc3, 0x8c, 0xf0, 0xa1, 0xe2, - 0x5f, 0xd8, 0x2b, 0x0b, 0xd2, 0xa1, 0xc9, 0x4b, 0xb4, 0xb9, 0x36, 0x0a, 0xeb, 0xa6, 0x82, 0x6e, - 0x4e, 0x71, 0x4d, 0xe2, 0x8b, 0x10, 0xff, 0x40, 0xbb, 0x3c, 0x09, 0xbb, 0x4d, 0xc1, 0x0e, 0x54, - 0x4b, 0x7b, 0xa2, 0x41, 0x6e, 0x16, 0x51, 0x7c, 0x91, 0x4d, 0x8e, 0x94, 0xc6, 0x99, 0xb1, 0x01, - 0x3c, 0x80, 0xf1, 0x62, 0x12, 0x7e, 0x86, 0x0a, 0xfe, 0x8d, 0xcd, 0x4b, 0xa9, 0x2b, 0x28, 0x72, - 0x0b, 0x25, 0x58, 0x30, 0x0a, 0x72, 0x2f, 0xdd, 0x36, 0x37, 0xb2, 0x06, 0x27, 0xa6, 0xe4, 0x8a, - 0xa0, 0x64, 0xbd, 0xb1, 0x92, 0x6e, 0xfb, 0xb3, 0xcb, 0xef, 0x2a, 0x36, 0x57, 0x58, 0x27, 0x06, - 0x7c, 0x59, 0xe9, 0xdd, 0xf9, 0x82, 0xee, 0x9e, 0x9f, 0xbd, 0x9a, 0x5f, 0xeb, 0x3f, 0x5f, 0x37, - 0xda, 0xff, 0x6d, 0xd7, 0x89, 0xc2, 0x3a, 0x3d, 0xb4, 0xa5, 0xc7, 0xb6, 0x54, 0x55, 0x1a, 0x8c, - 0x4f, 0x37, 0xb8, 0xb1, 0x8d, 0x3a, 0xe1, 0xf4, 0x2c, 0xd7, 0x43, 0x3a, 0xf5, 0xf3, 0xff, 0x00, - 0x00, 0x00, 0xff, 0xff, 0xa0, 0xee, 0x86, 0xf0, 0xd4, 0x02, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go deleted file mode 100644 index 1e24eb2f4..000000000 --- a/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go +++ /dev/null @@ -1,308 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: model/workflowtask.proto - -package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type WorkflowTask struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"` - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` - InputParameters map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` - DynamicTaskNameParam string `protobuf:"bytes,6,opt,name=dynamic_task_name_param,json=dynamicTaskNameParam,proto3" json:"dynamic_task_name_param,omitempty"` - CaseValueParam string `protobuf:"bytes,7,opt,name=case_value_param,json=caseValueParam,proto3" json:"case_value_param,omitempty"` - CaseExpression string `protobuf:"bytes,8,opt,name=case_expression,json=caseExpression,proto3" json:"case_expression,omitempty"` - DecisionCases map[string]*WorkflowTask_WorkflowTaskList `protobuf:"bytes,9,rep,name=decision_cases,json=decisionCases,proto3" json:"decision_cases,omitempty" 
protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - DynamicForkTasksParam string `protobuf:"bytes,10,opt,name=dynamic_fork_tasks_param,json=dynamicForkTasksParam,proto3" json:"dynamic_fork_tasks_param,omitempty"` - DynamicForkTasksInputParamName string `protobuf:"bytes,11,opt,name=dynamic_fork_tasks_input_param_name,json=dynamicForkTasksInputParamName,proto3" json:"dynamic_fork_tasks_input_param_name,omitempty"` - DefaultCase []*WorkflowTask `protobuf:"bytes,12,rep,name=default_case,json=defaultCase,proto3" json:"default_case,omitempty"` - ForkTasks []*WorkflowTask_WorkflowTaskList `protobuf:"bytes,13,rep,name=fork_tasks,json=forkTasks,proto3" json:"fork_tasks,omitempty"` - StartDelay int32 `protobuf:"varint,14,opt,name=start_delay,json=startDelay,proto3" json:"start_delay,omitempty"` - SubWorkflowParam *SubWorkflowParams `protobuf:"bytes,15,opt,name=sub_workflow_param,json=subWorkflowParam,proto3" json:"sub_workflow_param,omitempty"` - JoinOn []string `protobuf:"bytes,16,rep,name=join_on,json=joinOn,proto3" json:"join_on,omitempty"` - Sink string `protobuf:"bytes,17,opt,name=sink,proto3" json:"sink,omitempty"` - Optional bool `protobuf:"varint,18,opt,name=optional,proto3" json:"optional,omitempty"` - TaskDefinition *TaskDef `protobuf:"bytes,19,opt,name=task_definition,json=taskDefinition,proto3" json:"task_definition,omitempty"` - RateLimited bool `protobuf:"varint,20,opt,name=rate_limited,json=rateLimited,proto3" json:"rate_limited,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } -func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } -func (*WorkflowTask) ProtoMessage() {} -func (*WorkflowTask) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_9ea0dc5eed4f592b, []int{0} -} -func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { - return 
xxx_messageInfo_WorkflowTask.Unmarshal(m, b) -} -func (m *WorkflowTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowTask.Marshal(b, m, deterministic) -} -func (dst *WorkflowTask) XXX_Merge(src proto.Message) { - xxx_messageInfo_WorkflowTask.Merge(dst, src) -} -func (m *WorkflowTask) XXX_Size() int { - return xxx_messageInfo_WorkflowTask.Size(m) -} -func (m *WorkflowTask) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowTask.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowTask proto.InternalMessageInfo - -func (m *WorkflowTask) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *WorkflowTask) GetTaskReferenceName() string { - if m != nil { - return m.TaskReferenceName - } - return "" -} - -func (m *WorkflowTask) GetDescription() string { - if m != nil { - return m.Description - } - return "" -} - -func (m *WorkflowTask) GetInputParameters() map[string]*_struct.Value { - if m != nil { - return m.InputParameters - } - return nil -} - -func (m *WorkflowTask) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *WorkflowTask) GetDynamicTaskNameParam() string { - if m != nil { - return m.DynamicTaskNameParam - } - return "" -} - -func (m *WorkflowTask) GetCaseValueParam() string { - if m != nil { - return m.CaseValueParam - } - return "" -} - -func (m *WorkflowTask) GetCaseExpression() string { - if m != nil { - return m.CaseExpression - } - return "" -} - -func (m *WorkflowTask) GetDecisionCases() map[string]*WorkflowTask_WorkflowTaskList { - if m != nil { - return m.DecisionCases - } - return nil -} - -func (m *WorkflowTask) GetDynamicForkTasksParam() string { - if m != nil { - return m.DynamicForkTasksParam - } - return "" -} - -func (m *WorkflowTask) GetDynamicForkTasksInputParamName() string { - if m != nil { - return m.DynamicForkTasksInputParamName - } - return "" -} - -func (m *WorkflowTask) GetDefaultCase() []*WorkflowTask { - if m != nil { - return 
m.DefaultCase - } - return nil -} - -func (m *WorkflowTask) GetForkTasks() []*WorkflowTask_WorkflowTaskList { - if m != nil { - return m.ForkTasks - } - return nil -} - -func (m *WorkflowTask) GetStartDelay() int32 { - if m != nil { - return m.StartDelay - } - return 0 -} - -func (m *WorkflowTask) GetSubWorkflowParam() *SubWorkflowParams { - if m != nil { - return m.SubWorkflowParam - } - return nil -} - -func (m *WorkflowTask) GetJoinOn() []string { - if m != nil { - return m.JoinOn - } - return nil -} - -func (m *WorkflowTask) GetSink() string { - if m != nil { - return m.Sink - } - return "" -} - -func (m *WorkflowTask) GetOptional() bool { - if m != nil { - return m.Optional - } - return false -} - -func (m *WorkflowTask) GetTaskDefinition() *TaskDef { - if m != nil { - return m.TaskDefinition - } - return nil -} - -func (m *WorkflowTask) GetRateLimited() bool { - if m != nil { - return m.RateLimited - } - return false -} - -type WorkflowTask_WorkflowTaskList struct { - Tasks []*WorkflowTask `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowTask_WorkflowTaskList) Reset() { *m = WorkflowTask_WorkflowTaskList{} } -func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } -func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} -func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_9ea0dc5eed4f592b, []int{0, 0} -} -func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) -} -func (m *WorkflowTask_WorkflowTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Marshal(b, m, deterministic) -} -func (dst *WorkflowTask_WorkflowTaskList) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_WorkflowTask_WorkflowTaskList.Merge(dst, src) -} -func (m *WorkflowTask_WorkflowTaskList) XXX_Size() int { - return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Size(m) -} -func (m *WorkflowTask_WorkflowTaskList) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowTask_WorkflowTaskList.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowTask_WorkflowTaskList proto.InternalMessageInfo - -func (m *WorkflowTask_WorkflowTaskList) GetTasks() []*WorkflowTask { - if m != nil { - return m.Tasks - } - return nil -} - -func init() { - proto.RegisterType((*WorkflowTask)(nil), "conductor.proto.WorkflowTask") - proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.DecisionCasesEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowTask.InputParametersEntry") - proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.WorkflowTaskList") -} - -func init() { - proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_9ea0dc5eed4f592b) -} - -var fileDescriptor_workflowtask_9ea0dc5eed4f592b = []byte{ - // 708 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0xd3, 0x3a, - 0x14, 0x56, 0xd7, 0x75, 0x5b, 0x4f, 0xbb, 0xb6, 0xf3, 0x76, 0xef, 0xac, 0xde, 0xbb, 0x7b, 0xcb, - 0x78, 0xa0, 0x0f, 0x28, 0x45, 0x9d, 0x10, 0x68, 0x4f, 0x30, 0x3a, 0x10, 0x62, 0xc0, 0x14, 0x10, - 0x93, 0x26, 0xa1, 0x28, 0x4d, 0x9c, 0x62, 0x9a, 0xc6, 0x91, 0xed, 0xb0, 0xf5, 0xaf, 0xf0, 0x6b, - 0x91, 0x8f, 0x93, 0x36, 0xeb, 0xa6, 0x09, 0xde, 0xec, 0xef, 0x7c, 0xdf, 0x39, 0xe7, 0xb3, 0x7d, - 0x0c, 0x74, 0x26, 0x42, 0x16, 0x0f, 0xae, 0x84, 0x9c, 0x46, 0xb1, 0xb8, 0xd2, 0xbe, 0x9a, 0x3a, - 0xa9, 0x14, 0x5a, 0x90, 0x76, 0x20, 0x92, 0x30, 0x0b, 0xb4, 0x90, 0x16, 0xe8, 0xee, 0x5a, 0xaa, - 0xa1, 0x84, 0x2c, 0xca, 0xc1, 0x03, 0x0b, 0xaa, 0x6c, 0x5c, 0xa4, 0x48, 0x7d, 0xe9, 0xcf, 0x54, - 0x1e, 0xfe, 0x77, 
0x22, 0xc4, 0x24, 0x66, 0x03, 0xdc, 0x8d, 0xb3, 0x68, 0xa0, 0xb4, 0xcc, 0x02, - 0x6d, 0xa3, 0x87, 0x3f, 0x01, 0x9a, 0x17, 0xb9, 0xec, 0xb3, 0xaf, 0xa6, 0x84, 0xc0, 0x7a, 0xe2, - 0xcf, 0x18, 0xad, 0xf4, 0x2a, 0xfd, 0xba, 0x8b, 0x6b, 0xe2, 0xc0, 0xae, 0x29, 0xe9, 0x49, 0x16, - 0x31, 0xc9, 0x92, 0x80, 0x79, 0x48, 0x59, 0x43, 0xca, 0x8e, 0x09, 0xb9, 0x45, 0xe4, 0x83, 0xe1, - 0xf7, 0xa0, 0x11, 0x32, 0x15, 0x48, 0x9e, 0x6a, 0x2e, 0x12, 0x5a, 0x45, 0x5e, 0x19, 0x22, 0x5f, - 0xa1, 0xc3, 0x93, 0x34, 0xd3, 0x1e, 0xb6, 0xca, 0x34, 0x93, 0x8a, 0xae, 0xf7, 0xaa, 0xfd, 0xc6, - 0x70, 0xe8, 0xac, 0x98, 0x76, 0xca, 0xed, 0x39, 0x6f, 0x8d, 0xea, 0x7c, 0x21, 0x3a, 0x4d, 0xb4, - 0x9c, 0xbb, 0x6d, 0x7e, 0x13, 0x35, 0x26, 0xf4, 0x3c, 0x65, 0xb4, 0x66, 0x4d, 0x98, 0x35, 0x79, - 0x0a, 0xfb, 0xe1, 0x3c, 0xf1, 0x67, 0x3c, 0xf0, 0xd0, 0x8c, 0xb1, 0x60, 0xcb, 0xd3, 0x0d, 0xa4, - 0xed, 0xe5, 0x61, 0x53, 0xc7, 0xd8, 0xc0, 0x7c, 0xa4, 0x0f, 0x9d, 0xc0, 0x57, 0xcc, 0xfb, 0xe1, - 0xc7, 0x59, 0xc1, 0xdf, 0x44, 0x7e, 0xcb, 0xe0, 0x5f, 0x0c, 0x6c, 0x99, 0x8f, 0xa0, 0x8d, 0x4c, - 0x76, 0x9d, 0x4a, 0xa6, 0x94, 0x71, 0xbe, 0xb5, 0x24, 0x9e, 0x2e, 0x50, 0x72, 0x01, 0xad, 0x90, - 0x05, 0xdc, 0xac, 0x3d, 0x13, 0x52, 0xb4, 0x8e, 0xd6, 0x9f, 0xdc, 0x6f, 0x7d, 0x94, 0x6b, 0x5e, - 0x19, 0x89, 0x35, 0xbe, 0x1d, 0x96, 0x31, 0xf2, 0x0c, 0x68, 0x61, 0x31, 0x12, 0x72, 0x8a, 0x3e, - 0x55, 0xde, 0x33, 0x60, 0x2b, 0x7f, 0xe5, 0xf1, 0xd7, 0x42, 0x4e, 0x4d, 0x52, 0x65, 0x5b, 0x7f, - 0x07, 0x0f, 0xef, 0x10, 0x96, 0x6e, 0xc8, 0x5e, 0x78, 0x03, 0x73, 0xfc, 0xb7, 0x9a, 0x63, 0x79, - 0x27, 0x78, 0xfb, 0x2f, 0xa0, 0x19, 0xb2, 0xc8, 0xcf, 0x62, 0x8d, 0xee, 0x68, 0x13, 0xcd, 0x1d, - 0xdc, 0x6b, 0xce, 0xbc, 0x0e, 0x94, 0x18, 0x23, 0xe4, 0x3d, 0xc0, 0xb2, 0x0d, 0xba, 0x8d, 0x7a, - 0xe7, 0xfe, 0xc3, 0x29, 0x6f, 0xce, 0xb8, 0xd2, 0x6e, 0x3d, 0x2a, 0xda, 0x23, 0xff, 0x43, 0x43, - 0x69, 0x5f, 0x6a, 0x2f, 0x64, 0xb1, 0x3f, 0xa7, 0xad, 0x5e, 0xa5, 0x5f, 0x73, 0x01, 0xa1, 0x91, - 0x41, 0xc8, 0x39, 0x10, 0x95, 0x8d, 0xbd, 0x62, 0x7c, 
0xf2, 0x13, 0x6b, 0xf7, 0x2a, 0xfd, 0xc6, - 0xf0, 0xf0, 0x56, 0xdd, 0x4f, 0xd9, 0xb8, 0xa8, 0x86, 0xa6, 0x95, 0xdb, 0x51, 0x2b, 0x10, 0xd9, - 0x87, 0xcd, 0xef, 0x82, 0x27, 0x9e, 0x48, 0x68, 0xa7, 0x57, 0xed, 0xd7, 0xdd, 0x0d, 0xb3, 0xfd, - 0x98, 0x98, 0x97, 0xa9, 0x78, 0x32, 0xa5, 0x3b, 0xf6, 0x65, 0x9a, 0x35, 0xe9, 0xc2, 0x96, 0xc0, - 0xb1, 0xf0, 0x63, 0x4a, 0x7a, 0x95, 0xfe, 0x96, 0xbb, 0xd8, 0x93, 0x97, 0xd0, 0xc6, 0xd7, 0x1a, - 0xb2, 0x88, 0x27, 0x1c, 0xc7, 0x69, 0x17, 0xfb, 0xa2, 0xb7, 0xfa, 0x32, 0x66, 0x47, 0x2c, 0x72, - 0x5b, 0xda, 0x2e, 0x72, 0x3e, 0x79, 0x00, 0x4d, 0xe9, 0x6b, 0xe6, 0xc5, 0x7c, 0xc6, 0x35, 0x0b, - 0xe9, 0x1e, 0x96, 0x68, 0x18, 0xec, 0xcc, 0x42, 0xdd, 0x37, 0xd0, 0x59, 0x3d, 0x40, 0x72, 0x04, - 0x35, 0x7b, 0xfe, 0x95, 0xdf, 0xb9, 0x3f, 0xcb, 0xed, 0x5e, 0xc2, 0xde, 0x5d, 0x13, 0x4a, 0x3a, - 0x50, 0x9d, 0xb2, 0x79, 0xfe, 0xa9, 0x98, 0x25, 0x79, 0x0c, 0x35, 0x1c, 0x29, 0xfc, 0x45, 0x1a, - 0xc3, 0xbf, 0x1d, 0xfb, 0x4d, 0x39, 0xc5, 0x37, 0xe5, 0xe0, 0x64, 0xb9, 0x96, 0x74, 0xbc, 0xf6, - 0xbc, 0xd2, 0x4d, 0x81, 0xdc, 0x1e, 0x81, 0x3b, 0x32, 0x8f, 0x6e, 0x66, 0xfe, 0xd3, 0x87, 0xb3, - 0xac, 0x78, 0xc2, 0xe1, 0x9f, 0x40, 0xcc, 0x9c, 0x84, 0xe9, 0x28, 0xe6, 0xd7, 0xab, 0x79, 0x4e, - 0x5a, 0x65, 0xed, 0xf9, 0xf8, 0xf2, 0x78, 0xc2, 0xf5, 0xb7, 0x6c, 0xec, 0x04, 0x62, 0x36, 0xc8, - 0x35, 0x83, 0x85, 0x66, 0x10, 0xc4, 0x9c, 0x25, 0x7a, 0x30, 0x11, 0x13, 0x99, 0x06, 0x25, 0x1c, - 0x7f, 0xef, 0xf1, 0x06, 0xa6, 0x3c, 0xfa, 0x15, 0x00, 0x00, 0xff, 0xff, 0xa9, 0x25, 0x52, 0x75, - 0x0d, 0x06, 0x00, 0x00, -} diff --git a/polyglot-clients/gogrpc/conductor/worker.go b/polyglot-clients/gogrpc/conductor/worker.go deleted file mode 100644 index 63a67fb8b..000000000 --- a/polyglot-clients/gogrpc/conductor/worker.go +++ /dev/null @@ -1,177 +0,0 @@ -package conductor - -import ( - "context" - "fmt" - "os" - "runtime" - "sync" - "sync/atomic" - "time" - - "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" - 
"github.com/netflix/conductor/client/gogrpc/conductor/model" -) - -// An Executor is a struct that executes the logic required to resolve -// a task. Each Worker instance uses an Executor to run the polled tasks. -type Executor interface { - // Execute attempt to resolve the given Task and returns a TaskResult - // with its output. The given Context carries a deadline which must be - // enforced by the implementation. - // This function will be called by the Worker for each incoming Task, - // and must be threadsafe as it can be called by several goroutines - // concurrently. - Execute(context.Context, *model.Task) (*model.TaskResult, error) - - // ConnectionError is called by a Worker whenever there's an error with - // a GRPC connection. The GRPC error is passed in as its only argument. - // If this function returns nil, the Worker will continue retrying the - // connection; if it returns a non-nill error, the Worker will stop its - // execution and return the given error as the result of the Worker.Run - // function. - ConnectionError(error) error -} - -// A Worker uses a TaskClient to poll the Conductor server for new tasks and -// executes them using an Executor instance, returning the result of the task -// to the upstream server. -// The Worker struct must be created manually with the desired settings, and then -// ran with Worker.Run. -// Client implementations usually run a single Worker per process, or one worker per Task Type -// if a process needs to execute tasks of different types. The Concurrency -// field allows the worker to execute tasks concurrently in several goroutines. -type Worker struct { - // TaskType is the identifier for the type of tasks that this worker can - // execute. This will be send to Conductor when polling for new tasks. - TaskType string - - // TaskTimeout is the total duration that a task will be executed for. This - // includes the time required to poll, execute and return the task's results. 
- // If not set, tasks will not timeout. - TaskTimeout time.Duration - - // Identifier is an unique identifier for this worker. If not set, it defaults - // to the local hostname. - Identifier string - - // Concurrency is the amount of goroutines that wil poll for tasks and execute - // them concurrently. If not set, it defaults to GOMAXPROCS, a sensible default. - Concurrency int - - // Executor is an instance of an Executor that will actually run the logic required - // for each task. See conductor.Executor. - Executor Executor - - // Client is an instance of a conductor.Client that implements a Task service. - // See conductor.Client - Client TasksClient - - waitThreads sync.WaitGroup - active int32 // atomic - shutdown chan struct{} - shutdownFlag sync.Once - result error -} - -// Run executes the main loop of the Worker, spawning several gorutines to poll and -// resolve tasks from a Conductor server. -// This is a blocking call that will not return until Worker.Shutdown is called from -// another goroutine. When shutting down cleanly, this function returns nil; otherwise -// an error is returned if there's been a problem with the GRPC connection and the Worker -// cannot continue running. 
-func (worker *Worker) Run() error { - if worker.TaskType == "" { - return fmt.Errorf("conductor: missing field 'TaskType'") - } - if worker.Executor == nil { - return fmt.Errorf("conductor: missing field 'Executor'") - } - if worker.Client == nil { - return fmt.Errorf("conductor: missing field 'Client'") - } - if worker.Identifier == "" { - hostname, err := os.Hostname() - if err != nil { - return err - } - worker.Identifier = fmt.Sprintf("%s (conductor-go)", hostname) - } - if worker.Concurrency == 0 { - worker.Concurrency = runtime.GOMAXPROCS(0) - } - - worker.active = 0 - worker.result = nil - worker.shutdown = make(chan struct{}) - worker.waitThreads.Add(worker.Concurrency) - - for i := 0; i < worker.Concurrency; i++ { - go worker.thread() - } - - worker.waitThreads.Wait() - return worker.result -} - -// Shutdown stops this worker gracefully. This function is thread-safe and may -// be called from any goroutine. Only the first call to Shutdown will have -// an effect. -func (worker *Worker) Shutdown() { - worker.shutdownOnce(nil) -} - -func (worker *Worker) shutdownOnce(err error) { - worker.shutdownFlag.Do(func() { - worker.result = err - close(worker.shutdown) - worker.waitThreads.Wait() - worker.Client.Shutdown() - }) -} - -func (worker *Worker) onError(err error) { - userErr := worker.Executor.ConnectionError(err) - if userErr != nil { - worker.shutdownOnce(userErr) - } -} - -func (worker *Worker) runTask(req *tasks.PollRequest) error { - ctx, cancel := context.WithTimeout(context.Background(), worker.TaskTimeout) - defer cancel() - - task, err := worker.Client.Tasks().Poll(ctx, req) - if err != nil { - return err - } - - result, err := worker.Executor.Execute(ctx, task.Task) - // TODO: what if the task failed? 
- if err == nil { - request := tasks.UpdateTaskRequest{Result: result} - _, err := worker.Client.Tasks().UpdateTask(context.Background(), &request) - if err != nil { - return err - } - } - return nil -} - -func (worker *Worker) thread() { - defer worker.waitThreads.Done() - - pollRequest := &tasks.PollRequest{ - TaskType: worker.TaskType, - WorkerId: worker.Identifier, - } - - for range worker.shutdown { - atomic.AddInt32(&worker.active, 1) - err := worker.runTask(pollRequest) - if err != nil { - worker.onError(err) - } - atomic.AddInt32(&worker.active, -1) - } -} diff --git a/polyglot-clients/gogrpc/conductor/worker_test.go b/polyglot-clients/gogrpc/conductor/worker_test.go deleted file mode 100644 index 39e6416f2..000000000 --- a/polyglot-clients/gogrpc/conductor/worker_test.go +++ /dev/null @@ -1,197 +0,0 @@ -package conductor - -import ( - "context" - "flag" - "fmt" - "io" - "math/rand" - "sync" - "testing" - "time" - - "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" - - "github.com/netflix/conductor/client/gogrpc/conductor/model" - "google.golang.org/grpc" - - "github.com/stretchr/testify/assert" -) - -var doTrace = flag.Bool("dotrace", false, "print tracing information") - -func trace(format string, args ...interface{}) { - if *doTrace { - fmt.Printf(format, args...) 
- } -} - -type fakeTaskService struct { - latency time.Duration - shutdown chan struct{} - mu sync.Mutex - completed map[string]bool - result error -} - -func randomTaskID() string { - return fmt.Sprintf("task-%08x", rand.Int63()) -} - -var ErrNotImplemented = fmt.Errorf("API call not implemented") - -func (s *fakeTaskService) newTask(req *tasks.PollRequest) (*model.Task, error) { - id := randomTaskID() - - s.mu.Lock() - s.completed[id] = false - s.mu.Unlock() - - return &model.Task{ - TaskType: req.GetTaskType(), - Status: model.Task_SCHEDULED, - TaskId: id, - }, nil -} - -func (s *fakeTaskService) updateTask(res *model.TaskResult) (*tasks.UpdateTaskResponse, error) { - id := res.GetTaskId() - - s.mu.Lock() - if _, found := s.completed[id]; !found { - panic("missing task: " + id) - } - s.completed[id] = true - s.mu.Unlock() - - return &tasks.UpdateTaskResponse{ - TaskId: id, - }, nil -} - -func (s *fakeTaskService) Poll(ctx context.Context, in *tasks.PollRequest, opts ...grpc.CallOption) (*tasks.PollResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) BatchPoll(context.Context, *tasks.BatchPollRequest, ...grpc.CallOption) (tasks.TaskService_BatchPollClient, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetPendingTaskForWorkflow(context.Context, *tasks.PendingTaskRequest, ...grpc.CallOption) (*tasks.PendingTaskResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetTasksInProgress(ctx context.Context, in *tasks.TasksInProgressRequest, opts ...grpc.CallOption) (*tasks.TasksInProgressResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) UpdateTask(ctx context.Context, in *tasks.UpdateTaskRequest, opts ...grpc.CallOption) (*tasks.UpdateTaskResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) AckTask(ctx context.Context, in *tasks.AckTaskRequest, opts ...grpc.CallOption) (*tasks.AckTaskResponse, error) { - return nil, 
ErrNotImplemented -} - -func (s *fakeTaskService) AddLog(ctx context.Context, in *tasks.AddLogRequest, opts ...grpc.CallOption) (*tasks.AddLogResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetQueueAllInfo(ctx context.Context, in *tasks.QueueAllInfoRequest, opts ...grpc.CallOption) (*tasks.QueueAllInfoResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetQueueInfo(ctx context.Context, in *tasks.QueueInfoRequest, opts ...grpc.CallOption) (*tasks.QueueInfoResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetTaskLogs(ctx context.Context, in *tasks.GetTaskLogsRequest, opts ...grpc.CallOption) (*tasks.GetTaskLogsResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetTask(ctx context.Context, in *tasks.GetTaskRequest, opts ...grpc.CallOption) (*tasks.GetTaskResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) RemoveTaskFromQueue(ctx context.Context, in *tasks.RemoveTaskRequest, opts ...grpc.CallOption) (*tasks.RemoveTaskResponse, error) { - return nil, ErrNotImplemented -} - -func (s *fakeTaskService) GetQueueSizesForTasks(ctx context.Context, in *tasks.QueueSizesRequest, opts ...grpc.CallOption) (*tasks.QueueSizesResponse, error) { - return nil, ErrNotImplemented -} - - -type fakeTaskClient struct { - tasks *fakeTaskService -} - -func (c *fakeTaskClient) Tasks() tasks.TaskServiceClient { - return c.tasks -} - -func (c *fakeTaskClient) forceShutdown(err error) { - c.tasks.result = err - close(c.tasks.shutdown) -} - -func (c *fakeTaskClient) Shutdown() { - c.tasks.result = io.EOF - close(c.tasks.shutdown) -} - -func newFakeTaskClient(latency time.Duration) *fakeTaskClient { - return &fakeTaskClient{ - tasks: &fakeTaskService{ - shutdown: make(chan struct{}), - latency: latency, - }, - } -} - -type slowExecutor struct { - mu sync.Mutex - recv []*model.Task - delay time.Duration -} - -func (exe *slowExecutor) 
Execute(ctx context.Context, m *model.Task) (*model.TaskResult, error) { - exe.mu.Lock() - exe.recv = append(exe.recv, m) - exe.mu.Unlock() - - time.Sleep(exe.delay) - return &model.TaskResult{ - TaskId: m.GetTaskId(), - Status: model.TaskResult_COMPLETED, - }, nil -} - -func (exe *slowExecutor) ConnectionError(err error) error { - panic(err) -} - -func TestWorkerInterface(t *testing.T) { - mock := newFakeTaskClient(200 * time.Millisecond) - exec := &slowExecutor{ - delay: 100 * time.Millisecond, - } - - worker := &Worker{ - TaskType: "fake-task", - Concurrency: 4, - Executor: exec, - Client: mock, - } - - time.AfterFunc(1*time.Second, func() { - worker.Shutdown() - }) - - assert.NoError(t, worker.Run()) - - for id, completed := range mock.tasks.completed { - assert.Truef(t, completed, "task %s was not reported as completed", id) - } - assert.Equal(t, len(mock.tasks.completed), len(exec.recv)) -} diff --git a/polyglot-clients/gogrpc/go.mod b/polyglot-clients/gogrpc/go.mod deleted file mode 100644 index ff2ebf1cf..000000000 --- a/polyglot-clients/gogrpc/go.mod +++ /dev/null @@ -1,6 +0,0 @@ -module github.com/netflix/conductor/client/gogrpc - -require ( - github.com/stretchr/testify v1.2.1 - google.golang.org/grpc v1.15.0 -) diff --git a/polyglot-clients/gogrpc/go.sum b/polyglot-clients/gogrpc/go.sum deleted file mode 100644 index 5db68e5cb..000000000 --- a/polyglot-clients/gogrpc/go.sum +++ /dev/null @@ -1,21 +0,0 @@ -github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/golang/protobuf v1.1.0 h1:0iH4Ffd/meGoXqF2lSAhZHt8X+cPgkfn/cb6Cce5Vpc= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/netflix/conductor v1.11.5 h1:HxVBurFWxFtz56wepyiXFkLunRISlXTKfBlkuAnXPd0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib 
v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/square/goprotowrap v0.0.0-20180504135057-6f414ea4a80c h1:iIIx5xujWT8vyoW+umx0/JSpNvZQgQUv9krqlN2AOTA= -github.com/square/goprotowrap v0.0.0-20180504135057-6f414ea4a80c/go.mod h1:ss+tcSDAsyytwf1fIIsDTBbLS5uMvktdl8DvEZwELx4= -github.com/stretchr/testify v1.2.1 h1:52QO5WkIUcHGIR7EnGagH88x1bUzqGXTC5/1bDTUQ7U= -github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33 h1:I6FyU15t786LL7oL/hn43zqTuEGr4PN7F4XJ1p4E3Y8= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -google.golang.org/genproto v0.0.0-20180831171423-11092d34479b h1:lohp5blsw53GBXtLyLNaTXPXS9pJ1tiTw61ZHUoE9Qw= -google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/grpc v1.14.0 h1:ArxJuB1NWfPY6r9Gp9gqwplT0Ge7nqv9msgu03lHLmo= -google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= diff --git a/polyglot-clients/gogrpc/tools.go b/polyglot-clients/gogrpc/tools.go deleted file mode 100644 index 5b4c8025a..000000000 --- a/polyglot-clients/gogrpc/tools.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build tools - -package tools - -import ( - _ "github.com/golang/protobuf/protoc-gen-go" - _ "github.com/square/goprotowrap/cmd/protowrap" - _ "github.com/kazegusuri/grpcurl" -) diff --git a/polyglot-clients/python/.gitignore b/polyglot-clients/python/.gitignore deleted file mode 100644 index 3ce1d246a..000000000 --- 
a/polyglot-clients/python/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.pytest_cache diff --git a/polyglot-clients/python/README.md b/polyglot-clients/python/README.md deleted file mode 100644 index da64aeac3..000000000 --- a/polyglot-clients/python/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# Python client for Conductor -Python client for Conductor provides two sets of functions: - -1. Workflow management APIs (start, terminate, get workflow status etc.) -2. Worker execution framework - -## Install - -```Using virtualenv - virtualenv conductorclient - source conductorclient/bin/activate - cd ../conductor/client/python - python setup.py install -``` - -## Using Workflow Management API -Python class ```WorkflowClient``` provides client API calls to the conductor server to start manage the workflows. - -### Example - -```python -import sys -from conductor import conductor -import json - -def getStatus(workflowId): - - workflowClient = conductor.WorkflowClient('http://localhost:8080/api') - - workflow_json = workflowClient.getWorkflow(workflowId) - print json.dumps(workflow_json, indent=True, separators=(',', ': ')) - - return workflow_json - -``` - -## Task Worker Execution -Task worker execution APIs facilitates execution of a task worker using python client. -The API provides necessary mechanism to poll for task work at regular interval and executing the python worker in a separate threads. - -### Example -The following python script demonstrates workers for the kitchensink workflow. 
- -```python -from __future__ import print_function -from conductor.ConductorWorker import ConductorWorker - -def execute(task): - return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0}, 'logs': ['one', 'two']} - -def execute4(task): - forkTasks = [{"name": "task_1", "taskReferenceName": "task_1_1", "type": "SIMPLE"},{"name": "sub_workflow_4", "taskReferenceName": "wf_dyn", "type": "SUB_WORKFLOW", "subWorkflowParam": {"name": "sub_flow_1"}}]; - input = {'task_1_1': {}, 'wf_dyn': {}} - return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0, 'dynamicTasks': forkTasks, 'inputs': input}, 'logs': ['one','two']} - -def main(): - print('Starting Kitchensink workflows') - cc = ConductorWorker('http://localhost:8080/api', 1, 0.1) - for x in range(1, 30): - if(x == 4): - cc.start('task_{0}'.format(x), execute4, False) - else: - cc.start('task_{0}'.format(x), execute, False) - cc.start('task_30', execute, True) - -if __name__ == '__main__': - main() -``` diff --git a/polyglot-clients/python/conductor/ConductorWorker.py b/polyglot-clients/python/conductor/ConductorWorker.py deleted file mode 100644 index 6c4f13790..000000000 --- a/polyglot-clients/python/conductor/ConductorWorker.py +++ /dev/null @@ -1,183 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import print_function, absolute_import -import sys -import time -from conductor.conductor import WFClientMgr -from threading import Thread -import socket -from enum import Enum - -hostname = socket.gethostname() - -class TaskStatus(Enum): - IN_PROGRESS = 'IN_PROGRESS' - FAILED = 'FAILED' - FAILED_WITH_TERMINAL_ERROR = 'FAILED_WITH_TERMINAL_ERROR' - COMPLETED = 'COMPLETED' - - def __str__(self): - return str(self.value) - - - -class ConductorWorker: - """ - Main class for implementing Conductor Workers - - A conductor worker is a separate system that executes the various - tasks that the conductor server queues up for execution. The worker - can run on the same instance as the server or on a remote instance. - - The worker generally provides a wrapper around some function that - performs the actual execution of the task. The function that is - being executed must return a `dict` with the `status`, `output` and - `log` keys. If these keys are not present, the worker will raise an - Exception after completion of the task. - - The start method is used to begin continous polling and execution - of the tasks that the conductor server makes available. The same - script can run multiple workers using the wait argument. For more - details, view the start method - """ - def __init__(self, server_url, thread_count, polling_interval, worker_id=None): - """ - Parameters - ---------- - server_url: str - The url to the server hosting the conductor api. - Ex: 'http://localhost:8080/api' - thread_count: int - The number of threads that will be polling for and - executing tasks in case of using the start method. - polling_interval: float - The number of seconds that each worker thread will wait - between polls to the conductor server. - worker_id: str, optional - The worker_id of the worker that is going to execute the - task. 
For further details, refer to the documentation - By default, it is set to hostname of the machine - """ - wfcMgr = WFClientMgr(server_url) - self.workflowClient = wfcMgr.workflowClient - self.taskClient = wfcMgr.taskClient - self.thread_count = thread_count - self.polling_interval = polling_interval - self.worker_id = worker_id or hostname - - @staticmethod - def task_result(status: TaskStatus, output=None, logs=None, reasonForIncompletion=None): - """ - Get task result - Parameters - ---------- - status: TaskStatus - The status of the task - Ex: TaskStatus.COMPLETED - output: dict - results of task processing - logs: list - log list - reasonForIncompletion: str, optional - the reason for not completing the task if any - """ - if logs is None: - logs = [] - if output is None: - output = {} - ret = { - 'status': status.__str__(), - 'output': output, - 'logs': logs - } - if reasonForIncompletion: - ret['reasonForIncompletion'] = reasonForIncompletion - return ret - - def execute(self, task, exec_function): - try: - resp = exec_function(task) - if type(resp) is not dict or not all(key in resp for key in ('status', 'output', 'logs')): - raise Exception('Task execution function MUST return a response as a dict with status, output and logs fields') - task['status'] = resp['status'] - task['outputData'] = resp['output'] - task['logs'] = resp['logs'] - if 'callbackAfterSeconds' in resp: - task['callbackAfterSeconds'] = resp['callbackAfterSeconds'] - if 'reasonForIncompletion' in resp: - task['reasonForIncompletion'] = resp['reasonForIncompletion'] - self.taskClient.updateTask(task) - except Exception as err: - print(f'Error executing task: {exec_function.__name__} with error: {str(err)}') - task['status'] = 'FAILED' - self.taskClient.updateTask(task) - - def poll_and_execute(self, taskType, exec_function, domain=None): - while True: - time.sleep(float(self.polling_interval)) - polled = self.taskClient.pollForTask(taskType, self.worker_id, domain) - if polled is not None: 
- self.execute(polled, exec_function) - - def start(self, taskType, exec_function, wait, domain=None): - """ - start begins the continuous polling of the conductor server - - Parameters - ---------- - taskType: str - The name of the task that the worker is looking to execute - exec_function: function - The function that the worker will execute. The function - must return a dict with the `status`, `output` and `logs` - keys present. If this is not present, an Exception will be - raised - wait: bool - Whether the worker will block execution of further code. - Since the workers are being run in daemon threads, when the - program completes execution, all the threads are destroyed. - Setting wait to True prevents the program from ending. - If multiple workers are being called from the same program, - all but the last start call but have wait set to False. - The last start call must always set wait to True. If a - single worker is being called, set wait to True. - domain: str, optional - The domain of the task under which the worker will run. 
For - further details refer to the conductor server documentation - By default, it is set to None - """ - print('Polling for task %s at a %f ms interval with %d threads for task execution, with worker id as %s' % (taskType, self.polling_interval * 1000, self.thread_count, self.worker_id)) - for x in range(0, int(self.thread_count)): - thread = Thread(target=self.poll_and_execute, args=(taskType, exec_function, domain,)) - thread.daemon = True - thread.start() - if wait: - while 1: - time.sleep(1) - - -def exc(taskType, inputData, startTime, retryCount, status, callbackAfterSeconds, pollCount): - print('Executing the function') - return {'status': 'COMPLETED', 'output': {}, 'logs': []} - - -def main(): - cc = ConductorWorker('http://localhost:8080/api', 5, 0.1) - cc.start(sys.argv[1], exc, False) - cc.start(sys.argv[2], exc, True) - - -if __name__ == '__main__': - main() diff --git a/polyglot-clients/python/conductor/__init__.py b/polyglot-clients/python/conductor/__init__.py deleted file mode 100644 index 137d70b6b..000000000 --- a/polyglot-clients/python/conductor/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = '1.0.0' -VERSION = tuple(map(int, __version__.split('.'))) - -__all__ = ['conductor','ConductorWorker'] diff --git a/polyglot-clients/python/conductor/conductor.py b/polyglot-clients/python/conductor/conductor.py deleted file mode 100644 index ca727347d..000000000 --- a/polyglot-clients/python/conductor/conductor.py +++ /dev/null @@ -1,373 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import print_function -import requests -import json -import sys -import socket -import warnings - - -hostname = socket.gethostname() - - -class BaseClient(object): - printUrl = False - headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} - - def __init__(self, baseURL, baseResource): - self.baseURL = baseURL - self.baseResource = baseResource - - def get(self, resPath, queryParams=None): - theUrl = "{}/{}".format(self.baseURL, resPath) - resp = requests.get(theUrl, params=queryParams) - self.__checkForSuccess(resp) - if(resp.content == b''): - return None - else: - return resp.json() - - def post(self, resPath, queryParams, body, headers=None): - theUrl = "{}/{}".format(self.baseURL, resPath) - theHeader = self.headers - if headers is not None: - theHeader = self.mergeTwoDicts(self.headers, headers) - if body is not None: - jsonBody = json.dumps(body, ensure_ascii=False).encode('utf8') - resp = requests.post(theUrl, params=queryParams, data=jsonBody, headers=theHeader) - else: - resp 
= requests.post(theUrl, params=queryParams, headers=theHeader) - - self.__checkForSuccess(resp) - return self.__return(resp, theHeader) - - def put(self, resPath, queryParams=None, body=None, headers=None): - theUrl = "{}/{}".format(self.baseURL, resPath) - theHeader = self.headers - if headers is not None: - theHeader = self.mergeTwoDicts(self.headers, headers) - - if body is not None: - jsonBody = json.dumps(body, ensure_ascii=False).encode('utf8') - resp = requests.put(theUrl, params=queryParams, data=jsonBody, headers=theHeader) - else: - resp = requests.put(theUrl, params=queryParams, headers=theHeader) - - self.__print(resp) - self.__checkForSuccess(resp) - - def delete(self, resPath, queryParams): - theUrl = "{}/{}".format(self.baseURL, resPath) - resp = requests.delete(theUrl, params=queryParams) - self.__print(resp) - self.__checkForSuccess(resp) - - def makeUrl(self, urlformat=None, *argv): - url = self.baseResource + '/' - if urlformat: - url += urlformat.format(*argv) - return url - - def makeParams(self, **kwargs): - return dict((k, v) for k, v in kwargs.items() if v is not None) or None - - def mergeTwoDicts(self, x, y): - z = x.copy() - z.update(y) - return z - - def __print(self, resp): - if self.printUrl: - print(resp.url) - - def __return(self, resp, header): - retval = '' - if len(resp.text) > 0: - if header['Accept'] == 'text/plain': - retval = resp.text - elif header['Accept'] == 'application/json': - retval = resp.json() - else: - retval = resp.text - return retval - - def __checkForSuccess(self, resp): - try: - resp.raise_for_status() - except requests.HTTPError: - print("ERROR: " + resp.text) - raise - - -class MetadataClient(BaseClient): - BASE_RESOURCE = 'metadata' - - def __init__(self, baseURL): - BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) - - def getWorkflowDef(self, wfname, version=None): - url = self.makeUrl('workflow/{}', wfname) - return self.get(url, self.makeParams(version=version)) - - def createWorkflowDef(self, 
wfdObj): - url = self.makeUrl('workflow') - return self.post(url, None, wfdObj) - - def updateWorkflowDefs(self, listOfWfdObj): - url = self.makeUrl('workflow') - self.put(url, None, listOfWfdObj) - - def getAllWorkflowDefs(self): - url = self.makeUrl('workflow') - return self.get(url) - - def unRegisterWorkflowDef(self, wfname, version): - url = self.makeUrl("workflow/{name}/{version}".format(name=wfname, version=version)) - self.delete(url, None) - - def getTaskDef(self, tdName): - url = self.makeUrl('taskdefs/{}', tdName) - return self.get(url) - - def registerTaskDefs(self, listOfTaskDefObj): - url = self.makeUrl('taskdefs') - return self.post(url, None, listOfTaskDefObj) - - def registerTaskDef(self, taskDefObj): - """registerTaskDef is deprecated since PUT /metadata/taskdefs does not - register but updates a task definition. Use updateTaskDef function - instead. - """ - warnings.warn(self.registerTaskDef.__doc__, DeprecationWarning) - url = self.makeUrl('taskdefs') - self.put(url, None, taskDefObj) - - def updateTaskDef(self, taskDefObj): - url = self.makeUrl('taskdefs') - self.put(url, None, taskDefObj) - - def unRegisterTaskDef(self, tdName, reason=None): - url = self.makeUrl('taskdefs/{}', tdName) - self.delete(url, self.makeParams(reason=reason)) - - def getAllTaskDefs(self): - url = self.makeUrl('taskdefs') - return self.get(url) - - -class TaskClient(BaseClient): - BASE_RESOURCE = 'tasks' - - def __init__(self, baseURL): - BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) - - def getTask(self, taskId): - url = self.makeUrl('{}', taskId) - return self.get(url) - - def updateTask(self, taskObj): - url = self.makeUrl('') - headers = {'Accept': 'text/plain'} - self.post(url, None, taskObj, headers) - - def pollForTask(self, taskType, workerid, domain=None): - url = self.makeUrl('poll/{}', taskType) - params = {} - params['workerid'] = workerid - if domain is not None: - params['domain'] = domain - - try: - return self.get(url, params) - except 
Exception as err: - print('Error while polling ' + str(err)) - return None - - def pollForBatch(self, taskType, count, timeout, workerid, domain=None): - url = self.makeUrl('poll/batch/{}', taskType) - params = {} - params['workerid'] = workerid - params['count'] = count - params['timeout'] = timeout - - if domain is not None: - params['domain'] = domain - - try: - return self.get(url, params) - except Exception as err: - print('Error while polling ' + str(err)) - return None - - def getTasksInQueue(self, taskName): - url = self.makeUrl('queue/{}', taskName) - return self.get(url) - - def removeTaskFromQueue(self, taskId, reason=None): - url = self.makeUrl('queue/{}', taskId) - params = {} - params['reason'] = reason - self.delete(url, params) - - def getTaskQueueSizes(self, listOfTaskName): - url = self.makeUrl('queue/sizes') - return self.post(url, None, listOfTaskName) - - -class WorkflowClient(BaseClient): - BASE_RESOURCE = 'workflow' - - def __init__(self, baseURL): - BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) - - def getWorkflow(self, wfId, includeTasks=True): - url = self.makeUrl('{}', wfId) - params = {} - params['includeTasks'] = includeTasks - return self.get(url, params) - - def getRunningWorkflows(self, wfName, version=None, startTime=None, endTime=None): - url = self.makeUrl('running/{}', wfName) - params = {} - params['version'] = version - params['startTime'] = startTime - params['endTime'] = endTime - return self.get(url, params) - - def startWorkflow(self, wfName, inputjson, version=None, correlationId=None): - url = self.makeUrl('{}', wfName) - params = {} - params['version'] = version - params['correlationId'] = correlationId - headers = {'Accept': 'text/plain'} - return self.post(url, params, inputjson, headers) - - def terminateWorkflow(self, wfId, reason=None): - url = self.makeUrl('{}', wfId) - params = {} - params['reason'] = reason - self.delete(url, params) - - def removeWorkflow(self, wfId, archiveWorkflow, reason=None): - url 
= self.makeUrl('{}/remove', wfId) - self.delete(url, self.makeParams(archiveWorkflow=archiveWorkflow, reason=reason)) - - def pauseWorkflow(self, wfId): - url = self.makeUrl('{}/pause', wfId) - self.put(url) - - def resumeWorkflow(self, wfId): - url = self.makeUrl('{}/resume', wfId) - self.put(url) - - def skipTaskFromWorkflow(self, wfId, taskRefName, skipTaskRequest): - url = self.makeUrl('{}/skiptask/{}', wfId, taskRefName) - self.post(url, None, skipTaskRequest) - - def rerunWorkflow(self, wfId, taskRefName, rerunWorkflowRequest): - url = self.makeUrl('{}/rerun', wfId) - return self.post(url, None, rerunWorkflowRequest) - - def restartWorkflow(self, wfId, taskRefName, fromTaskRef): - url = self.makeUrl('{}/restart', wfId) - params = {} - params['from'] = fromTaskRef - self.post(url, params, None) - -class EventServicesClient(BaseClient): - BASE_RESOURCE = 'event' - - def __init__(self, baseURL): - BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) - - def getEventHandlerDef(self, event, activeOnly=True): - url = self.makeUrl('{}', event) - params = {} - params['activeOnly'] = activeOnly - return self.get(url, params) - - def getEventHandlerDefs(self): - url = self.makeUrl() - return self.get(url) - - def createEventHandlerDef(self, ehObj): - url = self.makeUrl() - return self.post(url, None, ehObj) - - def updateEventHandlerDef(self, ehObj): - url = self.makeUrl() - return self.put(url, None, ehObj) - - def removeEventHandler(self, ehName): - url = self.makeUrl('{}', ehName) - self.delete(url, {}) - - def getEventHandlerQueues(self): - url = self.makeUrl('queues') - return self.get(url) - - def getEventHandlerQueuesProviders(self): - url = self.makeUrl('queues/providers') - return self.get(url) - -class WFClientMgr: - def __init__(self, server_url='http://localhost:8080/api/'): - self.workflowClient = WorkflowClient(server_url) - self.taskClient = TaskClient(server_url) - self.metadataClient = MetadataClient(server_url) - - -def main(): - if len(sys.argv) < 
3: - print("Usage - python conductor server_url command parameters...") - return None - - server_url = sys.argv[1] - command = sys.argv[2] - wfcMgr = WFClientMgr(server_url) - wfc = wfcMgr.workflowClient - if command == 'start': - if len(sys.argv) < 7: - print('python conductor server_url start workflow_name input_json [version] [correlationId]') - return None - wfName = sys.argv[3] - input = json.loads(sys.argv[5]) - correlationId = sys.argv[6] - workflowId = wfc.startWorkflow(wfName, input, 1, correlationId) - print(workflowId) - return workflowId - elif command == 'get': - if len(sys.argv) < 4: - print('python conductor server_url get workflow_id') - return None - wfId = sys.argv[3] - wfjson = wfc.getWorkflow(wfId) - print(json.dumps(wfjson, indent=True, separators=(',', ': '))) - return wfjson - elif command == 'terminate': - if len(sys.argv) < 4: - print('python conductor server_url terminate workflow_id') - return None - wfId = sys.argv[3] - wfc.terminateWorkflow(wfId) - print('OK') - return wfId - - -if __name__ == '__main__': - main() - diff --git a/polyglot-clients/python/conductor_shell.py b/polyglot-clients/python/conductor_shell.py deleted file mode 100644 index 33e0673fe..000000000 --- a/polyglot-clients/python/conductor_shell.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import print_function -import sys -from conductor import conductor -import json - - -def main(): - if(len(sys.argv) < 3): - print("Usage - python conductor server_url command parameters...") - return None - - wfc = conductor.WorkflowClient(sys.argv[1]) - command = sys.argv[2] - if command == 'start': - if len(sys.argv) < 5: - print('python conductor server_url start workflow_name input_json [version] [correlationId]') - return None - wfName = sys.argv[3] - input = json.loads(sys.argv[4]) - workflowId = wfc.startWorkflow(wfName, input, 1, None) - print(workflowId) - return workflowId - elif command == 'get': - if len(sys.argv) < 4: - print('python conductor server_url get workflow_id') - return None - wfId = sys.argv[3] - wfjson = wfc.getWorkflow(wfId) - print(json.dumps(wfjson, indent=True, separators=(',', ': '))) - return wfjson - elif command == 'terminate': - if len(sys.argv) < 4: - print('python conductor server_url terminate workflow_id') - return None - wfId = sys.argv[3] - wfjson = wfc.terminateWorkflow(wfId) - print('OK') - return wfId - -if __name__ == '__main__': - main() diff --git a/polyglot-clients/python/kitchensink_workers.py b/polyglot-clients/python/kitchensink_workers.py deleted file mode 100644 index 31e95072e..000000000 --- a/polyglot-clients/python/kitchensink_workers.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import print_function -from conductor.ConductorWorker import ConductorWorker,TaskStatus - -def execute(task): - return ConductorWorker.task_result( - status=TaskStatus.COMPLETED, - output= {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0}, - logs=['one','two'] - ) - -def execute4(task): - forkTasks = [{"name": "task_1", "taskReferenceName": "task_1_1", "type": "SIMPLE"},{"name": "sub_workflow_4", "taskReferenceName": "wf_dyn", "type": "SUB_WORKFLOW", "subWorkflowParam": {"name": "sub_flow_1"}}]; - input = {'task_1_1': {}, 'wf_dyn': {}} - return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 
'task_1', 'oddEven': 0, 'dynamicTasks': forkTasks, 'inputs': input}, 'logs': ['one','two']} - -def main(): - print('Starting Kitchensink workflows') - cc = ConductorWorker('http://localhost:8080/api', 1, 0.1) - for x in range(1, 30): - if(x == 4): - cc.start('task_{0}'.format(x), execute4, False) - else: - cc.start('task_{0}'.format(x), execute, False) - cc.start('task_30', execute, True) - -if __name__ == '__main__': - main() diff --git a/polyglot-clients/python/setup.cfg b/polyglot-clients/python/setup.cfg deleted file mode 100644 index b88034e41..000000000 --- a/polyglot-clients/python/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -description-file = README.md diff --git a/polyglot-clients/python/setup.py b/polyglot-clients/python/setup.py deleted file mode 100644 index 053d27bed..000000000 --- a/polyglot-clients/python/setup.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright 2017 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from setuptools import setup - -setup( - name = 'conductor', - packages = ['conductor'], # this must be the same as the name above - version = '1.0.0', - description = 'Conductor python client', - author = 'Viren Baraiya', - author_email = 'vbaraiya@netflix.com', - url = 'https://github.com/netflix/conductor', - download_url = 'https://github.com/Netflix/conductor/releases', - keywords = ['conductor'], - license = 'Apache 2.0', - install_requires = [ - 'requests', - ], - classifiers = [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python :: 2.7', - 'Topic :: Workflow', - 'Topic :: Microservices', - 'Topic :: Orchestration', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: System :: Networking' - ], -) diff --git a/polyglot-clients/python/test_conductor.py b/polyglot-clients/python/test_conductor.py deleted file mode 100644 index 2924bdea8..000000000 --- a/polyglot-clients/python/test_conductor.py +++ /dev/null @@ -1,79 +0,0 @@ -import threading -import mock -import json -from conductor.conductor import TaskClient -from conductor.ConductorWorker import ConductorWorker - - -@mock.patch('requests.get') -def test_pollForTask(requests_get): - task_client = TaskClient('base') - task_client.pollForTask('fooType', 'barWorker') - requests_get.assert_called_with('base/tasks/poll/fooType', params={'workerid': 'barWorker'}) - - task_client.pollForTask('fooType', 'barWorker', 'bazDomain') - requests_get.assert_called_with('base/tasks/poll/fooType', - params={'workerid': 'barWorker', 'domain': 'bazDomain'}) - - -@mock.patch('requests.get') -def test_pollForBatch(requests_get): - task_client = TaskClient('base') - task_client.pollForBatch('fooType', 20, 100, 'barWorker') - requests_get.assert_called_with( - 'base/tasks/poll/batch/fooType', - params={'workerid': 
'barWorker', 'count': 20, 'timeout': 100}) - - task_client.pollForBatch('fooType', 20, 100, 'barWorker', 'a_domain') - requests_get.assert_called_with( - 'base/tasks/poll/batch/fooType', - params={'workerid': 'barWorker', 'count': 20, 'timeout': 100, 'domain': 'a_domain'}) - - -@mock.patch('requests.post') -def test_updateTask(post): - task_client = TaskClient('base') - task_obj = {'task_id': '123', 'result': 'fail'} - task_client.updateTask(task_obj) - post.assert_called_with( - 'base/tasks/', - data=json.dumps(task_obj), - headers={'Accept': 'application/json', 'Content-Type': 'application/json'}, params=None) - - -def test_conductor_worker(): - num_threads = 2 - worker = ConductorWorker('http://server_url', num_threads, 0.1, 'wid') - num_tasks = num_threads * 3 - id_range = range(123, 123 + num_tasks) - events = [threading.Event() for _ in id_range] - return_val = {'status': '', 'output': 'out', 'logs': []} - - tasks = [{'taskId': str(n)} for n in id_range] - - # output is named outputData in the resulting task - out_tasks = [{'status': '', 'outputData': 'out', 'logs': [], 'taskId': task['taskId']} for task in tasks] - - def exec_function(task): - assert task in tasks - tasks.remove(task) - for ev in events: - if not ev.is_set(): - ev.set() - break - return return_val - - # verify conductor worker call the appropriate method in TaskClient, acks the task, and updates the output - poll = mock.Mock() - ack = mock.Mock() - update = mock.Mock() - with mock.patch.multiple('conductor.conductor.TaskClient', pollForTask=poll, updateTask=update, ackTask=ack): - poll.side_effect = tasks + [None] * num_threads - worker.start('task_a', exec_function, False, 'my_domain') - for ev in events: - assert ev.wait(2) is True - - poll.assert_has_calls([mock.call('task_a', 'wid', 'my_domain')] * num_tasks) - ack.assert_has_calls([mock.call(str(i), 'wid') for i in id_range]) - update.assert_has_calls([mock.call(t) for t in out_tasks]) - diff --git a/redis-concurrency-limit/build.gradle 
b/redis-concurrency-limit/build.gradle deleted file mode 100644 index dd602a569..000000000 --- a/redis-concurrency-limit/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -plugins { - id 'groovy' -} - - -dependencies { - compileOnly 'org.springframework.boot:spring-boot-starter' - compileOnly 'org.springframework.data:spring-data-redis' - - implementation project(':conductor-common') - implementation project(':conductor-core') - implementation "redis.clients:jedis:${revJedis}" - implementation "org.apache.commons:commons-lang3" - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" - testImplementation "org.testcontainers:spock:${revTestContainer}" - testImplementation "org.testcontainers:testcontainers:${revTestContainer}" - testImplementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - testImplementation 'org.springframework.data:spring-data-redis' -} diff --git a/redis-concurrency-limit/dependencies.lock b/redis-concurrency-limit/dependencies.lock deleted file mode 100644 index 60f0dc050..000000000 --- a/redis-concurrency-limit/dependencies.lock +++ /dev/null @@ -1,426 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springframework.boot:spring-boot-starter": { 
- "locked": "2.6.7" - }, - "org.springframework.data:spring-data-redis": { - "locked": "2.6.4" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - 
"testCompileClasspath": { - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.data:spring-data-redis": { - "locked": "2.6.4" - }, - "org.testcontainers:spock": { - "locked": "1.15.3" - }, - "org.testcontainers:testcontainers": { - "locked": "1.15.3" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - 
"org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.data:spring-data-redis": { - "locked": "2.6.4" - }, - "org.testcontainers:spock": { - "locked": "1.15.3" - }, - "org.testcontainers:testcontainers": { - "locked": "1.15.3" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - } -} \ No newline at end of file diff --git a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAO.java 
b/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAO.java deleted file mode 100644 index 410799725..000000000 --- a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAO.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.limit; - -import java.util.Optional; - -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.data.redis.core.StringRedisTemplate; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.dao.ConcurrentExecutionLimitDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.redis.limit.config.RedisConcurrentExecutionLimitProperties; - -@Trace -@Component -@ConditionalOnProperty( - value = "conductor.redis-concurrent-execution-limit.enabled", - havingValue = "true") -public class RedisConcurrentExecutionLimitDAO implements ConcurrentExecutionLimitDAO { - - private static final Logger LOGGER = - LoggerFactory.getLogger(RedisConcurrentExecutionLimitDAO.class); - private static final String CLASS_NAME = RedisConcurrentExecutionLimitDAO.class.getSimpleName(); - - private final StringRedisTemplate stringRedisTemplate; - private final RedisConcurrentExecutionLimitProperties properties; - - public RedisConcurrentExecutionLimitDAO( - StringRedisTemplate stringRedisTemplate, - RedisConcurrentExecutionLimitProperties properties) { - this.stringRedisTemplate = stringRedisTemplate; - this.properties = properties; - } - - /** - * Adds the {@link 
TaskModel} identifier to a Redis Set for the {@link TaskDef}'s name. - * - * @param task The {@link TaskModel} object. - */ - @Override - public void addTaskToLimit(TaskModel task) { - try { - Monitors.recordDaoRequests( - CLASS_NAME, "addTaskToLimit", task.getTaskType(), task.getWorkflowType()); - String taskId = task.getTaskId(); - String taskDefName = task.getTaskDefName(); - String keyName = createKeyName(taskDefName); - - stringRedisTemplate.opsForSet().add(keyName, taskId); - - LOGGER.debug("Added taskId: {} to key: {}", taskId, keyName); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "addTaskToLimit"); - String errorMsg = - String.format( - "Error updating taskDefLimit for task - %s:%s in workflow: %s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - - /** - * Remove the {@link TaskModel} identifier from the Redis Set for the {@link TaskDef}'s name. - * - * @param task The {@link TaskModel} object. 
- */ - @Override - public void removeTaskFromLimit(TaskModel task) { - try { - Monitors.recordDaoRequests( - CLASS_NAME, "removeTaskFromLimit", task.getTaskType(), task.getWorkflowType()); - String taskId = task.getTaskId(); - String taskDefName = task.getTaskDefName(); - - String keyName = createKeyName(taskDefName); - - stringRedisTemplate.opsForSet().remove(keyName, taskId); - - LOGGER.debug("Removed taskId: {} from key: {}", taskId, keyName); - } catch (Exception e) { - Monitors.error(CLASS_NAME, "removeTaskFromLimit"); - String errorMsg = - String.format( - "Error updating taskDefLimit for task - %s:%s in workflow: %s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); - } - } - - /** - * Checks if the {@link TaskModel} identifier is in the Redis Set and size of the set is more - * than the {@link TaskDef#concurrencyLimit()}. - * - * @param task The {@link TaskModel} object. - * @return true if the task id is not in the set and size of the set is more than the {@link - * TaskDef#concurrencyLimit()}. - */ - @Override - public boolean exceedsLimit(TaskModel task) { - Optional taskDefinition = task.getTaskDefinition(); - if (taskDefinition.isEmpty()) { - return false; - } - int limit = taskDefinition.get().concurrencyLimit(); - if (limit <= 0) { - return false; - } - - try { - Monitors.recordDaoRequests( - CLASS_NAME, "exceedsLimit", task.getTaskType(), task.getWorkflowType()); - String taskId = task.getTaskId(); - String taskDefName = task.getTaskDefName(); - String keyName = createKeyName(taskDefName); - - boolean isMember = - ObjectUtils.defaultIfNull( - stringRedisTemplate.opsForSet().isMember(keyName, taskId), false); - long size = - ObjectUtils.defaultIfNull(stringRedisTemplate.opsForSet().size(keyName), -1L); - - LOGGER.debug( - "Task: {} is {} of {}, size: {} and limit: {}", - taskId, - isMember ? 
"a member" : "not a member", - keyName, - size, - limit); - - return !isMember && size >= limit; - } catch (Exception e) { - Monitors.error(CLASS_NAME, "exceedsLimit"); - String errorMsg = - String.format( - "Failed to get in progress limit - %s:%s in workflow :%s", - task.getTaskDefName(), task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.error(errorMsg, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg); - } - } - - private String createKeyName(String taskDefName) { - StringBuilder builder = new StringBuilder(); - String namespace = properties.getNamespace(); - - if (StringUtils.isNotBlank(namespace)) { - builder.append(namespace).append(':'); - } - - return builder.append(taskDefName).toString(); - } -} diff --git a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitConfiguration.java b/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitConfiguration.java deleted file mode 100644 index 9349093eb..000000000 --- a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitConfiguration.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.limit.config; - -import java.util.List; - -import org.apache.commons.pool2.impl.GenericObjectPoolConfig; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.redis.connection.RedisClusterConfiguration; -import org.springframework.data.redis.connection.RedisConnectionFactory; -import org.springframework.data.redis.connection.RedisStandaloneConfiguration; -import org.springframework.data.redis.connection.jedis.JedisClientConfiguration; -import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; - -@Configuration -@ConditionalOnProperty( - value = "conductor.redis-concurrent-execution-limit.enabled", - havingValue = "true") -@EnableConfigurationProperties(RedisConcurrentExecutionLimitProperties.class) -public class RedisConcurrentExecutionLimitConfiguration { - - @Bean - @ConditionalOnProperty( - value = "conductor.redis-concurrent-execution-limit.type", - havingValue = "cluster") - public RedisConnectionFactory redisClusterConnectionFactory( - RedisConcurrentExecutionLimitProperties properties) { - GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig<>(); - poolConfig.setMaxTotal(properties.getMaxConnectionsPerHost()); - poolConfig.setTestWhileIdle(true); - JedisClientConfiguration clientConfig = - JedisClientConfiguration.builder() - .usePooling() - .poolConfig(poolConfig) - .and() - .clientName(properties.getClientName()) - 
.build(); - - RedisClusterConfiguration redisClusterConfiguration = - new RedisClusterConfiguration( - List.of(properties.getHost() + ":" + properties.getPort())); - - return new JedisConnectionFactory(redisClusterConfiguration, clientConfig); - } - - @Bean - @ConditionalOnProperty( - value = "conductor.redis-concurrent-execution-limit.type", - havingValue = "standalone", - matchIfMissing = true) - public RedisConnectionFactory redisStandaloneConnectionFactory( - RedisConcurrentExecutionLimitProperties properties) { - RedisStandaloneConfiguration config = - new RedisStandaloneConfiguration(properties.getHost(), properties.getPort()); - return new JedisConnectionFactory(config); - } -} diff --git a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitProperties.java b/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitProperties.java deleted file mode 100644 index 20b0e929d..000000000 --- a/redis-concurrency-limit/src/main/java/com/netflix/conductor/redis/limit/config/RedisConcurrentExecutionLimitProperties.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.limit.config; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -@ConfigurationProperties("conductor.redis-concurrent-execution-limit") -public class RedisConcurrentExecutionLimitProperties { - - public enum RedisType { - STANDALONE, - CLUSTER - } - - private RedisType type; - - private String host; - - private int port; - - private String password; - - private int maxConnectionsPerHost; - - private String clientName; - - private String namespace = "conductor"; - - public RedisType getType() { - return type; - } - - public void setType(RedisType type) { - this.type = type; - } - - public int getMaxConnectionsPerHost() { - return maxConnectionsPerHost; - } - - public void setMaxConnectionsPerHost(int maxConnectionsPerHost) { - this.maxConnectionsPerHost = maxConnectionsPerHost; - } - - public String getClientName() { - return clientName; - } - - public void setClientName(String clientName) { - this.clientName = clientName; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getNamespace() { - return namespace; - } - - public void setNamespace(String namespace) { - this.namespace = namespace; - } -} diff --git a/redis-concurrency-limit/src/test/groovy/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAOSpec.groovy 
b/redis-concurrency-limit/src/test/groovy/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAOSpec.groovy deleted file mode 100644 index c14bc3800..000000000 --- a/redis-concurrency-limit/src/test/groovy/com/netflix/conductor/redis/limit/RedisConcurrentExecutionLimitDAOSpec.groovy +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.limit - -import org.springframework.data.redis.connection.RedisStandaloneConfiguration -import org.springframework.data.redis.connection.jedis.JedisConnectionFactory -import org.springframework.data.redis.core.StringRedisTemplate -import org.testcontainers.containers.GenericContainer -import org.testcontainers.spock.Testcontainers - -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.model.TaskModel -import com.netflix.conductor.redis.limit.config.RedisConcurrentExecutionLimitProperties - -import spock.lang.Specification -import spock.lang.Subject -import spock.lang.Unroll - -@Testcontainers -class RedisConcurrentExecutionLimitDAOSpec extends Specification { - - GenericContainer redis = new GenericContainer("redis:5.0.3-alpine") - .withExposedPorts(6379) - - @Subject - RedisConcurrentExecutionLimitDAO dao - - StringRedisTemplate redisTemplate - - RedisConcurrentExecutionLimitProperties properties - - def setup() { - properties = new RedisConcurrentExecutionLimitProperties(namespace: 'conductor') - redisTemplate = new StringRedisTemplate(new JedisConnectionFactory(new RedisStandaloneConfiguration(redis.host, redis.firstMappedPort))) - dao = new RedisConcurrentExecutionLimitDAO(redisTemplate, properties) - } - - def "verify addTaskToLimit adds the taskId to the right set"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - def keyName = "${properties.namespace}:$taskDefName" as String - - TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName) - - when: - 
dao.addTaskToLimit(task) - - then: - redisTemplate.hasKey(keyName) - redisTemplate.opsForSet().size(keyName) == 1 - redisTemplate.opsForSet().isMember(keyName, taskId) - } - - def "verify removeTaskFromLimit removes the taskId from the right set"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - def keyName = "${properties.namespace}:$taskDefName" as String - - redisTemplate.opsForSet().add(keyName, taskId) - - TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName) - - when: - dao.removeTaskFromLimit(task) - - then: - !redisTemplate.hasKey(keyName) // since the only element in the set is removed, Redis removes the set - } - - @Unroll - def "verify exceedsLimit returns false for #testCase"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - - TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName, workflowTask: workflowTask) - - when: - def retVal = dao.exceedsLimit(task) - - then: - !retVal - - where: - workflowTask << [new WorkflowTask(taskDefinition: null), new WorkflowTask(taskDefinition: new TaskDef(concurrentExecLimit: -2))] - testCase << ['a task with no TaskDefinition', 'TaskDefinition with concurrentExecLimit is less than 0'] - } - - def "verify exceedsLimit returns false for tasks less than concurrentExecLimit"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - def keyName = "${properties.namespace}:$taskDefName" as String - - TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName, workflowTask: new WorkflowTask(taskDefinition: new TaskDef(concurrentExecLimit: 2))) - - redisTemplate.opsForSet().add(keyName, taskId) - - when: - def retVal = dao.exceedsLimit(task) - - then: - !retVal - } - - def "verify exceedsLimit returns false for taskId already in the set but more than concurrentExecLimit"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - def keyName = "${properties.namespace}:$taskDefName" as String - - 
TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName, workflowTask: new WorkflowTask(taskDefinition: new TaskDef(concurrentExecLimit: 2))) - - redisTemplate.opsForSet().add(keyName, taskId) // add the id of the task passed as argument to exceedsLimit - redisTemplate.opsForSet().add(keyName, 'taskId2') - - when: - def retVal = dao.exceedsLimit(task) - - then: - !retVal - } - - def "verify exceedsLimit returns true for a new taskId more than concurrentExecLimit"() { - given: - def taskId = 'task1' - def taskDefName = 'task_def_name1' - def keyName = "${properties.namespace}:$taskDefName" as String - - TaskModel task = new TaskModel(taskId: taskId, taskDefName: taskDefName, workflowTask: new WorkflowTask(taskDefinition: new TaskDef(concurrentExecLimit: 2))) - - // add task ids different from the id of the task passed to exceedsLimit - redisTemplate.opsForSet().add(keyName, 'taskId2') - redisTemplate.opsForSet().add(keyName, 'taskId3') - - when: - def retVal = dao.exceedsLimit(task) - - then: - retVal - } - - def "verify createKeyName ignores namespace if its not present"() { - given: - def dao = new RedisConcurrentExecutionLimitDAO(null, conductorProperties) - - when: - def keyName = dao.createKeyName('taskdefname') - - then: - keyName == expectedKeyName - - where: - conductorProperties << [new RedisConcurrentExecutionLimitProperties(), new RedisConcurrentExecutionLimitProperties(namespace: null), new RedisConcurrentExecutionLimitProperties(namespace: 'test')] - expectedKeyName << ['conductor:taskdefname', 'taskdefname', 'test:taskdefname'] - } -} diff --git a/redis-lock/build.gradle b/redis-lock/build.gradle deleted file mode 100644 index bc3c074cb..000000000 --- a/redis-lock/build.gradle +++ /dev/null @@ -1,9 +0,0 @@ -dependencies { - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "org.apache.commons:commons-lang3" - implementation "org.redisson:redisson:${revRedisson}" - - 
testImplementation "com.github.kstyrc:embedded-redis:${revEmbeddedRedis}" -} diff --git a/redis-lock/dependencies.lock b/redis-lock/dependencies.lock deleted file mode 100644 index a97a5cabc..000000000 --- a/redis-lock/dependencies.lock +++ /dev/null @@ -1,384 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-core": { - "project": true - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.redisson:redisson": { - "locked": "3.13.3" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.redisson:redisson": { - "locked": "3.13.3" - } - }, - "testCompileClasspath": { - "com.github.kstyrc:embedded-redis": { - "locked": "0.6" - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.redisson:redisson": { - "locked": "3.13.3" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.github.kstyrc:embedded-redis": { - "locked": "0.6" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - 
], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.redisson:redisson": { - "locked": "3.13.3" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockConfiguration.java 
b/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockConfiguration.java deleted file mode 100644 index 25bf8379f..000000000 --- a/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockConfiguration.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redislock.config; - -import java.util.Arrays; - -import org.redisson.Redisson; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.sync.Lock; -import com.netflix.conductor.redislock.config.RedisLockProperties.REDIS_SERVER_TYPE; -import com.netflix.conductor.redislock.lock.RedisLock; - -@Configuration -@EnableConfigurationProperties(RedisLockProperties.class) -@ConditionalOnProperty(name = "conductor.workflow-execution-lock.type", havingValue = "redis") -public class RedisLockConfiguration { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisLockConfiguration.class); - - @Bean - public Redisson getRedisson(RedisLockProperties properties) { - RedisLockProperties.REDIS_SERVER_TYPE redisServerType; - try { - redisServerType = properties.getServerType(); - } catch (IllegalArgumentException ie) { - final String message = - "Invalid Redis server type: " - + properties.getServerType() - + ", supported values are: " - + Arrays.toString(REDIS_SERVER_TYPE.values()); - LOGGER.error(message); - throw new RuntimeException(message, ie); - } - String redisServerAddress = properties.getServerAddress(); - String redisServerPassword = properties.getServerPassword(); - String masterName = properties.getServerMasterName(); - - Config redisConfig = new Config(); - - 
int connectionTimeout = 10000; - switch (redisServerType) { - case SINGLE: - redisConfig - .useSingleServer() - .setAddress(redisServerAddress) - .setPassword(redisServerPassword) - .setTimeout(connectionTimeout); - break; - case CLUSTER: - redisConfig - .useClusterServers() - .setScanInterval(2000) // cluster state scan interval in milliseconds - .addNodeAddress(redisServerAddress.split(",")) - .setPassword(redisServerPassword) - .setTimeout(connectionTimeout); - break; - case SENTINEL: - redisConfig - .useSentinelServers() - .setScanInterval(2000) - .setMasterName(masterName) - .addSentinelAddress(redisServerAddress) - .setPassword(redisServerPassword) - .setTimeout(connectionTimeout); - break; - } - - return (Redisson) Redisson.create(redisConfig); - } - - @Bean - public Lock provideLock(Redisson redisson, RedisLockProperties properties) { - return new RedisLock(redisson, properties); - } -} diff --git a/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockProperties.java b/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockProperties.java deleted file mode 100644 index fbf873522..000000000 --- a/redis-lock/src/main/java/com/netflix/conductor/redislock/config/RedisLockProperties.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redislock.config; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -@ConfigurationProperties("conductor.redis-lock") -public class RedisLockProperties { - - /** The redis server configuration to be used. */ - private REDIS_SERVER_TYPE serverType = REDIS_SERVER_TYPE.SINGLE; - - /** The address of the redis server following format -- host:port */ - private String serverAddress = "redis://127.0.0.1:6379"; - - /** The password for redis authentication */ - private String serverPassword = null; - - /** The master server name used by Redis Sentinel servers and master change monitoring task */ - private String serverMasterName = "master"; - - /** The namespace to use to prepend keys used for locking in redis */ - private String namespace = ""; - - /** - * Enable to otionally continue without a lock to not block executions until the locking service - * becomes available - */ - private boolean ignoreLockingExceptions = false; - - public REDIS_SERVER_TYPE getServerType() { - return serverType; - } - - public void setServerType(REDIS_SERVER_TYPE serverType) { - this.serverType = serverType; - } - - public String getServerAddress() { - return serverAddress; - } - - public void setServerAddress(String serverAddress) { - this.serverAddress = serverAddress; - } - - public String getServerPassword() { - return serverPassword; - } - - public void setServerPassword(String serverPassword) { - this.serverPassword = serverPassword; - } - - public String getServerMasterName() { - return serverMasterName; - } - - public void setServerMasterName(String serverMasterName) { - this.serverMasterName = 
serverMasterName; - } - - public String getNamespace() { - return namespace; - } - - public void setNamespace(String namespace) { - this.namespace = namespace; - } - - public boolean isIgnoreLockingExceptions() { - return ignoreLockingExceptions; - } - - public void setIgnoreLockingExceptions(boolean ignoreLockingExceptions) { - this.ignoreLockingExceptions = ignoreLockingExceptions; - } - - public enum REDIS_SERVER_TYPE { - SINGLE, - CLUSTER, - SENTINEL - } -} diff --git a/redis-lock/src/main/java/com/netflix/conductor/redislock/lock/RedisLock.java b/redis-lock/src/main/java/com/netflix/conductor/redislock/lock/RedisLock.java deleted file mode 100644 index 28cdcfef8..000000000 --- a/redis-lock/src/main/java/com/netflix/conductor/redislock/lock/RedisLock.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redislock.lock; - -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.StringUtils; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.core.sync.Lock; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.redislock.config.RedisLockProperties; - -public class RedisLock implements Lock { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisLock.class); - - private final RedisLockProperties properties; - private final RedissonClient redisson; - private static String LOCK_NAMESPACE = ""; - - public RedisLock(Redisson redisson, RedisLockProperties properties) { - this.properties = properties; - this.redisson = redisson; - LOCK_NAMESPACE = properties.getNamespace(); - } - - @Override - public void acquireLock(String lockId) { - RLock lock = redisson.getLock(parseLockId(lockId)); - lock.lock(); - } - - @Override - public boolean acquireLock(String lockId, long timeToTry, TimeUnit unit) { - RLock lock = redisson.getLock(parseLockId(lockId)); - try { - return lock.tryLock(timeToTry, unit); - } catch (Exception e) { - return handleAcquireLockFailure(lockId, e); - } - } - - /** - * @param lockId resource to lock on - * @param timeToTry blocks up to timeToTry duration in attempt to acquire the lock - * @param leaseTime Lock lease expiration duration. Redisson default is -1, meaning it holds the - * lock until explicitly unlocked. 
- * @param unit time unit - * @return - */ - @Override - public boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit) { - RLock lock = redisson.getLock(parseLockId(lockId)); - try { - return lock.tryLock(timeToTry, leaseTime, unit); - } catch (Exception e) { - return handleAcquireLockFailure(lockId, e); - } - } - - @Override - public void releaseLock(String lockId) { - RLock lock = redisson.getLock(parseLockId(lockId)); - try { - lock.unlock(); - } catch (IllegalMonitorStateException e) { - // Releasing a lock twice using Redisson can cause this exception, which can be ignored. - } - } - - @Override - public void deleteLock(String lockId) { - // Noop for Redlock algorithm as releaseLock / unlock deletes it. - } - - private String parseLockId(String lockId) { - if (StringUtils.isEmpty(lockId)) { - throw new IllegalArgumentException("lockId cannot be NULL or empty: lockId=" + lockId); - } - return LOCK_NAMESPACE + "." + lockId; - } - - private boolean handleAcquireLockFailure(String lockId, Exception e) { - LOGGER.error("Failed to acquireLock for lockId: {}", lockId, e); - Monitors.recordAcquireLockFailure(e.getClass().getName()); - // A Valid failure to acquire lock when another thread has acquired it returns false. - // However, when an exception is thrown while acquiring lock, due to connection or others - // issues, - // we can optionally continue without a "lock" to not block executions until Locking service - // is available. 
- return properties.isIgnoreLockingExceptions(); - } -} diff --git a/redis-lock/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/redis-lock/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index fe41f5bea..000000000 --- a/redis-lock/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.redis-lock.server-type", - "defaultValue": "SINGLE" - } - ], - "hints": [ - { - "name": "conductor.workflow-execution-lock.type", - "values": [ - { - "value": "redis", - "description": "Use the redis-lock implementation as the lock provider." - } - ] - }, - { - "name": "conductor.redis-lock.server-type", - "providers": [ - { - "name": "handle-as", - "parameters": { - "target": "java.lang.Enum" - } - } - ] - } - ] -} diff --git a/redis-lock/src/test/java/com/netflix/conductor/redis/lock/RedisLockTest.java b/redis-lock/src/test/java/com/netflix/conductor/redis/lock/RedisLockTest.java deleted file mode 100644 index 7414b72c3..000000000 --- a/redis-lock/src/test/java/com/netflix/conductor/redis/lock/RedisLockTest.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.lock; - -import java.util.concurrent.TimeUnit; - -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; - -import com.netflix.conductor.redislock.config.RedisLockProperties; -import com.netflix.conductor.redislock.config.RedisLockProperties.REDIS_SERVER_TYPE; -import com.netflix.conductor.redislock.lock.RedisLock; - -import redis.embedded.RedisServer; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class RedisLockTest { - - private static RedisLock redisLock; - private static Config config; - private static RedissonClient redisson; - private static RedisServer redisServer = null; - - @BeforeClass - public static void setUp() throws Exception { - String testServerAddress = "redis://127.0.0.1:6371"; - redisServer = new RedisServer(6371); - if (redisServer.isActive()) { - redisServer.stop(); - } - redisServer.start(); - - RedisLockProperties properties = mock(RedisLockProperties.class); - when(properties.getServerType()).thenReturn(REDIS_SERVER_TYPE.SINGLE); - when(properties.getServerAddress()).thenReturn(testServerAddress); - when(properties.getServerMasterName()).thenReturn("master"); - when(properties.getNamespace()).thenReturn(""); - when(properties.isIgnoreLockingExceptions()).thenReturn(false); - - Config redissonConfig = new Config(); - 
redissonConfig.useSingleServer().setAddress(testServerAddress).setTimeout(10000); - redisLock = new RedisLock((Redisson) Redisson.create(redissonConfig), properties); - - // Create another instance of redisson for tests. - RedisLockTest.config = new Config(); - RedisLockTest.config.useSingleServer().setAddress(testServerAddress).setTimeout(10000); - redisson = Redisson.create(RedisLockTest.config); - } - - @AfterClass - public static void tearDown() { - redisServer.stop(); - } - - @Test - public void testLocking() { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - assertTrue(redisLock.acquireLock(lockId, 1000, 1000, TimeUnit.MILLISECONDS)); - } - - @Test - public void testLockExpiration() throws InterruptedException { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - boolean isLocked = redisLock.acquireLock(lockId, 1000, 1000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - Thread.sleep(2000); - - RLock lock = redisson.getLock(lockId); - assertFalse(lock.isLocked()); - } - - @Test - public void testLockReentry() throws InterruptedException { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - boolean isLocked = redisLock.acquireLock(lockId, 1000, 60000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - Thread.sleep(1000); - - // get the lock back - isLocked = redisLock.acquireLock(lockId, 1000, 1000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - RLock lock = redisson.getLock(lockId); - assertTrue(isLocked); - } - - @Test - public void testReleaseLock() { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - - boolean isLocked = redisLock.acquireLock(lockId, 1000, 10000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - redisLock.releaseLock(lockId); - - RLock lock = redisson.getLock(lockId); - assertFalse(lock.isLocked()); - } - - @Test - public void testLockReleaseAndAcquire() throws InterruptedException { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - - boolean isLocked = 
redisLock.acquireLock(lockId, 1000, 10000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - redisLock.releaseLock(lockId); - - Worker worker1 = new Worker(redisLock, lockId); - - worker1.start(); - worker1.join(); - - assertTrue(worker1.isLocked); - } - - @Test - public void testLockingDuplicateThreads() throws InterruptedException { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - - Worker worker1 = new Worker(redisLock, lockId); - Worker worker2 = new Worker(redisLock, lockId); - - worker1.start(); - worker2.start(); - - worker1.join(); - worker2.join(); - - // Ensure only one of them had got the lock. - assertFalse(worker1.isLocked && worker2.isLocked); - assertTrue(worker1.isLocked || worker2.isLocked); - } - - @Test - public void testDuplicateLockAcquireFailure() throws InterruptedException { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - Worker worker1 = new Worker(redisLock, lockId, 100L, 60000L); - - worker1.start(); - worker1.join(); - - boolean isLocked = redisLock.acquireLock(lockId, 500L, 1000L, TimeUnit.MILLISECONDS); - - // Ensure only one of them had got the lock. - assertFalse(isLocked); - assertTrue(worker1.isLocked); - } - - @Test - public void testReacquireLostKey() { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - - boolean isLocked = redisLock.acquireLock(lockId, 1000, 10000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - // Delete key from the cluster to reacquire - // Simulating the case when cluster goes down and possibly loses some keys. 
- redisson.getKeys().flushall(); - - isLocked = redisLock.acquireLock(lockId, 100, 10000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - } - - @Test - public void testReleaseLockTwice() { - redisson.getKeys().flushall(); - String lockId = "abcd-1234"; - - boolean isLocked = redisLock.acquireLock(lockId, 1000, 10000, TimeUnit.MILLISECONDS); - assertTrue(isLocked); - - redisLock.releaseLock(lockId); - redisLock.releaseLock(lockId); - } - - private static class Worker extends Thread { - - private final RedisLock lock; - private final String lockID; - boolean isLocked; - private Long timeToTry = 50L; - private Long leaseTime = 1000L; - - Worker(RedisLock lock, String lockID) { - super("TestWorker-" + lockID); - this.lock = lock; - this.lockID = lockID; - } - - Worker(RedisLock lock, String lockID, Long timeToTry, Long leaseTime) { - super("TestWorker-" + lockID); - this.lock = lock; - this.lockID = lockID; - this.timeToTry = timeToTry; - this.leaseTime = leaseTime; - } - - @Override - public void run() { - isLocked = lock.acquireLock(lockID, timeToTry, leaseTime, TimeUnit.MILLISECONDS); - } - } -} diff --git a/redis-persistence/build.gradle b/redis-persistence/build.gradle deleted file mode 100644 index 4dbf93f97..000000000 --- a/redis-persistence/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -dependencies { - implementation project(':conductor-common') - implementation project(':conductor-core') - compileOnly 'org.springframework.boot:spring-boot-starter' - - implementation "redis.clients:jedis:${revJedis}" - implementation "com.netflix.dyno-queues:dyno-queues-redis:${revDynoQueues}" - implementation('com.thoughtworks.xstream:xstream:1.4.19') - - //In memory - implementation "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}" - - testImplementation project(':conductor-core').sourceSets.test.output - testImplementation project(':conductor-common').sourceSets.test.output -} diff --git a/redis-persistence/dependencies.lock b/redis-persistence/dependencies.lock deleted file mode 100644 index a0d32987d..000000000 --- a/redis-persistence/dependencies.lock +++ /dev/null @@ -1,414 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "locked": "2.0.20" - }, - "com.thoughtworks.xstream:xstream": { - "locked": "1.4.19" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.rarefiedredis.redis:redis-java": { - "locked": "0.0.17" - }, - 
"org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "locked": "2.0.20" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.thoughtworks.xstream:xstream": { - "locked": "1.4.19" - }, - "commons-io:commons-io": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - 
"com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.rarefiedredis.redis:redis-java": { - "locked": "0.0.17" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "testCompileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "locked": "2.0.20" - }, - "com.thoughtworks.xstream:xstream": { - "locked": "1.4.19" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.rarefiedredis.redis:redis-java": { - "locked": "0.0.17" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "locked": "2.0.20" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.thoughtworks.xstream:xstream": { - "locked": "1.4.19" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - 
"org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.rarefiedredis.redis:redis-java": { - "locked": "0.0.17" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - } -} \ No newline at end of file diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/AnyRedisCondition.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/AnyRedisCondition.java deleted file mode 100644 index 0303c9f80..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/AnyRedisCondition.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import org.springframework.boot.autoconfigure.condition.AnyNestedCondition; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; - -public class AnyRedisCondition extends AnyNestedCondition { - - public AnyRedisCondition() { - super(ConfigurationPhase.PARSE_CONFIGURATION); - } - - @ConditionalOnProperty(name = "conductor.db.type", havingValue = "dynomite") - static class DynomiteClusterCondition {} - - @ConditionalOnProperty(name = "conductor.db.type", havingValue = "memory") - static class InMemoryRedisCondition {} - - @ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_cluster") - static class RedisClusterConfiguration {} - - @ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_sentinel") - static class RedisSentinelConfiguration {} - - @ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_standalone") - static class RedisStandaloneConfiguration {} -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java deleted file mode 100644 index 410f96f16..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; -import com.netflix.dyno.connectionpool.impl.ConnectionPoolConfigurationImpl; -import com.netflix.dyno.jedis.DynoJedisClient; - -import redis.clients.jedis.commands.JedisCommands; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "dynomite") -public class DynomiteClusterConfiguration extends JedisCommandsConfigurer { - - protected JedisCommands createJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - ConnectionPoolConfigurationImpl connectionPoolConfiguration = - new ConnectionPoolConfigurationImpl(properties.getClusterName()) - .withTokenSupplier(tokenMapSupplier) - .setLocalRack(properties.getAvailabilityZone()) - .setLocalDataCenter(properties.getDataCenterRegion()) - .setSocketTimeout(0) - .setConnectTimeout(0) - .setMaxConnsPerHost(properties.getMaxConnectionsPerHost()) - .setMaxTimeoutWhenExhausted( - (int) properties.getMaxTimeoutWhenExhausted().toMillis()) - .setRetryPolicyFactory(properties.getConnectionRetryPolicy()); - - return new DynoJedisClient.Builder() - .withHostSupplier(hostSupplier) - .withApplicationName(conductorProperties.getAppId()) - .withDynomiteClusterName(properties.getClusterName()) - 
.withCPConfig(connectionPoolConfiguration) - .build(); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java deleted file mode 100644 index 1d03de008..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.redis.dynoqueue.LocalhostHostSupplier; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.dyno.connectionpool.HostSupplier; - -import static com.netflix.conductor.redis.config.RedisCommonConfiguration.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.RedisCommonConfiguration.READ_CLIENT_INJECTION_NAME; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "memory") -public class InMemoryRedisConfiguration { - - @Bean - public HostSupplier hostSupplier(RedisProperties properties) { - return new LocalhostHostSupplier(properties); - } - - @Bean(name = {DEFAULT_CLIENT_INJECTION_NAME, READ_CLIENT_INJECTION_NAME}) - public JedisMock jedisMock() { - return new JedisMock(); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/JedisCommandsConfigurer.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/JedisCommandsConfigurer.java deleted file mode 100644 index 5f4783406..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/JedisCommandsConfigurer.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import org.springframework.context.annotation.Bean; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.dynoqueue.ConfigurationHostSupplier; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; - -import redis.clients.jedis.commands.JedisCommands; - -import static com.netflix.conductor.redis.config.RedisCommonConfiguration.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.RedisCommonConfiguration.READ_CLIENT_INJECTION_NAME; - -abstract class JedisCommandsConfigurer { - - @Bean - public HostSupplier hostSupplier(RedisProperties properties) { - return new ConfigurationHostSupplier(properties); - } - - @Bean(name = DEFAULT_CLIENT_INJECTION_NAME) - public JedisCommands jedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - return createJedisCommands(properties, conductorProperties, hostSupplier, tokenMapSupplier); - } - - @Bean(name = READ_CLIENT_INJECTION_NAME) - public JedisCommands readJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - return createJedisCommands(properties, conductorProperties, hostSupplier, tokenMapSupplier); - } - - protected abstract JedisCommands createJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier); -} diff --git 
a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisClusterConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisClusterConfiguration.java deleted file mode 100644 index b98e038f7..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisClusterConfiguration.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import org.apache.commons.pool2.impl.GenericObjectPoolConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.jedis.JedisCluster; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; - -import redis.clients.jedis.HostAndPort; -import redis.clients.jedis.Protocol; -import redis.clients.jedis.commands.JedisCommands; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_cluster") -public class RedisClusterConfiguration extends JedisCommandsConfigurer { - - private static final Logger log = LoggerFactory.getLogger(JedisCommandsConfigurer.class); - - // Same as redis.clients.jedis.BinaryJedisCluster - protected static final int DEFAULT_MAX_ATTEMPTS = 5; - - @Override - protected JedisCommands createJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - GenericObjectPoolConfig genericObjectPoolConfig = new GenericObjectPoolConfig<>(); - genericObjectPoolConfig.setMaxTotal(properties.getMaxConnectionsPerHost()); - Set hosts = - hostSupplier.getHosts().stream() - .map(h -> new HostAndPort(h.getHostName(), 
h.getPort())) - .collect(Collectors.toSet()); - String password = getPassword(hostSupplier.getHosts()); - - if (password != null) { - log.info("Connecting to Redis Cluster with AUTH"); - return new JedisCluster( - new redis.clients.jedis.JedisCluster( - hosts, - Protocol.DEFAULT_TIMEOUT, - Protocol.DEFAULT_TIMEOUT, - DEFAULT_MAX_ATTEMPTS, - password, - genericObjectPoolConfig)); - } else { - return new JedisCluster( - new redis.clients.jedis.JedisCluster(hosts, genericObjectPoolConfig)); - } - } - - private String getPassword(List hosts) { - return hosts.isEmpty() ? null : hosts.get(0).getPassword(); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisCommonConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisCommonConfiguration.java deleted file mode 100644 index 94883f6f5..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisCommonConfiguration.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; -import com.netflix.dyno.connectionpool.impl.lb.HostToken; -import com.netflix.dyno.connectionpool.impl.utils.CollectionUtils; -import com.netflix.dyno.queues.ShardSupplier; -import com.netflix.dyno.queues.redis.RedisQueues; -import com.netflix.dyno.queues.redis.sharding.ShardingStrategy; -import com.netflix.dyno.queues.shard.DynoShardSupplier; - -import com.google.inject.ProvisionException; -import redis.clients.jedis.commands.JedisCommands; - -@Configuration(proxyBeanMethods = false) -@EnableConfigurationProperties(RedisProperties.class) -@Conditional(AnyRedisCondition.class) -public class RedisCommonConfiguration { - - public static final String DEFAULT_CLIENT_INJECTION_NAME = "DefaultJedisCommands"; - public static final String READ_CLIENT_INJECTION_NAME = "ReadJedisCommands"; - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisCommonConfiguration.class); - - @Bean - public ShardSupplier 
shardSupplier(HostSupplier hostSupplier, RedisProperties properties) { - if (properties.getAvailabilityZone() == null) { - throw new ProvisionException( - "Availability zone is not defined. Ensure Configuration.getAvailabilityZone() returns a non-null " - + "and non-empty value."); - } - String localDC = - properties.getAvailabilityZone().replaceAll(properties.getDataCenterRegion(), ""); - return new DynoShardSupplier(hostSupplier, properties.getDataCenterRegion(), localDC); - } - - @Bean - public TokenMapSupplier tokenMapSupplier() { - final List hostTokens = new ArrayList<>(); - return new TokenMapSupplier() { - @Override - public List getTokens(Set activeHosts) { - long i = activeHosts.size(); - for (Host host : activeHosts) { - HostToken hostToken = new HostToken(i, host); - hostTokens.add(hostToken); - i--; - } - return hostTokens; - } - - @Override - public HostToken getTokenForHost(Host host, Set activeHosts) { - return CollectionUtils.find( - hostTokens, token -> token.getHost().compareTo(host) == 0); - } - }; - } - - @Bean - public ShardingStrategy shardingStrategy( - ShardSupplier shardSupplier, RedisProperties properties) { - return new RedisQueuesShardingStrategyProvider(shardSupplier, properties).get(); - } - - @Bean - public RedisQueues redisQueues( - @Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands, - @Qualifier(READ_CLIENT_INJECTION_NAME) JedisCommands jedisCommandsRead, - ShardSupplier shardSupplier, - RedisProperties properties, - ShardingStrategy shardingStrategy) { - RedisQueues queues = - new RedisQueues( - jedisCommands, - jedisCommandsRead, - properties.getQueuePrefix(), - shardSupplier, - 60_000, - 60_000, - shardingStrategy); - LOGGER.info("DynoQueueDAO initialized with prefix " + properties.getQueuePrefix() + "!"); - return queues; - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java 
b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java deleted file mode 100644 index 2c0b3eadb..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import java.time.Duration; -import java.time.temporal.ChronoUnit; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider; -import com.netflix.dyno.connectionpool.RetryPolicy.RetryPolicyFactory; -import com.netflix.dyno.connectionpool.impl.RetryNTimes; -import com.netflix.dyno.connectionpool.impl.RunOnce; - -@ConfigurationProperties("conductor.redis") -public class RedisProperties { - - private final ConductorProperties conductorProperties; - - @Autowired - public RedisProperties(ConductorProperties conductorProperties) { - this.conductorProperties = conductorProperties; - } - - /** - * Data center region. If hosting on Amazon the value is something like us-east-1, us-west-2 - * etc. - */ - private String dataCenterRegion = "us-east-1"; - - /** - * Local rack / availability zone. For AWS deployments, the value is something like us-east-1a, - * etc. - */ - private String availabilityZone = "us-east-1c"; - - /** The name of the redis / dynomite cluster */ - private String clusterName = ""; - - /** Dynomite Cluster details. 
Format is host:port:rack separated by semicolon */ - private String hosts = null; - - /** The prefix used to prepend workflow data in redis */ - private String workflowNamespacePrefix = null; - - /** The prefix used to prepend keys for queues in redis */ - private String queueNamespacePrefix = null; - - /** - * The domain name to be used in the key prefix for logical separation of workflow data and - * queues in a shared redis setup - */ - private String keyspaceDomain = null; - - /** - * The maximum number of connections that can be managed by the connection pool on a given - * instance - */ - private int maxConnectionsPerHost = 10; - - /** - * The maximum amount of time to wait for a connection to become available from the connection - * pool - */ - private Duration maxTimeoutWhenExhausted = Duration.ofMillis(800); - - /** The maximum retry attempts to use with this connection pool */ - private int maxRetryAttempts = 0; - - /** The read connection port to be used for connecting to dyno-queues */ - private int queuesNonQuorumPort = 22122; - - /** The sharding strategy to be used for the dyno queue configuration */ - private String queueShardingStrategy = RedisQueuesShardingStrategyProvider.ROUND_ROBIN_STRATEGY; - - /** The time in seconds after which the in-memory task definitions cache will be refreshed */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration taskDefCacheRefreshInterval = Duration.ofSeconds(60); - - /** The time to live in seconds for which the event execution will be persisted */ - @DurationUnit(ChronoUnit.SECONDS) - private Duration eventExecutionPersistenceTTL = Duration.ofSeconds(60); - - // Maximum number of idle connections to be maintained - private int maxIdleConnections = 8; - - // Minimum number of idle connections to be maintained - private int minIdleConnections = 5; - - private long minEvictableIdleTimeMillis = 1800000; - - private long timeBetweenEvictionRunsMillis = -1L; - - private boolean testWhileIdle = false; - - private int 
numTestsPerEvictionRun = 3; - - public int getNumTestsPerEvictionRun() { - return numTestsPerEvictionRun; - } - - public void setNumTestsPerEvictionRun(int numTestsPerEvictionRun) { - this.numTestsPerEvictionRun = numTestsPerEvictionRun; - } - - public boolean isTestWhileIdle() { - return testWhileIdle; - } - - public void setTestWhileIdle(boolean testWhileIdle) { - this.testWhileIdle = testWhileIdle; - } - - public long getMinEvictableIdleTimeMillis() { - return minEvictableIdleTimeMillis; - } - - public void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) { - this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis; - } - - public long getTimeBetweenEvictionRunsMillis() { - return timeBetweenEvictionRunsMillis; - } - - public void setTimeBetweenEvictionRunsMillis(long timeBetweenEvictionRunsMillis) { - this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis; - } - - public int getMinIdleConnections() { - return minIdleConnections; - } - - public void setMinIdleConnections(int minIdleConnections) { - this.minIdleConnections = minIdleConnections; - } - - public int getMaxIdleConnections() { - return maxIdleConnections; - } - - public void setMaxIdleConnections(int maxIdleConnections) { - this.maxIdleConnections = maxIdleConnections; - } - - public String getDataCenterRegion() { - return dataCenterRegion; - } - - public void setDataCenterRegion(String dataCenterRegion) { - this.dataCenterRegion = dataCenterRegion; - } - - public String getAvailabilityZone() { - return availabilityZone; - } - - public void setAvailabilityZone(String availabilityZone) { - this.availabilityZone = availabilityZone; - } - - public String getClusterName() { - return clusterName; - } - - public void setClusterName(String clusterName) { - this.clusterName = clusterName; - } - - public String getHosts() { - return hosts; - } - - public void setHosts(String hosts) { - this.hosts = hosts; - } - - public String getWorkflowNamespacePrefix() { - return 
workflowNamespacePrefix; - } - - public void setWorkflowNamespacePrefix(String workflowNamespacePrefix) { - this.workflowNamespacePrefix = workflowNamespacePrefix; - } - - public String getQueueNamespacePrefix() { - return queueNamespacePrefix; - } - - public void setQueueNamespacePrefix(String queueNamespacePrefix) { - this.queueNamespacePrefix = queueNamespacePrefix; - } - - public String getKeyspaceDomain() { - return keyspaceDomain; - } - - public void setKeyspaceDomain(String keyspaceDomain) { - this.keyspaceDomain = keyspaceDomain; - } - - public int getMaxConnectionsPerHost() { - return maxConnectionsPerHost; - } - - public void setMaxConnectionsPerHost(int maxConnectionsPerHost) { - this.maxConnectionsPerHost = maxConnectionsPerHost; - } - - public Duration getMaxTimeoutWhenExhausted() { - return maxTimeoutWhenExhausted; - } - - public void setMaxTimeoutWhenExhausted(Duration maxTimeoutWhenExhausted) { - this.maxTimeoutWhenExhausted = maxTimeoutWhenExhausted; - } - - public int getMaxRetryAttempts() { - return maxRetryAttempts; - } - - public void setMaxRetryAttempts(int maxRetryAttempts) { - this.maxRetryAttempts = maxRetryAttempts; - } - - public int getQueuesNonQuorumPort() { - return queuesNonQuorumPort; - } - - public void setQueuesNonQuorumPort(int queuesNonQuorumPort) { - this.queuesNonQuorumPort = queuesNonQuorumPort; - } - - public String getQueueShardingStrategy() { - return queueShardingStrategy; - } - - public void setQueueShardingStrategy(String queueShardingStrategy) { - this.queueShardingStrategy = queueShardingStrategy; - } - - public Duration getTaskDefCacheRefreshInterval() { - return taskDefCacheRefreshInterval; - } - - public void setTaskDefCacheRefreshInterval(Duration taskDefCacheRefreshInterval) { - this.taskDefCacheRefreshInterval = taskDefCacheRefreshInterval; - } - - public Duration getEventExecutionPersistenceTTL() { - return eventExecutionPersistenceTTL; - } - - public void setEventExecutionPersistenceTTL(Duration 
eventExecutionPersistenceTTL) { - this.eventExecutionPersistenceTTL = eventExecutionPersistenceTTL; - } - - public String getQueuePrefix() { - String prefix = getQueueNamespacePrefix() + "." + conductorProperties.getStack(); - if (getKeyspaceDomain() != null) { - prefix = prefix + "." + getKeyspaceDomain(); - } - return prefix; - } - - public RetryPolicyFactory getConnectionRetryPolicy() { - if (getMaxRetryAttempts() == 0) { - return RunOnce::new; - } else { - return () -> new RetryNTimes(maxRetryAttempts, false); - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisSentinelConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisSentinelConfiguration.java deleted file mode 100644 index 89fa5b8cc..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisSentinelConfiguration.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.pool2.impl.GenericObjectPoolConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.jedis.JedisSentinel; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; - -import redis.clients.jedis.JedisSentinelPool; -import redis.clients.jedis.Protocol; -import redis.clients.jedis.commands.JedisCommands; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_sentinel") -public class RedisSentinelConfiguration extends JedisCommandsConfigurer { - - private static final Logger log = LoggerFactory.getLogger(RedisSentinelConfiguration.class); - - @Override - protected JedisCommands createJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - GenericObjectPoolConfig genericObjectPoolConfig = new GenericObjectPoolConfig<>(); - genericObjectPoolConfig.setMinIdle(properties.getMinIdleConnections()); - genericObjectPoolConfig.setMaxIdle(properties.getMaxIdleConnections()); - genericObjectPoolConfig.setMaxTotal(properties.getMaxConnectionsPerHost()); - 
genericObjectPoolConfig.setTestWhileIdle(properties.isTestWhileIdle()); - genericObjectPoolConfig.setMinEvictableIdleTimeMillis( - properties.getMinEvictableIdleTimeMillis()); - genericObjectPoolConfig.setTimeBetweenEvictionRunsMillis( - properties.getTimeBetweenEvictionRunsMillis()); - genericObjectPoolConfig.setNumTestsPerEvictionRun(properties.getNumTestsPerEvictionRun()); - log.info( - "Starting conductor server using redis_sentinel and cluster " - + properties.getClusterName()); - Set sentinels = new HashSet<>(); - for (Host host : hostSupplier.getHosts()) { - sentinels.add(host.getHostName() + ":" + host.getPort()); - } - // We use the password of the first sentinel host as password and sentinelPassword - String password = getPassword(hostSupplier.getHosts()); - if (password != null) { - return new JedisSentinel( - new JedisSentinelPool( - properties.getClusterName(), - sentinels, - genericObjectPoolConfig, - Protocol.DEFAULT_TIMEOUT, - Protocol.DEFAULT_TIMEOUT, - password, - Protocol.DEFAULT_DATABASE, - null, - Protocol.DEFAULT_TIMEOUT, - Protocol.DEFAULT_TIMEOUT, - password, - null)); - } else { - return new JedisSentinel( - new JedisSentinelPool( - properties.getClusterName(), sentinels, genericObjectPoolConfig)); - } - } - - private String getPassword(List hosts) { - return hosts.isEmpty() ? null : hosts.get(0).getPassword(); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisStandaloneConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisStandaloneConfiguration.java deleted file mode 100644 index 8882e5403..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisStandaloneConfiguration.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Configuration; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.jedis.JedisStandalone; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; - -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.Protocol; -import redis.clients.jedis.commands.JedisCommands; - -@Configuration(proxyBeanMethods = false) -@ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_standalone") -public class RedisStandaloneConfiguration extends JedisCommandsConfigurer { - - private static final Logger log = LoggerFactory.getLogger(RedisSentinelConfiguration.class); - - @Override - protected JedisCommands createJedisCommands( - RedisProperties properties, - ConductorProperties conductorProperties, - HostSupplier hostSupplier, - TokenMapSupplier tokenMapSupplier) { - JedisPoolConfig config = new JedisPoolConfig(); - config.setMinIdle(2); - config.setMaxTotal(properties.getMaxConnectionsPerHost()); - log.info("Starting conductor server using redis_standalone."); - Host host = hostSupplier.getHosts().get(0); - return new JedisStandalone(getJedisPool(config, host)); - } - - private JedisPool getJedisPool(JedisPoolConfig config, Host host) { - if (host.getPassword() != null) { - log.info("Connecting to Redis Standalone 
with AUTH"); - return new JedisPool( - config, - host.getHostName(), - host.getPort(), - Protocol.DEFAULT_TIMEOUT, - host.getPassword()); - } else { - return new JedisPool(config, host.getHostName(), host.getPort()); - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/BaseDynoDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/BaseDynoDAO.java deleted file mode 100644 index bdf881fbb..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/BaseDynoDAO.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.io.IOException; - -import org.apache.commons.lang3.StringUtils; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -public class BaseDynoDAO { - - private static final String NAMESPACE_SEP = "."; - private static final String DAO_NAME = "redis"; - private final String domain; - private final RedisProperties properties; - private final ConductorProperties conductorProperties; - protected JedisProxy jedisProxy; - protected ObjectMapper objectMapper; - - protected BaseDynoDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - this.jedisProxy = jedisProxy; - this.objectMapper = objectMapper; - this.conductorProperties = conductorProperties; - this.properties = properties; - this.domain = properties.getKeyspaceDomain(); - } - - String nsKey(String... 
nsValues) { - String rootNamespace = properties.getWorkflowNamespacePrefix(); - StringBuilder namespacedKey = new StringBuilder(); - if (StringUtils.isNotBlank(rootNamespace)) { - namespacedKey.append(rootNamespace).append(NAMESPACE_SEP); - } - String stack = conductorProperties.getStack(); - if (StringUtils.isNotBlank(stack)) { - namespacedKey.append(stack).append(NAMESPACE_SEP); - } - if (StringUtils.isNotBlank(domain)) { - namespacedKey.append(domain).append(NAMESPACE_SEP); - } - for (String nsValue : nsValues) { - namespacedKey.append(nsValue).append(NAMESPACE_SEP); - } - return StringUtils.removeEnd(namespacedKey.toString(), NAMESPACE_SEP); - } - - public JedisProxy getDyno() { - return jedisProxy; - } - - String toJson(Object value) { - try { - return objectMapper.writeValueAsString(value); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - T readValue(String json, Class clazz) { - try { - return objectMapper.readValue(json, clazz); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - void recordRedisDaoRequests(String action) { - recordRedisDaoRequests(action, "n/a", "n/a"); - } - - void recordRedisDaoRequests(String action, String taskType, String workflowType) { - Monitors.recordDaoRequests(DAO_NAME, action, taskType, workflowType); - } - - void recordRedisDaoEventRequests(String action, String event) { - Monitors.recordDaoEventRequests(DAO_NAME, action, event); - } - - void recordRedisDaoPayloadSize(String action, int size, String taskType, String workflowType) { - Monitors.recordDaoPayloadSize( - DAO_NAME, - action, - StringUtils.defaultIfBlank(taskType, ""), - StringUtils.defaultIfBlank(workflowType, ""), - size); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/DynoQueueDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/DynoQueueDAO.java deleted file mode 100644 index 3901931dc..000000000 --- 
a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/DynoQueueDAO.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.dyno.queues.DynoQueue; -import com.netflix.dyno.queues.Message; -import com.netflix.dyno.queues.redis.RedisQueues; - -@Component -@Conditional(AnyRedisCondition.class) -public class DynoQueueDAO implements QueueDAO { - - private final RedisQueues queues; - - public DynoQueueDAO(RedisQueues queues) { - this.queues = queues; - } - - @Override - public void push(String queueName, String id, long offsetTimeInSecond) { - push(queueName, id, -1, offsetTimeInSecond); - } - - @Override - public void push(String queueName, String id, int priority, long offsetTimeInSecond) { - Message msg = new Message(id, null); - msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); - if (priority >= 0 && priority <= 99) { - msg.setPriority(priority); - } - queues.get(queueName).push(Collections.singletonList(msg)); - } - - @Override - public void push( - String queueName, List messages) { - List msgs = - messages.stream() - .map( - msg -> { - Message m = new Message(msg.getId(), msg.getPayload()); - if (msg.getPriority() > 0) { - m.setPriority(msg.getPriority()); - } - return m; - }) - .collect(Collectors.toList()); - queues.get(queueName).push(msgs); - } - - @Override - public boolean pushIfNotExists(String 
queueName, String id, long offsetTimeInSecond) { - return pushIfNotExists(queueName, id, -1, offsetTimeInSecond); - } - - @Override - public boolean pushIfNotExists( - String queueName, String id, int priority, long offsetTimeInSecond) { - DynoQueue queue = queues.get(queueName); - if (queue.get(id) != null) { - return false; - } - Message msg = new Message(id, null); - if (priority >= 0 && priority <= 99) { - msg.setPriority(priority); - } - msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); - queue.push(Collections.singletonList(msg)); - return true; - } - - @Override - public List pop(String queueName, int count, int timeout) { - List msg = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); - return msg.stream().map(Message::getId).collect(Collectors.toList()); - } - - @Override - public List pollMessages( - String queueName, int count, int timeout) { - List msgs = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); - return msgs.stream() - .map( - msg -> - new com.netflix.conductor.core.events.queue.Message( - msg.getId(), msg.getPayload(), null, msg.getPriority())) - .collect(Collectors.toList()); - } - - @Override - public void remove(String queueName, String messageId) { - queues.get(queueName).remove(messageId); - } - - @Override - public int getSize(String queueName) { - return (int) queues.get(queueName).size(); - } - - @Override - public boolean ack(String queueName, String messageId) { - return queues.get(queueName).ack(messageId); - } - - @Override - public boolean setUnackTimeout(String queueName, String messageId, long timeout) { - return queues.get(queueName).setUnackTimeout(messageId, timeout); - } - - @Override - public void flush(String queueName) { - DynoQueue queue = queues.get(queueName); - if (queue != null) { - queue.clear(); - } - } - - @Override - public Map queuesDetail() { - return queues.queues().stream() - .collect(Collectors.toMap(DynoQueue::getName, DynoQueue::size)); - } - - @Override - public Map>> 
queuesDetailVerbose() { - return queues.queues().stream() - .collect(Collectors.toMap(DynoQueue::getName, DynoQueue::shardSizes)); - } - - public void processUnacks(String queueName) { - queues.get(queueName).processUnacks(); - } - - @Override - public boolean resetOffsetTime(String queueName, String id) { - DynoQueue queue = queues.get(queueName); - return queue.setTimeout(id, 0); - } - - @Override - public boolean containsMessage(String queueName, String messageId) { - DynoQueue queue = queues.get(queueName); - Message message = queue.get(messageId); - return Objects.nonNull(message); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAO.java deleted file mode 100644 index 263e4ec77..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAO.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; - -@Component -@Conditional(AnyRedisCondition.class) -public class RedisEventHandlerDAO extends BaseDynoDAO implements EventHandlerDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisEventHandlerDAO.class); - - private static final String EVENT_HANDLERS = "EVENT_HANDLERS"; - private static final String EVENT_HANDLERS_BY_EVENT = "EVENT_HANDLERS_BY_EVENT"; - - public RedisEventHandlerDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - super(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Override - public void addEventHandler(EventHandler eventHandler) { - Preconditions.checkNotNull(eventHandler.getName(), "Missing Name"); 
- if (getEventHandler(eventHandler.getName()) != null) { - throw new ApplicationException( - Code.CONFLICT, - "EventHandler with name " + eventHandler.getName() + " already exists!"); - } - index(eventHandler); - jedisProxy.hset(nsKey(EVENT_HANDLERS), eventHandler.getName(), toJson(eventHandler)); - recordRedisDaoRequests("addEventHandler"); - } - - @Override - public void updateEventHandler(EventHandler eventHandler) { - Preconditions.checkNotNull(eventHandler.getName(), "Missing Name"); - EventHandler existing = getEventHandler(eventHandler.getName()); - if (existing == null) { - throw new ApplicationException( - Code.NOT_FOUND, - "EventHandler with name " + eventHandler.getName() + " not found!"); - } - index(eventHandler); - jedisProxy.hset(nsKey(EVENT_HANDLERS), eventHandler.getName(), toJson(eventHandler)); - recordRedisDaoRequests("updateEventHandler"); - } - - @Override - public void removeEventHandler(String name) { - EventHandler existing = getEventHandler(name); - if (existing == null) { - throw new ApplicationException( - Code.NOT_FOUND, "EventHandler with name " + name + " not found!"); - } - jedisProxy.hdel(nsKey(EVENT_HANDLERS), name); - recordRedisDaoRequests("removeEventHandler"); - removeIndex(existing); - } - - @Override - public List getAllEventHandlers() { - Map all = jedisProxy.hgetAll(nsKey(EVENT_HANDLERS)); - List handlers = new LinkedList<>(); - all.forEach( - (key, json) -> { - EventHandler eventHandler = readValue(json, EventHandler.class); - handlers.add(eventHandler); - }); - recordRedisDaoRequests("getAllEventHandlers"); - return handlers; - } - - private void index(EventHandler eventHandler) { - String event = eventHandler.getEvent(); - String key = nsKey(EVENT_HANDLERS_BY_EVENT, event); - jedisProxy.sadd(key, eventHandler.getName()); - } - - private void removeIndex(EventHandler eventHandler) { - String event = eventHandler.getEvent(); - String key = nsKey(EVENT_HANDLERS_BY_EVENT, event); - jedisProxy.srem(key, 
eventHandler.getName()); - } - - @Override - public List getEventHandlersForEvent(String event, boolean activeOnly) { - String key = nsKey(EVENT_HANDLERS_BY_EVENT, event); - Set names = jedisProxy.smembers(key); - List handlers = new LinkedList<>(); - for (String name : names) { - try { - EventHandler eventHandler = getEventHandler(name); - recordRedisDaoEventRequests("getEventHandler", event); - if (eventHandler.getEvent().equals(event) - && (!activeOnly || eventHandler.isActive())) { - handlers.add(eventHandler); - } - } catch (ApplicationException ae) { - if (ae.getCode() == Code.NOT_FOUND) { - LOGGER.info("No matching event handler found for event: {}", event); - } - throw ae; - } - } - return handlers; - } - - private EventHandler getEventHandler(String name) { - EventHandler eventHandler = null; - String json = jedisProxy.hget(nsKey(EVENT_HANDLERS), name); - if (json != null) { - eventHandler = readValue(json, EventHandler.class); - } - return eventHandler; - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisExecutionDAO.java deleted file mode 100644 index e653d3d6f..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisExecutionDAO.java +++ /dev/null @@ -1,776 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.ConcurrentExecutionLimitDAO; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; - -@Component -@Conditional(AnyRedisCondition.class) -public class RedisExecutionDAO extends BaseDynoDAO - implements ExecutionDAO, ConcurrentExecutionLimitDAO { - - public static final Logger LOGGER = LoggerFactory.getLogger(RedisExecutionDAO.class); - - // Keys Families - private static final String TASK_LIMIT_BUCKET = "TASK_LIMIT_BUCKET"; - private static final String IN_PROGRESS_TASKS = "IN_PROGRESS_TASKS"; - private static final 
String TASKS_IN_PROGRESS_STATUS = - "TASKS_IN_PROGRESS_STATUS"; // Tasks which are in IN_PROGRESS status. - private static final String WORKFLOW_TO_TASKS = "WORKFLOW_TO_TASKS"; - private static final String SCHEDULED_TASKS = "SCHEDULED_TASKS"; - private static final String TASK = "TASK"; - private static final String WORKFLOW = "WORKFLOW"; - private static final String PENDING_WORKFLOWS = "PENDING_WORKFLOWS"; - private static final String WORKFLOW_DEF_TO_WORKFLOWS = "WORKFLOW_DEF_TO_WORKFLOWS"; - private static final String CORR_ID_TO_WORKFLOWS = "CORR_ID_TO_WORKFLOWS"; - private static final String EVENT_EXECUTION = "EVENT_EXECUTION"; - private final int ttlEventExecutionSeconds; - - public RedisExecutionDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - super(jedisProxy, objectMapper, conductorProperties, properties); - - ttlEventExecutionSeconds = (int) properties.getEventExecutionPersistenceTTL().getSeconds(); - } - - private static String dateStr(Long timeInMs) { - Date date = new Date(timeInMs); - return dateStr(date); - } - - private static String dateStr(Date date) { - SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); - return format.format(date); - } - - private static List dateStrBetweenDates(Long startdatems, Long enddatems) { - List dates = new ArrayList<>(); - Calendar calendar = new GregorianCalendar(); - Date startdate = new Date(startdatems); - Date enddate = new Date(enddatems); - calendar.setTime(startdate); - while (calendar.getTime().before(enddate) || calendar.getTime().equals(enddate)) { - Date result = calendar.getTime(); - dates.add(dateStr(result)); - calendar.add(Calendar.DATE, 1); - } - return dates; - } - - @Override - public List getPendingTasksByWorkflow(String taskName, String workflowId) { - List tasks = new LinkedList<>(); - - List pendingTasks = getPendingTasksForTaskType(taskName); - pendingTasks.forEach( - pendingTask -> { - if 
(pendingTask.getWorkflowInstanceId().equals(workflowId)) { - tasks.add(pendingTask); - } - }); - - return tasks; - } - - @Override - public List getTasks(String taskDefName, String startKey, int count) { - List tasks = new LinkedList<>(); - - List pendingTasks = getPendingTasksForTaskType(taskDefName); - boolean startKeyFound = startKey == null; - int foundcount = 0; - for (TaskModel pendingTask : pendingTasks) { - if (!startKeyFound) { - if (pendingTask.getTaskId().equals(startKey)) { - startKeyFound = true; - if (startKey != null) { - continue; - } - } - } - if (startKeyFound && foundcount < count) { - tasks.add(pendingTask); - foundcount++; - } - } - return tasks; - } - - @Override - public List createTasks(List tasks) { - - List tasksCreated = new LinkedList<>(); - - for (TaskModel task : tasks) { - validate(task); - - recordRedisDaoRequests("createTask", task.getTaskType(), task.getWorkflowType()); - - String taskKey = task.getReferenceTaskName() + "" + task.getRetryCount(); - Long added = - jedisProxy.hset( - nsKey(SCHEDULED_TASKS, task.getWorkflowInstanceId()), - taskKey, - task.getTaskId()); - if (added < 1) { - LOGGER.debug( - "Task already scheduled, skipping the run " - + task.getTaskId() - + ", ref=" - + task.getReferenceTaskName() - + ", key=" - + taskKey); - continue; - } - - if (task.getStatus() != null - && !task.getStatus().isTerminal() - && task.getScheduledTime() == 0) { - task.setScheduledTime(System.currentTimeMillis()); - } - - correlateTaskToWorkflowInDS(task.getTaskId(), task.getWorkflowInstanceId()); - LOGGER.debug( - "Scheduled task added to WORKFLOW_TO_TASKS workflowId: {}, taskId: {}, taskType: {} during createTasks", - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType()); - - String inProgressTaskKey = nsKey(IN_PROGRESS_TASKS, task.getTaskDefName()); - jedisProxy.sadd(inProgressTaskKey, task.getTaskId()); - LOGGER.debug( - "Scheduled task added to IN_PROGRESS_TASKS with inProgressTaskKey: {}, workflowId: {}, taskId: 
{}, taskType: {} during createTasks", - inProgressTaskKey, - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType()); - - updateTask(task); - tasksCreated.add(task); - } - - return tasksCreated; - } - - @Override - public void updateTask(TaskModel task) { - Optional taskDefinition = task.getTaskDefinition(); - - if (taskDefinition.isPresent() && taskDefinition.get().concurrencyLimit() > 0) { - - if (task.getStatus() != null && task.getStatus().equals(TaskModel.Status.IN_PROGRESS)) { - jedisProxy.sadd( - nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()), task.getTaskId()); - LOGGER.debug( - "Workflow Task added to TASKS_IN_PROGRESS_STATUS with tasksInProgressKey: {}, workflowId: {}, taskId: {}, taskType: {}, taskStatus: {} during updateTask", - nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName(), task.getTaskId()), - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType(), - task.getStatus().name()); - } else { - jedisProxy.srem( - nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()), task.getTaskId()); - LOGGER.debug( - "Workflow Task removed from TASKS_IN_PROGRESS_STATUS with tasksInProgressKey: {}, workflowId: {}, taskId: {}, taskType: {}, taskStatus: {} during updateTask", - nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName(), task.getTaskId()), - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType(), - task.getStatus().name()); - String key = nsKey(TASK_LIMIT_BUCKET, task.getTaskDefName()); - jedisProxy.zrem(key, task.getTaskId()); - LOGGER.debug( - "Workflow Task removed from TASK_LIMIT_BUCKET with taskLimitBucketKey: {}, workflowId: {}, taskId: {}, taskType: {}, taskStatus: {} during updateTask", - key, - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType(), - task.getStatus().name()); - } - } - - String payload = toJson(task); - recordRedisDaoPayloadSize( - "updateTask", - payload.length(), - taskDefinition.map(TaskDef::getName).orElse("n/a"), - task.getWorkflowType()); - - 
recordRedisDaoRequests("updateTask", task.getTaskType(), task.getWorkflowType()); - jedisProxy.set(nsKey(TASK, task.getTaskId()), payload); - LOGGER.debug( - "Workflow task payload saved to TASK with taskKey: {}, workflowId: {}, taskId: {}, taskType: {} during updateTask", - nsKey(TASK, task.getTaskId()), - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType()); - if (task.getStatus() != null && task.getStatus().isTerminal()) { - jedisProxy.srem(nsKey(IN_PROGRESS_TASKS, task.getTaskDefName()), task.getTaskId()); - LOGGER.debug( - "Workflow Task removed from TASKS_IN_PROGRESS_STATUS with tasksInProgressKey: {}, workflowId: {}, taskId: {}, taskType: {}, taskStatus: {} during updateTask", - nsKey(IN_PROGRESS_TASKS, task.getTaskDefName()), - task.getWorkflowInstanceId(), - task.getTaskId(), - task.getTaskType(), - task.getStatus().name()); - } - - Set taskIds = - jedisProxy.smembers(nsKey(WORKFLOW_TO_TASKS, task.getWorkflowInstanceId())); - if (!taskIds.contains(task.getTaskId())) { - correlateTaskToWorkflowInDS(task.getTaskId(), task.getWorkflowInstanceId()); - } - } - - @Override - public boolean exceedsLimit(TaskModel task) { - Optional taskDefinition = task.getTaskDefinition(); - if (taskDefinition.isEmpty()) { - return false; - } - int limit = taskDefinition.get().concurrencyLimit(); - if (limit <= 0) { - return false; - } - - long current = getInProgressTaskCount(task.getTaskDefName()); - if (current >= limit) { - LOGGER.info( - "Task execution count limited. 
task - {}:{}, limit: {}, current: {}", - task.getTaskId(), - task.getTaskDefName(), - limit, - current); - Monitors.recordTaskConcurrentExecutionLimited(task.getTaskDefName(), limit); - return true; - } - - String rateLimitKey = nsKey(TASK_LIMIT_BUCKET, task.getTaskDefName()); - double score = System.currentTimeMillis(); - String taskId = task.getTaskId(); - jedisProxy.zaddnx(rateLimitKey, score, taskId); - recordRedisDaoRequests("checkTaskRateLimiting", task.getTaskType(), task.getWorkflowType()); - - Set ids = jedisProxy.zrangeByScore(rateLimitKey, 0, score + 1, limit); - boolean rateLimited = !ids.contains(taskId); - if (rateLimited) { - LOGGER.info( - "Task execution count limited. task - {}:{}, limit: {}, current: {}", - task.getTaskId(), - task.getTaskDefName(), - limit, - current); - String inProgressKey = nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()); - // Cleanup any items that are still present in the rate limit bucket but not in progress - // anymore! - ids.stream() - .filter(id -> !jedisProxy.sismember(inProgressKey, id)) - .forEach(id2 -> jedisProxy.zrem(rateLimitKey, id2)); - Monitors.recordTaskRateLimited(task.getTaskDefName(), limit); - } - return rateLimited; - } - - private void removeTaskMappings(TaskModel task) { - String taskKey = task.getReferenceTaskName() + "" + task.getRetryCount(); - - jedisProxy.hdel(nsKey(SCHEDULED_TASKS, task.getWorkflowInstanceId()), taskKey); - jedisProxy.srem(nsKey(IN_PROGRESS_TASKS, task.getTaskDefName()), task.getTaskId()); - jedisProxy.srem(nsKey(WORKFLOW_TO_TASKS, task.getWorkflowInstanceId()), task.getTaskId()); - jedisProxy.srem(nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()), task.getTaskId()); - jedisProxy.zrem(nsKey(TASK_LIMIT_BUCKET, task.getTaskDefName()), task.getTaskId()); - } - - private void removeTaskMappingsWithExpiry(TaskModel task) { - String taskKey = task.getReferenceTaskName() + "" + task.getRetryCount(); - - jedisProxy.hdel(nsKey(SCHEDULED_TASKS, task.getWorkflowInstanceId()), 
taskKey); - jedisProxy.srem(nsKey(IN_PROGRESS_TASKS, task.getTaskDefName()), task.getTaskId()); - jedisProxy.srem(nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()), task.getTaskId()); - jedisProxy.zrem(nsKey(TASK_LIMIT_BUCKET, task.getTaskDefName()), task.getTaskId()); - } - - @Override - public boolean removeTask(String taskId) { - TaskModel task = getTask(taskId); - if (task == null) { - LOGGER.warn("No such task found by id {}", taskId); - return false; - } - removeTaskMappings(task); - - jedisProxy.del(nsKey(TASK, task.getTaskId())); - recordRedisDaoRequests("removeTask", task.getTaskType(), task.getWorkflowType()); - return true; - } - - private boolean removeTaskWithExpiry(String taskId, int ttlSeconds) { - TaskModel task = getTask(taskId); - if (task == null) { - LOGGER.warn("No such task found by id {}", taskId); - return false; - } - removeTaskMappingsWithExpiry(task); - - jedisProxy.expire(nsKey(TASK, task.getTaskId()), ttlSeconds); - recordRedisDaoRequests("removeTask", task.getTaskType(), task.getWorkflowType()); - return true; - } - - @Override - public TaskModel getTask(String taskId) { - Preconditions.checkNotNull(taskId, "taskId cannot be null"); - return Optional.ofNullable(jedisProxy.get(nsKey(TASK, taskId))) - .map( - json -> { - TaskModel task = readValue(json, TaskModel.class); - recordRedisDaoRequests( - "getTask", task.getTaskType(), task.getWorkflowType()); - recordRedisDaoPayloadSize( - "getTask", - toJson(task).length(), - task.getTaskType(), - task.getWorkflowType()); - return task; - }) - .orElse(null); - } - - @Override - public List getTasks(List taskIds) { - return taskIds.stream() - .map(taskId -> nsKey(TASK, taskId)) - .map(jedisProxy::get) - .filter(Objects::nonNull) - .map( - jsonString -> { - TaskModel task = readValue(jsonString, TaskModel.class); - recordRedisDaoRequests( - "getTask", task.getTaskType(), task.getWorkflowType()); - recordRedisDaoPayloadSize( - "getTask", - jsonString.length(), - task.getTaskType(), - 
task.getWorkflowType()); - return task; - }) - .collect(Collectors.toList()); - } - - @Override - public List getTasksForWorkflow(String workflowId) { - Preconditions.checkNotNull(workflowId, "workflowId cannot be null"); - Set taskIds = jedisProxy.smembers(nsKey(WORKFLOW_TO_TASKS, workflowId)); - recordRedisDaoRequests("getTasksForWorkflow"); - return getTasks(new ArrayList<>(taskIds)); - } - - @Override - public List getPendingTasksForTaskType(String taskName) { - Preconditions.checkNotNull(taskName, "task name cannot be null"); - Set taskIds = jedisProxy.smembers(nsKey(IN_PROGRESS_TASKS, taskName)); - recordRedisDaoRequests("getPendingTasksForTaskType"); - return getTasks(new ArrayList<>(taskIds)); - } - - @Override - public String createWorkflow(WorkflowModel workflow) { - return insertOrUpdateWorkflow(workflow, false); - } - - @Override - public String updateWorkflow(WorkflowModel workflow) { - return insertOrUpdateWorkflow(workflow, true); - } - - @Override - public boolean removeWorkflow(String workflowId) { - WorkflowModel workflow = getWorkflow(workflowId, true); - if (workflow != null) { - recordRedisDaoRequests("removeWorkflow"); - - // Remove from lists - String key = - nsKey( - WORKFLOW_DEF_TO_WORKFLOWS, - workflow.getWorkflowName(), - dateStr(workflow.getCreateTime())); - jedisProxy.srem(key, workflowId); - jedisProxy.srem(nsKey(CORR_ID_TO_WORKFLOWS, workflow.getCorrelationId()), workflowId); - jedisProxy.srem(nsKey(PENDING_WORKFLOWS, workflow.getWorkflowName()), workflowId); - - // Remove the object - jedisProxy.del(nsKey(WORKFLOW, workflowId)); - for (TaskModel task : workflow.getTasks()) { - removeTask(task.getTaskId()); - } - return true; - } - return false; - } - - public boolean removeWorkflowWithExpiry(String workflowId, int ttlSeconds) { - WorkflowModel workflow = getWorkflow(workflowId, true); - if (workflow != null) { - recordRedisDaoRequests("removeWorkflow"); - - // Remove from lists - String key = - nsKey( - WORKFLOW_DEF_TO_WORKFLOWS, - 
workflow.getWorkflowName(), - dateStr(workflow.getCreateTime())); - jedisProxy.srem(key, workflowId); - jedisProxy.srem(nsKey(CORR_ID_TO_WORKFLOWS, workflow.getCorrelationId()), workflowId); - jedisProxy.srem(nsKey(PENDING_WORKFLOWS, workflow.getWorkflowName()), workflowId); - - // Remove the object - jedisProxy.expire(nsKey(WORKFLOW, workflowId), ttlSeconds); - for (TaskModel task : workflow.getTasks()) { - removeTaskWithExpiry(task.getTaskId(), ttlSeconds); - } - jedisProxy.expire(nsKey(WORKFLOW_TO_TASKS, workflowId), ttlSeconds); - - return true; - } - return false; - } - - @Override - public void removeFromPendingWorkflow(String workflowType, String workflowId) { - recordRedisDaoRequests("removePendingWorkflow"); - jedisProxy.del(nsKey(SCHEDULED_TASKS, workflowId)); - jedisProxy.srem(nsKey(PENDING_WORKFLOWS, workflowType), workflowId); - } - - @Override - public WorkflowModel getWorkflow(String workflowId) { - return getWorkflow(workflowId, true); - } - - @Override - public WorkflowModel getWorkflow(String workflowId, boolean includeTasks) { - String json = jedisProxy.get(nsKey(WORKFLOW, workflowId)); - WorkflowModel workflow = null; - - if (json != null) { - workflow = readValue(json, WorkflowModel.class); - recordRedisDaoRequests("getWorkflow", "n/a", workflow.getWorkflowName()); - recordRedisDaoPayloadSize( - "getWorkflow", json.length(), "n/a", workflow.getWorkflowName()); - if (includeTasks) { - List tasks = getTasksForWorkflow(workflowId); - tasks.sort(Comparator.comparingInt(TaskModel::getSeq)); - workflow.setTasks(tasks); - } - } - return workflow; - } - - /** - * @param workflowName name of the workflow - * @param version the workflow version - * @return list of workflow ids that are in RUNNING state returns workflows of all versions - * for the given workflow name - */ - @Override - public List getRunningWorkflowIds(String workflowName, int version) { - Preconditions.checkNotNull(workflowName, "workflowName cannot be null"); - List workflowIds; - 
recordRedisDaoRequests("getRunningWorkflowsByName"); - Set pendingWorkflows = jedisProxy.smembers(nsKey(PENDING_WORKFLOWS, workflowName)); - workflowIds = new LinkedList<>(pendingWorkflows); - return workflowIds; - } - - /** - * @param workflowName name of the workflow - * @param version the workflow version - * @return list of workflows that are in RUNNING state - */ - @Override - public List getPendingWorkflowsByType(String workflowName, int version) { - Preconditions.checkNotNull(workflowName, "workflowName cannot be null"); - List workflowIds = getRunningWorkflowIds(workflowName, version); - return workflowIds.stream() - .map(this::getWorkflow) - .filter(workflow -> workflow.getWorkflowVersion() == version) - .collect(Collectors.toList()); - } - - @Override - public List getWorkflowsByType( - String workflowName, Long startTime, Long endTime) { - Preconditions.checkNotNull(workflowName, "workflowName cannot be null"); - Preconditions.checkNotNull(startTime, "startTime cannot be null"); - Preconditions.checkNotNull(endTime, "endTime cannot be null"); - - List workflows = new LinkedList<>(); - - // Get all date strings between start and end - List dateStrs = dateStrBetweenDates(startTime, endTime); - dateStrs.forEach( - dateStr -> { - String key = nsKey(WORKFLOW_DEF_TO_WORKFLOWS, workflowName, dateStr); - jedisProxy - .smembers(key) - .forEach( - workflowId -> { - try { - WorkflowModel workflow = getWorkflow(workflowId); - if (workflow.getCreateTime() >= startTime - && workflow.getCreateTime() <= endTime) { - workflows.add(workflow); - } - } catch (Exception e) { - LOGGER.error( - "Failed to get workflow: {}", workflowId, e); - } - }); - }); - - return workflows; - } - - @Override - public List getWorkflowsByCorrelationId( - String workflowName, String correlationId, boolean includeTasks) { - throw new UnsupportedOperationException( - "This method is not implemented in RedisExecutionDAO. 
Please use ExecutionDAOFacade instead."); - } - - @Override - public boolean canSearchAcrossWorkflows() { - return false; - } - - /** - * Inserts a new workflow/ updates an existing workflow in the datastore. Additionally, if a - * workflow is in terminal state, it is removed from the set of pending workflows. - * - * @param workflow the workflow instance - * @param update flag to identify if update or create operation - * @return the workflowId - */ - private String insertOrUpdateWorkflow(WorkflowModel workflow, boolean update) { - Preconditions.checkNotNull(workflow, "workflow object cannot be null"); - - List tasks = workflow.getTasks(); - workflow.setTasks(new LinkedList<>()); - - String payload = toJson(workflow); - // Store the workflow object - jedisProxy.set(nsKey(WORKFLOW, workflow.getWorkflowId()), payload); - recordRedisDaoRequests("storeWorkflow", "n/a", workflow.getWorkflowName()); - recordRedisDaoPayloadSize( - "storeWorkflow", payload.length(), "n/a", workflow.getWorkflowName()); - if (!update) { - // Add to list of workflows for a workflowdef - String key = - nsKey( - WORKFLOW_DEF_TO_WORKFLOWS, - workflow.getWorkflowName(), - dateStr(workflow.getCreateTime())); - jedisProxy.sadd(key, workflow.getWorkflowId()); - if (workflow.getCorrelationId() != null) { - // Add to list of workflows for a correlationId - jedisProxy.sadd( - nsKey(CORR_ID_TO_WORKFLOWS, workflow.getCorrelationId()), - workflow.getWorkflowId()); - } - } - // Add or remove from the pending workflows - if (workflow.getStatus().isTerminal()) { - jedisProxy.srem( - nsKey(PENDING_WORKFLOWS, workflow.getWorkflowName()), workflow.getWorkflowId()); - } else { - jedisProxy.sadd( - nsKey(PENDING_WORKFLOWS, workflow.getWorkflowName()), workflow.getWorkflowId()); - } - - workflow.setTasks(tasks); - return workflow.getWorkflowId(); - } - - /** - * Stores the correlation of a task to the workflow instance in the datastore - * - * @param taskId the taskId to be correlated - * @param 
workflowInstanceId the workflowId to which the tasks belongs to - */ - @VisibleForTesting - void correlateTaskToWorkflowInDS(String taskId, String workflowInstanceId) { - String workflowToTaskKey = nsKey(WORKFLOW_TO_TASKS, workflowInstanceId); - jedisProxy.sadd(workflowToTaskKey, taskId); - LOGGER.debug( - "Task mapped in WORKFLOW_TO_TASKS with workflowToTaskKey: {}, workflowId: {}, taskId: {}", - workflowToTaskKey, - workflowInstanceId, - taskId); - } - - public long getPendingWorkflowCount(String workflowName) { - String key = nsKey(PENDING_WORKFLOWS, workflowName); - recordRedisDaoRequests("getPendingWorkflowCount"); - return jedisProxy.scard(key); - } - - @Override - public long getInProgressTaskCount(String taskDefName) { - String inProgressKey = nsKey(TASKS_IN_PROGRESS_STATUS, taskDefName); - recordRedisDaoRequests("getInProgressTaskCount"); - return jedisProxy.scard(inProgressKey); - } - - @Override - public boolean addEventExecution(EventExecution eventExecution) { - try { - String key = - nsKey( - EVENT_EXECUTION, - eventExecution.getName(), - eventExecution.getEvent(), - eventExecution.getMessageId()); - String json = objectMapper.writeValueAsString(eventExecution); - recordRedisDaoEventRequests("addEventExecution", eventExecution.getEvent()); - recordRedisDaoPayloadSize( - "addEventExecution", json.length(), eventExecution.getEvent(), "n/a"); - boolean added = jedisProxy.hsetnx(key, eventExecution.getId(), json) == 1L; - - if (ttlEventExecutionSeconds > 0) { - jedisProxy.expire(key, ttlEventExecutionSeconds); - } - - return added; - } catch (Exception e) { - throw new ApplicationException( - Code.BACKEND_ERROR, - "Unable to add event execution for " + eventExecution.getId(), - e); - } - } - - @Override - public void updateEventExecution(EventExecution eventExecution) { - try { - - String key = - nsKey( - EVENT_EXECUTION, - eventExecution.getName(), - eventExecution.getEvent(), - eventExecution.getMessageId()); - String json = 
objectMapper.writeValueAsString(eventExecution); - LOGGER.info("updating event execution {}", key); - jedisProxy.hset(key, eventExecution.getId(), json); - recordRedisDaoEventRequests("updateEventExecution", eventExecution.getEvent()); - recordRedisDaoPayloadSize( - "updateEventExecution", json.length(), eventExecution.getEvent(), "n/a"); - } catch (Exception e) { - throw new ApplicationException( - Code.BACKEND_ERROR, - "Unable to update event execution for " + eventExecution.getId(), - e); - } - } - - @Override - public void removeEventExecution(EventExecution eventExecution) { - try { - String key = - nsKey( - EVENT_EXECUTION, - eventExecution.getName(), - eventExecution.getEvent(), - eventExecution.getMessageId()); - LOGGER.info("removing event execution {}", key); - jedisProxy.hdel(key, eventExecution.getId()); - recordRedisDaoEventRequests("removeEventExecution", eventExecution.getEvent()); - } catch (Exception e) { - throw new ApplicationException( - Code.BACKEND_ERROR, - "Unable to remove event execution for " + eventExecution.getId(), - e); - } - } - - public List getEventExecutions( - String eventHandlerName, String eventName, String messageId, int max) { - try { - String key = nsKey(EVENT_EXECUTION, eventHandlerName, eventName, messageId); - LOGGER.info("getting event execution {}", key); - List executions = new LinkedList<>(); - for (int i = 0; i < max; i++) { - String field = messageId + "_" + i; - String value = jedisProxy.hget(key, field); - if (value == null) { - break; - } - recordRedisDaoEventRequests("getEventExecution", eventHandlerName); - recordRedisDaoPayloadSize( - "getEventExecution", value.length(), eventHandlerName, "n/a"); - EventExecution eventExecution = objectMapper.readValue(value, EventExecution.class); - executions.add(eventExecution); - } - return executions; - - } catch (Exception e) { - throw new ApplicationException( - Code.BACKEND_ERROR, - "Unable to get event executions for " + eventHandlerName, - e); - } - } - - private void 
validate(TaskModel task) { - try { - Preconditions.checkNotNull(task, "task object cannot be null"); - Preconditions.checkNotNull(task.getTaskId(), "Task id cannot be null"); - Preconditions.checkNotNull( - task.getWorkflowInstanceId(), "Workflow instance id cannot be null"); - Preconditions.checkNotNull( - task.getReferenceTaskName(), "Task reference name cannot be null"); - } catch (NullPointerException npe) { - throw new ApplicationException(Code.INVALID_INPUT, npe.getMessage(), npe); - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisMetadataDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisMetadataDAO.java deleted file mode 100644 index 81d671b48..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisMetadataDAO.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.exception.ApplicationException.Code; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; - -@Component -@Conditional(AnyRedisCondition.class) -public class RedisMetadataDAO extends BaseDynoDAO implements MetadataDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisMetadataDAO.class); - - // Keys Families - private static final String ALL_TASK_DEFS = "TASK_DEFS"; - private static final String WORKFLOW_DEF_NAMES = "WORKFLOW_DEF_NAMES"; - private static final String 
WORKFLOW_DEF = "WORKFLOW_DEF"; - private static final String LATEST = "latest"; - private static final String className = RedisMetadataDAO.class.getSimpleName(); - private Map taskDefCache = new HashMap<>(); - - public RedisMetadataDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - super(jedisProxy, objectMapper, conductorProperties, properties); - refreshTaskDefs(); - long cacheRefreshTime = properties.getTaskDefCacheRefreshInterval().getSeconds(); - Executors.newSingleThreadScheduledExecutor() - .scheduleWithFixedDelay( - this::refreshTaskDefs, - cacheRefreshTime, - cacheRefreshTime, - TimeUnit.SECONDS); - } - - @Override - public void createTaskDef(TaskDef taskDef) { - insertOrUpdateTaskDef(taskDef); - } - - @Override - public String updateTaskDef(TaskDef taskDef) { - return insertOrUpdateTaskDef(taskDef); - } - - private String insertOrUpdateTaskDef(TaskDef taskDef) { - // Store all task def in under one key - String payload = toJson(taskDef); - jedisProxy.hset(nsKey(ALL_TASK_DEFS), taskDef.getName(), payload); - recordRedisDaoRequests("storeTaskDef"); - recordRedisDaoPayloadSize("storeTaskDef", payload.length(), taskDef.getName(), "n/a"); - refreshTaskDefs(); - return taskDef.getName(); - } - - private void refreshTaskDefs() { - try { - Map map = new HashMap<>(); - getAllTaskDefs().forEach(taskDef -> map.put(taskDef.getName(), taskDef)); - this.taskDefCache = map; - LOGGER.debug("Refreshed task defs " + this.taskDefCache.size()); - } catch (Exception e) { - Monitors.error(className, "refreshTaskDefs"); - LOGGER.error("refresh TaskDefs failed ", e); - } - } - - @Override - public TaskDef getTaskDef(String name) { - return Optional.ofNullable(taskDefCache.get(name)).orElseGet(() -> getTaskDefFromDB(name)); - } - - private TaskDef getTaskDefFromDB(String name) { - Preconditions.checkNotNull(name, "TaskDef name cannot be null"); - - TaskDef taskDef = null; - String taskDefJsonStr 
= jedisProxy.hget(nsKey(ALL_TASK_DEFS), name); - if (taskDefJsonStr != null) { - taskDef = readValue(taskDefJsonStr, TaskDef.class); - recordRedisDaoRequests("getTaskDef"); - recordRedisDaoPayloadSize( - "getTaskDef", taskDefJsonStr.length(), taskDef.getName(), "n/a"); - } - return taskDef; - } - - @Override - public List getAllTaskDefs() { - List allTaskDefs = new LinkedList<>(); - - recordRedisDaoRequests("getAllTaskDefs"); - Map taskDefs = jedisProxy.hgetAll(nsKey(ALL_TASK_DEFS)); - int size = 0; - if (taskDefs.size() > 0) { - for (String taskDefJsonStr : taskDefs.values()) { - if (taskDefJsonStr != null) { - allTaskDefs.add(readValue(taskDefJsonStr, TaskDef.class)); - size += taskDefJsonStr.length(); - } - } - recordRedisDaoPayloadSize("getAllTaskDefs", size, "n/a", "n/a"); - } - - return allTaskDefs; - } - - @Override - public void removeTaskDef(String name) { - Preconditions.checkNotNull(name, "TaskDef name cannot be null"); - Long result = jedisProxy.hdel(nsKey(ALL_TASK_DEFS), name); - if (!result.equals(1L)) { - throw new ApplicationException( - Code.NOT_FOUND, "Cannot remove the task - no such task definition"); - } - recordRedisDaoRequests("removeTaskDef"); - refreshTaskDefs(); - } - - @Override - public void createWorkflowDef(WorkflowDef def) { - if (jedisProxy.hexists( - nsKey(WORKFLOW_DEF, def.getName()), String.valueOf(def.getVersion()))) { - throw new ApplicationException( - Code.CONFLICT, "Workflow with " + def.key() + " already exists!"); - } - _createOrUpdate(def); - } - - @Override - public void updateWorkflowDef(WorkflowDef def) { - _createOrUpdate(def); - } - - @Override - /* - * @param name Name of the workflow definition - * @return Latest version of workflow definition - * @see WorkflowDef - */ - public Optional getLatestWorkflowDef(String name) { - Preconditions.checkNotNull(name, "WorkflowDef name cannot be null"); - WorkflowDef workflowDef = null; - - Optional optionalMaxVersion = getWorkflowMaxVersion(name); - - if 
(optionalMaxVersion.isPresent()) { - String latestdata = - jedisProxy.hget(nsKey(WORKFLOW_DEF, name), optionalMaxVersion.get().toString()); - if (latestdata != null) { - workflowDef = readValue(latestdata, WorkflowDef.class); - } - } - - return Optional.ofNullable(workflowDef); - } - - private Optional getWorkflowMaxVersion(String workflowName) { - return jedisProxy.hkeys(nsKey(WORKFLOW_DEF, workflowName)).stream() - .filter(key -> !key.equals(LATEST)) - .map(Integer::valueOf) - .max(Comparator.naturalOrder()); - } - - public List getAllVersions(String name) { - Preconditions.checkNotNull(name, "WorkflowDef name cannot be null"); - List workflows = new LinkedList<>(); - - recordRedisDaoRequests("getAllWorkflowDefsByName"); - Map workflowDefs = jedisProxy.hgetAll(nsKey(WORKFLOW_DEF, name)); - int size = 0; - for (String key : workflowDefs.keySet()) { - if (key.equals(LATEST)) { - continue; - } - String workflowDef = workflowDefs.get(key); - workflows.add(readValue(workflowDef, WorkflowDef.class)); - size += workflowDef.length(); - } - recordRedisDaoPayloadSize("getAllWorkflowDefsByName", size, "n/a", name); - - return workflows; - } - - @Override - public Optional getWorkflowDef(String name, int version) { - Preconditions.checkNotNull(name, "WorkflowDef name cannot be null"); - WorkflowDef def = null; - - recordRedisDaoRequests("getWorkflowDef"); - String workflowDefJsonString = - jedisProxy.hget(nsKey(WORKFLOW_DEF, name), String.valueOf(version)); - if (workflowDefJsonString != null) { - def = readValue(workflowDefJsonString, WorkflowDef.class); - recordRedisDaoPayloadSize( - "getWorkflowDef", workflowDefJsonString.length(), "n/a", name); - } - return Optional.ofNullable(def); - } - - @Override - public void removeWorkflowDef(String name, Integer version) { - Preconditions.checkArgument( - StringUtils.isNotBlank(name), "WorkflowDef name cannot be null"); - Preconditions.checkNotNull(version, "Input version cannot be null"); - Long result = 
jedisProxy.hdel(nsKey(WORKFLOW_DEF, name), String.valueOf(version)); - if (!result.equals(1L)) { - throw new ApplicationException( - Code.NOT_FOUND, - String.format( - "Cannot remove the workflow - no such workflow" - + " definition: %s version: %d", - name, version)); - } - - // check if there are any more versions remaining if not delete the - // workflow name - Optional optionMaxVersion = getWorkflowMaxVersion(name); - - // delete workflow name - if (!optionMaxVersion.isPresent()) { - jedisProxy.srem(nsKey(WORKFLOW_DEF_NAMES), name); - } - - recordRedisDaoRequests("removeWorkflowDef"); - } - - public List findAll() { - Set wfNames = jedisProxy.smembers(nsKey(WORKFLOW_DEF_NAMES)); - return new ArrayList<>(wfNames); - } - - @Override - public List getAllWorkflowDefs() { - List workflows = new LinkedList<>(); - - // Get all from WORKFLOW_DEF_NAMES - recordRedisDaoRequests("getAllWorkflowDefs"); - Set wfNames = jedisProxy.smembers(nsKey(WORKFLOW_DEF_NAMES)); - int size = 0; - for (String wfName : wfNames) { - Map workflowDefs = jedisProxy.hgetAll(nsKey(WORKFLOW_DEF, wfName)); - for (String key : workflowDefs.keySet()) { - if (key.equals(LATEST)) { - continue; - } - String workflowDef = workflowDefs.get(key); - workflows.add(readValue(workflowDef, WorkflowDef.class)); - size += workflowDef.length(); - } - } - recordRedisDaoPayloadSize("getAllWorkflowDefs", size, "n/a", "n/a"); - return workflows; - } - - private void _createOrUpdate(WorkflowDef workflowDef) { - // First set the workflow def - jedisProxy.hset( - nsKey(WORKFLOW_DEF, workflowDef.getName()), - String.valueOf(workflowDef.getVersion()), - toJson(workflowDef)); - - jedisProxy.sadd(nsKey(WORKFLOW_DEF_NAMES), workflowDef.getName()); - recordRedisDaoRequests("storeWorkflowDef", "n/a", workflowDef.getName()); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisPollDataDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisPollDataDAO.java deleted 
file mode 100644 index 717e582a7..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisPollDataDAO.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import org.apache.commons.lang3.StringUtils; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.PollDataDAO; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; - -@Component -@Conditional(AnyRedisCondition.class) -public class RedisPollDataDAO extends BaseDynoDAO implements PollDataDAO { - - private static final String POLL_DATA = "POLL_DATA"; - - public RedisPollDataDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - super(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Override - public void updateLastPollData(String taskDefName, String domain, String workerId) { - Preconditions.checkNotNull(taskDefName, "taskDefName name cannot be null"); - PollData pollData = new PollData(taskDefName, domain, workerId, System.currentTimeMillis()); - - String key = nsKey(POLL_DATA, pollData.getQueueName()); - String field = (domain == null) ? 
"DEFAULT" : domain; - - String payload = toJson(pollData); - recordRedisDaoRequests("updatePollData"); - recordRedisDaoPayloadSize("updatePollData", payload.length(), "n/a", "n/a"); - jedisProxy.hset(key, field, payload); - } - - @Override - public PollData getPollData(String taskDefName, String domain) { - Preconditions.checkNotNull(taskDefName, "taskDefName name cannot be null"); - - String key = nsKey(POLL_DATA, taskDefName); - String field = (domain == null) ? "DEFAULT" : domain; - - String pollDataJsonString = jedisProxy.hget(key, field); - recordRedisDaoRequests("getPollData"); - recordRedisDaoPayloadSize( - "getPollData", StringUtils.length(pollDataJsonString), "n/a", "n/a"); - - PollData pollData = null; - if (StringUtils.isNotBlank(pollDataJsonString)) { - pollData = readValue(pollDataJsonString, PollData.class); - } - return pollData; - } - - @Override - public List getPollData(String taskDefName) { - Preconditions.checkNotNull(taskDefName, "taskDefName name cannot be null"); - - String key = nsKey(POLL_DATA, taskDefName); - - Map pMapdata = jedisProxy.hgetAll(key); - List pollData = new ArrayList<>(); - if (pMapdata != null) { - pMapdata.values() - .forEach( - pollDataJsonString -> { - pollData.add(readValue(pollDataJsonString, PollData.class)); - recordRedisDaoRequests("getPollData"); - recordRedisDaoPayloadSize( - "getPollData", pollDataJsonString.length(), "n/a", "n/a"); - }); - } - return pollData; - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisRateLimitingDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisRateLimitingDAO.java deleted file mode 100644 index 9535dc4cc..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dao/RedisRateLimitingDAO.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.Optional; - -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.RateLimitingDAO; -import com.netflix.conductor.metrics.Monitors; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.redis.config.AnyRedisCondition; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; - -@Component -@Conditional(AnyRedisCondition.class) -public class RedisRateLimitingDAO extends BaseDynoDAO implements RateLimitingDAO { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedisRateLimitingDAO.class); - - private static final String TASK_RATE_LIMIT_BUCKET = "TASK_RATE_LIMIT_BUCKET"; - - public RedisRateLimitingDAO( - JedisProxy jedisProxy, - ObjectMapper objectMapper, - ConductorProperties conductorProperties, - RedisProperties properties) { - super(jedisProxy, objectMapper, conductorProperties, properties); - } - - /** - * This method evaluates if the {@link TaskDef} is rate limited or not based on {@link - * TaskModel#getRateLimitPerFrequency()} and {@link TaskModel#getRateLimitFrequencyInSeconds()} - * if not checks the {@link TaskModel} is rate limited or not based on {@link - * TaskModel#getRateLimitPerFrequency()} and {@link 
TaskModel#getRateLimitFrequencyInSeconds()} - * - *

    The rate limiting is implemented using the Redis constructs of sorted set and TTL of each - * element in the rate limited bucket. - * - *

      - *
    • All the entries that are in the not in the frequency bucket are cleaned up by - * leveraging {@link JedisProxy#zremrangeByScore(String, String, String)}, this is done to - * make the next step of evaluation efficient - *
    • A current count(tasks executed within the frequency) is calculated based on the current - * time and the beginning of the rate limit frequency time(which is current time - {@link - * TaskModel#getRateLimitFrequencyInSeconds()} in millis), this is achieved by using - * {@link JedisProxy#zcount(String, double, double)} - *
    • Once the count is calculated then a evaluation is made to determine if it is within the - * bounds of {@link TaskModel#getRateLimitPerFrequency()}, if so the count is increased - * and an expiry TTL is added to the entry - *
    - * - * @param task: which needs to be evaluated whether it is rateLimited or not - * @return true: If the {@link TaskModel} is rateLimited false: If the {@link TaskModel} is not - * rateLimited - */ - @Override - public boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef) { - // Check if the TaskDefinition is not null then pick the definition values or else pick from - // the Task - ImmutablePair rateLimitPair = - Optional.ofNullable(taskDef) - .map( - definition -> - new ImmutablePair<>( - definition.getRateLimitPerFrequency(), - definition.getRateLimitFrequencyInSeconds())) - .orElse( - new ImmutablePair<>( - task.getRateLimitPerFrequency(), - task.getRateLimitFrequencyInSeconds())); - - int rateLimitPerFrequency = rateLimitPair.getLeft(); - int rateLimitFrequencyInSeconds = rateLimitPair.getRight(); - if (rateLimitPerFrequency <= 0 || rateLimitFrequencyInSeconds <= 0) { - LOGGER.debug( - "Rate limit not applied to the Task: {} either rateLimitPerFrequency: {} or rateLimitFrequencyInSeconds: {} is 0 or less", - task, - rateLimitPerFrequency, - rateLimitFrequencyInSeconds); - return false; - } else { - LOGGER.debug( - "Evaluating rate limiting for TaskId: {} with TaskDefinition of: {} with rateLimitPerFrequency: {} and rateLimitFrequencyInSeconds: {}", - task.getTaskId(), - task.getTaskDefName(), - rateLimitPerFrequency, - rateLimitFrequencyInSeconds); - long currentTimeEpochMillis = System.currentTimeMillis(); - long currentTimeEpochMinusRateLimitBucket = - currentTimeEpochMillis - (rateLimitFrequencyInSeconds * 1000L); - String key = nsKey(TASK_RATE_LIMIT_BUCKET, task.getTaskDefName()); - jedisProxy.zremrangeByScore( - key, "-inf", String.valueOf(currentTimeEpochMinusRateLimitBucket)); - int currentBucketCount = - Math.toIntExact( - jedisProxy.zcount( - key, - currentTimeEpochMinusRateLimitBucket, - currentTimeEpochMillis)); - if (currentBucketCount < rateLimitPerFrequency) { - jedisProxy.zadd( - key, currentTimeEpochMillis, 
String.valueOf(currentTimeEpochMillis)); - jedisProxy.expire(key, rateLimitFrequencyInSeconds); - LOGGER.info( - "TaskId: {} with TaskDefinition of: {} has rateLimitPerFrequency: {} and rateLimitFrequencyInSeconds: {} within the rate limit with current count {}", - task.getTaskId(), - task.getTaskDefName(), - rateLimitPerFrequency, - rateLimitFrequencyInSeconds, - ++currentBucketCount); - Monitors.recordTaskRateLimited(task.getTaskDefName(), rateLimitPerFrequency); - return false; - } else { - LOGGER.info( - "TaskId: {} with TaskDefinition of: {} has rateLimitPerFrequency: {} and rateLimitFrequencyInSeconds: {} is out of bounds of rate limit with current count {}", - task.getTaskId(), - task.getTaskDefName(), - rateLimitPerFrequency, - rateLimitFrequencyInSeconds, - currentBucketCount); - return true; - } - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/ConfigurationHostSupplier.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/ConfigurationHostSupplier.java deleted file mode 100644 index eb83b97c0..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/ConfigurationHostSupplier.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dynoqueue; - -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostBuilder; -import com.netflix.dyno.connectionpool.HostSupplier; - -public class ConfigurationHostSupplier implements HostSupplier { - - private static final Logger log = LoggerFactory.getLogger(ConfigurationHostSupplier.class); - - private final RedisProperties properties; - - public ConfigurationHostSupplier(RedisProperties properties) { - this.properties = properties; - } - - @Override - public List getHosts() { - return parseHostsFromConfig(); - } - - private List parseHostsFromConfig() { - String hosts = properties.getHosts(); - if (hosts == null) { - // FIXME This type of validation probably doesn't belong here. - String message = - "Missing dynomite/redis hosts. 
Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration."; - log.error(message); - throw new RuntimeException(message); - } - return parseHostsFrom(hosts); - } - - private List parseHostsFrom(String hostConfig) { - List hostConfigs = Arrays.asList(hostConfig.split(";")); - - return hostConfigs.stream() - .map( - hc -> { - String[] hostConfigValues = hc.split(":"); - String host = hostConfigValues[0]; - int port = Integer.parseInt(hostConfigValues[1]); - String rack = hostConfigValues[2]; - - if (hostConfigValues.length >= 4) { - String password = hostConfigValues[3]; - return new HostBuilder() - .setHostname(host) - .setPort(port) - .setRack(rack) - .setStatus(Host.Status.Up) - .setPassword(password) - .createHost(); - } - return new HostBuilder() - .setHostname(host) - .setPort(port) - .setRack(rack) - .setStatus(Host.Status.Up) - .createHost(); - }) - .collect(Collectors.toList()); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/LocalhostHostSupplier.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/LocalhostHostSupplier.java deleted file mode 100644 index 7f1823af8..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/LocalhostHostSupplier.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dynoqueue; - -import java.util.List; - -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.HostBuilder; -import com.netflix.dyno.connectionpool.HostSupplier; - -import com.google.common.collect.Lists; - -public class LocalhostHostSupplier implements HostSupplier { - - private final RedisProperties properties; - - public LocalhostHostSupplier(RedisProperties properties) { - this.properties = properties; - } - - @Override - public List getHosts() { - Host dynoHost = - new HostBuilder() - .setHostname("localhost") - .setIpAddress("0") - .setRack(properties.getAvailabilityZone()) - .setStatus(Host.Status.Up) - .createHost(); - return Lists.newArrayList(dynoHost); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/RedisQueuesShardingStrategyProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/RedisQueuesShardingStrategyProvider.java deleted file mode 100644 index 2ba4528bb..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/dynoqueue/RedisQueuesShardingStrategyProvider.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dynoqueue; - -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.dyno.queues.Message; -import com.netflix.dyno.queues.ShardSupplier; -import com.netflix.dyno.queues.redis.sharding.RoundRobinStrategy; -import com.netflix.dyno.queues.redis.sharding.ShardingStrategy; - -public class RedisQueuesShardingStrategyProvider { - - public static final String LOCAL_ONLY_STRATEGY = "localOnly"; - public static final String ROUND_ROBIN_STRATEGY = "roundRobin"; - - private static final Logger LOGGER = - LoggerFactory.getLogger(RedisQueuesShardingStrategyProvider.class); - private final ShardSupplier shardSupplier; - private final RedisProperties properties; - - public RedisQueuesShardingStrategyProvider( - ShardSupplier shardSupplier, RedisProperties properties) { - this.shardSupplier = shardSupplier; - this.properties = properties; - } - - public ShardingStrategy get() { - String shardingStrat = properties.getQueueShardingStrategy(); - if (shardingStrat.equals(LOCAL_ONLY_STRATEGY)) { - LOGGER.info( - "Using {} sharding strategy for queues", - LocalOnlyStrategy.class.getSimpleName()); - return new LocalOnlyStrategy(shardSupplier); - } else { - LOGGER.info( - "Using {} sharding strategy for queues", - RoundRobinStrategy.class.getSimpleName()); - return new RoundRobinStrategy(); - } - } - - public static final class LocalOnlyStrategy implements ShardingStrategy { - - private static final Logger LOGGER = LoggerFactory.getLogger(LocalOnlyStrategy.class); - - private final ShardSupplier shardSupplier; - - public 
LocalOnlyStrategy(ShardSupplier shardSupplier) { - this.shardSupplier = shardSupplier; - } - - @Override - public String getNextShard(List allShards, Message message) { - LOGGER.debug( - "Always using {} shard out of {}", shardSupplier.getCurrentShard(), allShards); - return shardSupplier.getCurrentShard(); - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java deleted file mode 100644 index b757f88c7..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java +++ /dev/null @@ -1,953 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.AbstractMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.stream.Collectors; - -import redis.clients.jedis.BitPosParams; -import redis.clients.jedis.GeoCoordinate; -import redis.clients.jedis.GeoRadiusResponse; -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.StreamConsumersInfo; -import redis.clients.jedis.StreamEntry; -import redis.clients.jedis.StreamEntryID; -import redis.clients.jedis.StreamGroupInfo; -import redis.clients.jedis.StreamInfo; -import redis.clients.jedis.StreamPendingEntry; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.commands.JedisCommands; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; - -public class JedisCluster implements JedisCommands { - - private final redis.clients.jedis.JedisCluster jedisCluster; - - public JedisCluster(redis.clients.jedis.JedisCluster jedisCluster) { - this.jedisCluster = jedisCluster; - } - - @Override - public String set(String key, String value) { - return jedisCluster.set(key, value); - } - - @Override - public String set(String key, String value, SetParams params) { - return jedisCluster.set(key, value, params); - } - - @Override - public String get(String key) { - return jedisCluster.get(key); - } - - @Override - public 
Boolean exists(String key) { - return jedisCluster.exists(key); - } - - @Override - public Long persist(String key) { - return jedisCluster.persist(key); - } - - @Override - public String type(String key) { - return jedisCluster.type(key); - } - - @Override - public byte[] dump(String key) { - return jedisCluster.dump(key); - } - - @Override - public String restore(String key, int ttl, byte[] serializedValue) { - return jedisCluster.restore(key, ttl, serializedValue); - } - - @Override - public String restoreReplace(String key, int ttl, byte[] serializedValue) { - throw new UnsupportedOperationException(); - } - - @Override - public Long expire(String key, int seconds) { - return jedisCluster.expire(key, seconds); - } - - @Override - public Long pexpire(String key, long milliseconds) { - return jedisCluster.pexpire(key, milliseconds); - } - - @Override - public Long expireAt(String key, long unixTime) { - return jedisCluster.expireAt(key, unixTime); - } - - @Override - public Long pexpireAt(String key, long millisecondsTimestamp) { - return jedisCluster.pexpireAt(key, millisecondsTimestamp); - } - - @Override - public Long ttl(String key) { - return jedisCluster.ttl(key); - } - - @Override - public Long pttl(String key) { - return jedisCluster.pttl(key); - } - - @Override - public Long touch(String key) { - return jedisCluster.touch(key); - } - - @Override - public Boolean setbit(String key, long offset, boolean value) { - return jedisCluster.setbit(key, offset, value); - } - - @Override - public Boolean setbit(String key, long offset, String value) { - return jedisCluster.setbit(key, offset, value); - } - - @Override - public Boolean getbit(String key, long offset) { - return jedisCluster.getbit(key, offset); - } - - @Override - public Long setrange(String key, long offset, String value) { - return jedisCluster.setrange(key, offset, value); - } - - @Override - public String getrange(String key, long startOffset, long endOffset) { - return 
jedisCluster.getrange(key, startOffset, endOffset); - } - - @Override - public String getSet(String key, String value) { - return jedisCluster.getSet(key, value); - } - - @Override - public Long setnx(String key, String value) { - return jedisCluster.setnx(key, value); - } - - @Override - public String setex(String key, int seconds, String value) { - return jedisCluster.setex(key, seconds, value); - } - - @Override - public String psetex(String key, long milliseconds, String value) { - return jedisCluster.psetex(key, milliseconds, value); - } - - @Override - public Long decrBy(String key, long integer) { - return jedisCluster.decrBy(key, integer); - } - - @Override - public Long decr(String key) { - return jedisCluster.decr(key); - } - - @Override - public Long incrBy(String key, long integer) { - return jedisCluster.incrBy(key, integer); - } - - @Override - public Double incrByFloat(String key, double value) { - return jedisCluster.incrByFloat(key, value); - } - - @Override - public Long incr(String key) { - return jedisCluster.incr(key); - } - - @Override - public Long append(String key, String value) { - return jedisCluster.append(key, value); - } - - @Override - public String substr(String key, int start, int end) { - return jedisCluster.substr(key, start, end); - } - - @Override - public Long hset(String key, String field, String value) { - return jedisCluster.hset(key, field, value); - } - - @Override - public Long hset(String key, Map hash) { - return jedisCluster.hset(key, hash); - } - - @Override - public String hget(String key, String field) { - return jedisCluster.hget(key, field); - } - - @Override - public Long hsetnx(String key, String field, String value) { - return jedisCluster.hsetnx(key, field, value); - } - - @Override - public String hmset(String key, Map hash) { - return jedisCluster.hmset(key, hash); - } - - @Override - public List hmget(String key, String... 
fields) { - return jedisCluster.hmget(key, fields); - } - - @Override - public Long hincrBy(String key, String field, long value) { - return jedisCluster.hincrBy(key, field, value); - } - - @Override - public Double hincrByFloat(String key, String field, double value) { - return jedisCluster.hincrByFloat(key.getBytes(), field.getBytes(), value); - } - - @Override - public Boolean hexists(String key, String field) { - return jedisCluster.hexists(key, field); - } - - @Override - public Long hdel(String key, String... field) { - return jedisCluster.hdel(key, field); - } - - @Override - public Long hlen(String key) { - return jedisCluster.hlen(key); - } - - @Override - public Set hkeys(String key) { - return jedisCluster.hkeys(key); - } - - @Override - public List hvals(String key) { - return jedisCluster.hvals(key); - } - - @Override - public Map hgetAll(String key) { - return jedisCluster.hgetAll(key); - } - - @Override - public Long rpush(String key, String... string) { - return jedisCluster.rpush(key, string); - } - - @Override - public Long lpush(String key, String... 
string) { - return jedisCluster.lpush(key, string); - } - - @Override - public Long llen(String key) { - return jedisCluster.llen(key); - } - - @Override - public List lrange(String key, long start, long end) { - return jedisCluster.lrange(key, start, end); - } - - @Override - public String ltrim(String key, long start, long end) { - return jedisCluster.ltrim(key, start, end); - } - - @Override - public String lindex(String key, long index) { - return jedisCluster.lindex(key, index); - } - - @Override - public String lset(String key, long index, String value) { - return jedisCluster.lset(key, index, value); - } - - @Override - public Long lrem(String key, long count, String value) { - return jedisCluster.lrem(key, count, value); - } - - @Override - public String lpop(String key) { - return jedisCluster.lpop(key); - } - - @Override - public String rpop(String key) { - return jedisCluster.rpop(key); - } - - @Override - public Long sadd(String key, String... member) { - return jedisCluster.sadd(key, member); - } - - @Override - public Set smembers(String key) { - return jedisCluster.smembers(key); - } - - @Override - public Long srem(String key, String... 
member) { - return jedisCluster.srem(key, member); - } - - @Override - public String spop(String key) { - return jedisCluster.spop(key); - } - - @Override - public Set spop(String key, long count) { - return jedisCluster.spop(key, count); - } - - @Override - public Long scard(String key) { - return jedisCluster.scard(key); - } - - @Override - public Boolean sismember(String key, String member) { - return jedisCluster.sismember(key, member); - } - - @Override - public String srandmember(String key) { - return jedisCluster.srandmember(key); - } - - @Override - public List srandmember(String key, int count) { - return jedisCluster.srandmember(key, count); - } - - @Override - public Long strlen(String key) { - return jedisCluster.strlen(key); - } - - @Override - public Long zadd(String key, double score, String member) { - return jedisCluster.zadd(key, score, member); - } - - @Override - public Long zadd(String key, double score, String member, ZAddParams params) { - return jedisCluster.zadd(key, score, member, params); - } - - @Override - public Long zadd(String key, Map scoreMembers) { - return jedisCluster.zadd(key, scoreMembers); - } - - @Override - public Long zadd(String key, Map scoreMembers, ZAddParams params) { - return jedisCluster.zadd(key, scoreMembers, params); - } - - @Override - public Set zrange(String key, long start, long end) { - return jedisCluster.zrange(key, start, end); - } - - @Override - public Long zrem(String key, String... 
member) { - return jedisCluster.zrem(key, member); - } - - @Override - public Double zincrby(String key, double score, String member) { - return jedisCluster.zincrby(key, score, member); - } - - @Override - public Double zincrby(String key, double score, String member, ZIncrByParams params) { - return jedisCluster.zincrby(key, score, member, params); - } - - @Override - public Long zrank(String key, String member) { - return jedisCluster.zrank(key, member); - } - - @Override - public Long zrevrank(String key, String member) { - return jedisCluster.zrevrank(key, member); - } - - @Override - public Set zrevrange(String key, long start, long end) { - return jedisCluster.zrevrange(key, start, end); - } - - @Override - public Set zrangeWithScores(String key, long start, long end) { - return jedisCluster.zrangeWithScores(key, start, end); - } - - @Override - public Set zrevrangeWithScores(String key, long start, long end) { - return jedisCluster.zrevrangeWithScores(key, start, end); - } - - @Override - public Long zcard(String key) { - return jedisCluster.zcard(key); - } - - @Override - public Double zscore(String key, String member) { - return jedisCluster.zscore(key, member); - } - - @Override - public Tuple zpopmax(String key) { - return jedisCluster.zpopmax(key); - } - - @Override - public Set zpopmax(String key, int count) { - return jedisCluster.zpopmax(key, count); - } - - @Override - public Tuple zpopmin(String key) { - return jedisCluster.zpopmin(key); - } - - @Override - public Set zpopmin(String key, int count) { - return jedisCluster.zpopmin(key, count); - } - - @Override - public List sort(String key) { - return jedisCluster.sort(key); - } - - @Override - public List sort(String key, SortingParams sortingParameters) { - return jedisCluster.sort(key, sortingParameters); - } - - @Override - public Long zcount(String key, double min, double max) { - return jedisCluster.zcount(key, min, max); - } - - @Override - public Long zcount(String key, String min, String 
max) { - return jedisCluster.zcount(key, min, max); - } - - @Override - public Set zrangeByScore(String key, double min, double max) { - return jedisCluster.zrangeByScore(key, min, max); - } - - @Override - public Set zrangeByScore(String key, String min, String max) { - return jedisCluster.zrangeByScore(key, min, max); - } - - @Override - public Set zrevrangeByScore(String key, double max, double min) { - return jedisCluster.zrevrangeByScore(key, max, min); - } - - @Override - public Set zrangeByScore(String key, double min, double max, int offset, int count) { - return jedisCluster.zrangeByScore(key, min, max, offset, count); - } - - @Override - public Set zrevrangeByScore(String key, String max, String min) { - return jedisCluster.zrevrangeByScore(key, max, min); - } - - @Override - public Set zrangeByScore(String key, String min, String max, int offset, int count) { - return jedisCluster.zrangeByScore(key, min, max, offset, count); - } - - @Override - public Set zrevrangeByScore(String key, double max, double min, int offset, int count) { - return jedisCluster.zrevrangeByScore(key, max, min, offset, count); - } - - @Override - public Set zrangeByScoreWithScores(String key, double min, double max) { - return jedisCluster.zrangeByScoreWithScores(key, min, max); - } - - @Override - public Set zrevrangeByScoreWithScores(String key, double max, double min) { - return jedisCluster.zrevrangeByScoreWithScores(key, max, min); - } - - @Override - public Set zrangeByScoreWithScores( - String key, double min, double max, int offset, int count) { - return jedisCluster.zrangeByScoreWithScores(key, min, max, offset, count); - } - - @Override - public Set zrevrangeByScore(String key, String max, String min, int offset, int count) { - return jedisCluster.zrevrangeByScore(key, max, min, offset, count); - } - - @Override - public Set zrangeByScoreWithScores(String key, String min, String max) { - return jedisCluster.zrangeByScoreWithScores(key, min, max); - } - - @Override - 
public Set zrevrangeByScoreWithScores(String key, String max, String min) { - return jedisCluster.zrevrangeByScoreWithScores(key, max, min); - } - - @Override - public Set zrangeByScoreWithScores( - String key, String min, String max, int offset, int count) { - return jedisCluster.zrangeByScoreWithScores(key, min, max, offset, count); - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, double max, double min, int offset, int count) { - return jedisCluster.zrevrangeByScoreWithScores(key, max, min, offset, count); - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, String max, String min, int offset, int count) { - return jedisCluster.zrevrangeByScoreWithScores(key, max, min, offset, count); - } - - @Override - public Long zremrangeByRank(String key, long start, long end) { - return jedisCluster.zremrangeByRank(key, start, end); - } - - @Override - public Long zremrangeByScore(String key, double start, double end) { - return jedisCluster.zremrangeByScore(key, start, end); - } - - @Override - public Long zremrangeByScore(String key, String start, String end) { - return jedisCluster.zremrangeByScore(key, start, end); - } - - @Override - public Long zlexcount(String key, String min, String max) { - return jedisCluster.zlexcount(key, min, max); - } - - @Override - public Set zrangeByLex(String key, String min, String max) { - return jedisCluster.zrangeByLex(key, min, max); - } - - @Override - public Set zrangeByLex(String key, String min, String max, int offset, int count) { - return jedisCluster.zrangeByLex(key, min, max, offset, count); - } - - @Override - public Set zrevrangeByLex(String key, String max, String min) { - return jedisCluster.zrevrangeByLex(key, max, min); - } - - @Override - public Set zrevrangeByLex(String key, String max, String min, int offset, int count) { - return jedisCluster.zrevrangeByLex(key, max, min, offset, count); - } - - @Override - public Long zremrangeByLex(String key, String min, String max) { - 
return jedisCluster.zremrangeByLex(key, min, max); - } - - @Override - public Long linsert(String key, ListPosition where, String pivot, String value) { - return jedisCluster.linsert(key, where, pivot, value); - } - - @Override - public Long lpushx(String key, String... string) { - return jedisCluster.lpushx(key, string); - } - - @Override - public Long rpushx(String key, String... string) { - return jedisCluster.rpushx(key, string); - } - - @Override - public List blpop(int timeout, String key) { - return jedisCluster.blpop(timeout, key); - } - - @Override - public List brpop(int timeout, String key) { - return jedisCluster.brpop(timeout, key); - } - - @Override - public Long del(String key) { - return jedisCluster.del(key); - } - - @Override - public Long unlink(String key) { - return jedisCluster.unlink(key); - } - - @Override - public String echo(String string) { - return jedisCluster.echo(string); - } - - @Override - public Long move(String key, int dbIndex) { - throw new UnsupportedOperationException(); - } - - @Override - public Long bitcount(String key) { - return jedisCluster.bitcount(key); - } - - @Override - public Long bitcount(String key, long start, long end) { - return jedisCluster.bitcount(key, start, end); - } - - @Override - public Long bitpos(String key, boolean value) { - throw new UnsupportedOperationException(); - } - - @Override - public Long bitpos(String key, boolean value, BitPosParams params) { - throw new UnsupportedOperationException(); - } - - @Override - public ScanResult> hscan(String key, String cursor) { - return jedisCluster.hscan(key, cursor); - } - - @Override - public ScanResult> hscan( - String key, String cursor, ScanParams params) { - ScanResult> scanResult = - jedisCluster.hscan(key.getBytes(), cursor.getBytes(), params); - List> results = - scanResult.getResult().stream() - .map( - entry -> - new AbstractMap.SimpleEntry<>( - new String(entry.getKey()), - new String(entry.getValue()))) - .collect(Collectors.toList()); - 
return new ScanResult<>(scanResult.getCursorAsBytes(), results); - } - - @Override - public ScanResult sscan(String key, String cursor) { - return jedisCluster.sscan(key, cursor); - } - - @Override - public ScanResult sscan(String key, String cursor, ScanParams params) { - ScanResult scanResult = - jedisCluster.sscan(key.getBytes(), cursor.getBytes(), params); - List results = - scanResult.getResult().stream().map(String::new).collect(Collectors.toList()); - return new ScanResult<>(scanResult.getCursorAsBytes(), results); - } - - @Override - public ScanResult zscan(String key, String cursor) { - return jedisCluster.zscan(key, cursor); - } - - @Override - public ScanResult zscan(String key, String cursor, ScanParams params) { - return jedisCluster.zscan(key.getBytes(), cursor.getBytes(), params); - } - - @Override - public Long pfadd(String key, String... elements) { - return jedisCluster.pfadd(key, elements); - } - - @Override - public long pfcount(String key) { - return jedisCluster.pfcount(key); - } - - @Override - public Long geoadd(String key, double longitude, double latitude, String member) { - return jedisCluster.geoadd(key, longitude, latitude, member); - } - - @Override - public Long geoadd(String key, Map memberCoordinateMap) { - return jedisCluster.geoadd(key, memberCoordinateMap); - } - - @Override - public Double geodist(String key, String member1, String member2) { - return jedisCluster.geodist(key, member1, member2); - } - - @Override - public Double geodist(String key, String member1, String member2, GeoUnit unit) { - return jedisCluster.geodist(key, member1, member2, unit); - } - - @Override - public List geohash(String key, String... members) { - return jedisCluster.geohash(key, members); - } - - @Override - public List geopos(String key, String... 
members) { - return jedisCluster.geopos(key, members); - } - - @Override - public List georadius( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - return jedisCluster.georadius(key, longitude, latitude, radius, unit); - } - - @Override - public List georadiusReadonly( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - return jedisCluster.georadiusReadonly(key, longitude, latitude, radius, unit); - } - - @Override - public List georadius( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - return jedisCluster.georadius(key, longitude, latitude, radius, unit, param); - } - - @Override - public List georadiusReadonly( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - return jedisCluster.georadiusReadonly(key, longitude, latitude, radius, unit, param); - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit) { - return jedisCluster.georadiusByMember(key, member, radius, unit); - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, GeoUnit unit) { - return jedisCluster.georadiusByMemberReadonly(key, member, radius, unit); - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { - return jedisCluster.georadiusByMember(key, member, radius, unit, param); - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { - return jedisCluster.georadiusByMemberReadonly(key, member, radius, unit, param); - } - - @Override - public List bitfield(String key, String... arguments) { - return jedisCluster.bitfield(key, arguments); - } - - @Override - public List bitfieldReadonly(String key, String... 
arguments) { - return jedisCluster.bitfieldReadonly(key, arguments); - } - - @Override - public Long hstrlen(String key, String field) { - return jedisCluster.hstrlen(key, field); - } - - @Override - public StreamEntryID xadd(String key, StreamEntryID id, Map hash) { - return jedisCluster.xadd(key, id, hash); - } - - @Override - public StreamEntryID xadd( - String key, - StreamEntryID id, - Map hash, - long maxLen, - boolean approximateLength) { - return jedisCluster.xadd(key, id, hash, maxLen, approximateLength); - } - - @Override - public Long xlen(String key) { - return jedisCluster.xlen(key); - } - - @Override - public List xrange(String key, StreamEntryID start, StreamEntryID end, int count) { - return jedisCluster.xrange(key, start, end, count); - } - - @Override - public List xrevrange( - String key, StreamEntryID end, StreamEntryID start, int count) { - return jedisCluster.xrevrange(key, end, start, count); - } - - @Override - public long xack(String key, String group, StreamEntryID... 
ids) { - return jedisCluster.xack(key, group, ids); - } - - @Override - public String xgroupCreate(String key, String groupname, StreamEntryID id, boolean makeStream) { - return jedisCluster.xgroupCreate(key, groupname, id, makeStream); - } - - @Override - public String xgroupSetID(String key, String groupname, StreamEntryID id) { - return jedisCluster.xgroupSetID(key, groupname, id); - } - - @Override - public long xgroupDestroy(String key, String groupname) { - return jedisCluster.xgroupDestroy(key, groupname); - } - - @Override - public Long xgroupDelConsumer(String key, String groupname, String consumername) { - return jedisCluster.xgroupDelConsumer(key, groupname, consumername); - } - - @Override - public List xpending( - String key, - String groupname, - StreamEntryID start, - StreamEntryID end, - int count, - String consumername) { - return jedisCluster.xpending(key, groupname, start, end, count, consumername); - } - - @Override - public long xdel(String key, StreamEntryID... ids) { - return jedisCluster.xdel(key, ids); - } - - @Override - public long xtrim(String key, long maxLen, boolean approximate) { - return jedisCluster.xtrim(key, maxLen, approximate); - } - - @Override - public List xclaim( - String key, - String group, - String consumername, - long minIdleTime, - long newIdleTime, - int retries, - boolean force, - StreamEntryID... 
ids) { - return jedisCluster.xclaim( - key, group, consumername, minIdleTime, newIdleTime, retries, force, ids); - } - - @Override - public StreamInfo xinfoStream(String key) { - return null; - } - - @Override - public List xinfoGroup(String key) { - return null; - } - - @Override - public List xinfoConsumers(String key, String group) { - return null; - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisMock.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisMock.java deleted file mode 100644 index 169146be3..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisMock.java +++ /dev/null @@ -1,1178 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import org.rarefiedredis.redis.IRedisClient; -import org.rarefiedredis.redis.IRedisSortedSet.ZsetPair; -import org.rarefiedredis.redis.RedisMock; - -import redis.clients.jedis.Jedis; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.exceptions.JedisException; -import redis.clients.jedis.params.ZAddParams; - -public class JedisMock extends Jedis { - - private final IRedisClient redis; - - public JedisMock() { - super(""); - this.redis = new RedisMock(); - } - - private Set toTupleSet(Set pairs) { - Set set = new HashSet<>(); - for (ZsetPair pair : pairs) { - set.add(new Tuple(pair.member, pair.score)); - } - return set; - } - - @Override - public String set(final String key, String value) { - try { - return redis.set(key, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String get(final String key) { - try { - return redis.get(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Boolean exists(final String key) { - try { - return redis.exists(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long del(final String... 
keys) { - try { - return redis.del(keys); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long del(String key) { - try { - return redis.del(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String type(final String key) { - try { - return redis.type(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long expire(final String key, final int seconds) { - try { - return redis.expire(key, seconds) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long expireAt(final String key, final long unixTime) { - try { - return redis.expireat(key, unixTime) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long ttl(final String key) { - try { - return redis.ttl(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long move(final String key, final int dbIndex) { - try { - return redis.move(key, dbIndex); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String getSet(final String key, final String value) { - try { - return redis.getset(key, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public List mget(final String... 
keys) { - try { - String[] mget = redis.mget(keys); - List lst = new ArrayList<>(mget.length); - for (String get : mget) { - lst.add(get); - } - return lst; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long setnx(final String key, final String value) { - try { - return redis.setnx(key, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String setex(final String key, final int seconds, final String value) { - try { - return redis.setex(key, seconds, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String mset(final String... keysvalues) { - try { - return redis.mset(keysvalues); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long msetnx(final String... keysvalues) { - try { - return redis.msetnx(keysvalues) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long decrBy(final String key, final long integer) { - try { - return redis.decrby(key, integer); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long decr(final String key) { - try { - return redis.decr(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long incrBy(final String key, final long integer) { - try { - return redis.incrby(key, integer); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Double incrByFloat(final String key, final double value) { - try { - return Double.parseDouble(redis.incrbyfloat(key, value)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long incr(final String key) { - try { - return redis.incr(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long append(final String key, final String value) { - try { - return redis.append(key, value); - } catch (Exception e) 
{ - throw new JedisException(e); - } - } - - @Override - public String substr(final String key, final int start, final int end) { - try { - return redis.getrange(key, start, end); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long hset(final String key, final String field, final String value) { - try { - return redis.hset(key, field, value) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String hget(final String key, final String field) { - try { - return redis.hget(key, field); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long hsetnx(final String key, final String field, final String value) { - try { - return redis.hsetnx(key, field, value) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String hmset(final String key, final Map hash) { - try { - String field = null, value = null; - String[] args = new String[(hash.size() - 1) * 2]; - int idx = 0; - for (String f : hash.keySet()) { - if (field == null) { - field = f; - value = hash.get(f); - continue; - } - args[idx] = f; - args[idx + 1] = hash.get(f); - idx += 2; - } - return redis.hmset(key, field, value, args); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public List hmget(final String key, final String... 
fields) { - try { - String field = fields[0]; - String[] f = new String[fields.length - 1]; - for (int idx = 1; idx < fields.length; ++idx) { - f[idx - 1] = fields[idx]; - } - return redis.hmget(key, field, f); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long hincrBy(final String key, final String field, final long value) { - try { - return redis.hincrby(key, field, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Double hincrByFloat(final String key, final String field, final double value) { - try { - return Double.parseDouble(redis.hincrbyfloat(key, field, value)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Boolean hexists(final String key, final String field) { - try { - return redis.hexists(key, field); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long hdel(final String key, final String... fields) { - try { - String field = fields[0]; - String[] f = new String[fields.length - 1]; - for (int idx = 1; idx < fields.length; ++idx) { - f[idx - 1] = fields[idx]; - } - return redis.hdel(key, field, f); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long hlen(final String key) { - try { - return redis.hlen(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set hkeys(final String key) { - try { - return redis.hkeys(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public List hvals(final String key) { - try { - return redis.hvals(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Map hgetAll(final String key) { - try { - return redis.hgetall(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long rpush(final String key, final String... 
strings) { - try { - String element = strings[0]; - String[] elements = new String[strings.length - 1]; - for (int idx = 1; idx < strings.length; ++idx) { - elements[idx - 1] = strings[idx]; - } - return redis.rpush(key, element, elements); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long lpush(final String key, final String... strings) { - try { - String element = strings[0]; - String[] elements = new String[strings.length - 1]; - for (int idx = 1; idx < strings.length; ++idx) { - elements[idx - 1] = strings[idx]; - } - return redis.lpush(key, element, elements); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long llen(final String key) { - try { - return redis.llen(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public List lrange(final String key, final long start, final long end) { - try { - return redis.lrange(key, start, end); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String ltrim(final String key, final long start, final long end) { - try { - return redis.ltrim(key, start, end); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String lindex(final String key, final long index) { - try { - return redis.lindex(key, index); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String lset(final String key, final long index, final String value) { - try { - return redis.lset(key, index, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long lrem(final String key, final long count, final String value) { - try { - return redis.lrem(key, count, value); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String lpop(final String key) { - try { - return redis.lpop(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - 
public String rpop(final String key) { - try { - return redis.rpop(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String rpoplpush(final String srckey, final String dstkey) { - try { - return redis.rpoplpush(srckey, dstkey); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long sadd(final String key, final String... members) { - try { - String member = members[0]; - String[] m = new String[members.length - 1]; - for (int idx = 1; idx < members.length; ++idx) { - m[idx - 1] = members[idx]; - } - return redis.sadd(key, member, m); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set smembers(final String key) { - try { - return redis.smembers(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long srem(final String key, final String... members) { - try { - String member = members[0]; - String[] m = new String[members.length - 1]; - for (int idx = 1; idx < members.length; ++idx) { - m[idx - 1] = members[idx]; - } - return redis.srem(key, member, m); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String spop(final String key) { - try { - return redis.spop(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long smove(final String srckey, final String dstkey, final String member) { - try { - return redis.smove(srckey, dstkey, member) ? 1L : 0L; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long scard(final String key) { - try { - return redis.scard(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Boolean sismember(final String key, final String member) { - try { - return redis.sismember(key, member); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set sinter(final String... 
keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sinter(key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long sinterstore(final String dstkey, final String... keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sinterstore(dstkey, key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set sunion(final String... keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sunion(key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long sunionstore(final String dstkey, final String... keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sunionstore(dstkey, key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set sdiff(final String... keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sdiff(key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long sdiffstore(final String dstkey, final String... 
keys) { - try { - String key = keys[0]; - String[] k = new String[keys.length - 1]; - for (int idx = 0; idx < keys.length; ++idx) { - k[idx - 1] = keys[idx]; - } - return redis.sdiffstore(dstkey, key, k); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String srandmember(final String key) { - try { - return redis.srandmember(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public List srandmember(final String key, final int count) { - try { - return redis.srandmember(key, count); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zadd(final String key, final double score, final String member) { - try { - return redis.zadd(key, new ZsetPair(member, score)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zadd(String key, double score, String member, ZAddParams params) { - - try { - - if (params.getParam("xx") != null) { - Double existing = redis.zscore(key, member); - if (existing == null) { - return 0L; - } - return redis.zadd(key, new ZsetPair(member, score)); - } else { - return redis.zadd(key, new ZsetPair(member, score)); - } - - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zadd(final String key, final Map scoreMembers) { - try { - Double score = null; - String member = null; - List scoresmembers = new ArrayList<>((scoreMembers.size() - 1) * 2); - for (String m : scoreMembers.keySet()) { - if (m == null) { - member = m; - score = scoreMembers.get(m); - continue; - } - scoresmembers.add(new ZsetPair(m, scoreMembers.get(m))); - } - return redis.zadd( - key, new ZsetPair(member, score), (ZsetPair[]) scoresmembers.toArray()); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrange(final String key, final long start, final long end) { - try { - return ZsetPair.members(redis.zrange(key, start, end)); - } catch 
(Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zrem(final String key, final String... members) { - try { - String member = members[0]; - String[] ms = new String[members.length - 1]; - for (int idx = 1; idx < members.length; ++idx) { - ms[idx - 1] = members[idx]; - } - return redis.zrem(key, member, ms); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Double zincrby(final String key, final double score, final String member) { - try { - return Double.parseDouble(redis.zincrby(key, score, member)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zrank(final String key, final String member) { - try { - return redis.zrank(key, member); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zrevrank(final String key, final String member) { - try { - return redis.zrevrank(key, member); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrange(final String key, final long start, final long end) { - try { - return ZsetPair.members(redis.zrevrange(key, start, end)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeWithScores(final String key, final long start, final long end) { - try { - return toTupleSet(redis.zrange(key, start, end, "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeWithScores(final String key, final long start, final long end) { - try { - return toTupleSet(redis.zrevrange(key, start, end, "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zcard(final String key) { - try { - return redis.zcard(key); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Double zscore(final String key, final String member) { - try { - return redis.zscore(key, member); - } 
catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public String watch(final String... keys) { - try { - for (String key : keys) { - redis.watch(key); - } - return "OK"; - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zcount(final String key, final double min, final double max) { - try { - return redis.zcount(key, min, max); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zcount(final String key, final String min, final String max) { - try { - return redis.zcount(key, Double.parseDouble(min), Double.parseDouble(max)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScore(final String key, final double min, final double max) { - try { - return ZsetPair.members( - redis.zrangebyscore(key, String.valueOf(min), String.valueOf(max))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScore(final String key, final String min, final String max) { - try { - return ZsetPair.members(redis.zrangebyscore(key, min, max)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScore( - final String key, - final double min, - final double max, - final int offset, - final int count) { - try { - return ZsetPair.members( - redis.zrangebyscore( - key, - String.valueOf(min), - String.valueOf(max), - "limit", - String.valueOf(offset), - String.valueOf(count))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScore( - final String key, - final String min, - final String max, - final int offset, - final int count) { - try { - return ZsetPair.members( - redis.zrangebyscore( - key, min, max, "limit", String.valueOf(offset), String.valueOf(count))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScoreWithScores( - final String 
key, final double min, final double max) { - try { - return toTupleSet( - redis.zrangebyscore( - key, String.valueOf(min), String.valueOf(max), "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScoreWithScores( - final String key, final String min, final String max) { - try { - return toTupleSet(redis.zrangebyscore(key, min, max, "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScoreWithScores( - final String key, - final double min, - final double max, - final int offset, - final int count) { - try { - return toTupleSet( - redis.zrangebyscore( - key, - String.valueOf(min), - String.valueOf(max), - "limit", - String.valueOf(offset), - String.valueOf(count), - "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrangeByScoreWithScores( - final String key, - final String min, - final String max, - final int offset, - final int count) { - try { - return toTupleSet( - redis.zrangebyscore( - key, - min, - max, - "limit", - String.valueOf(offset), - String.valueOf(count), - "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScore(final String key, final double max, final double min) { - try { - return ZsetPair.members( - redis.zrevrangebyscore(key, String.valueOf(max), String.valueOf(min))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScore(final String key, final String max, final String min) { - try { - return ZsetPair.members(redis.zrevrangebyscore(key, max, min)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScore( - final String key, - final double max, - final double min, - final int offset, - final int count) { - try { - return ZsetPair.members( - redis.zrevrangebyscore( - key, - 
String.valueOf(max), - String.valueOf(min), - "limit", - String.valueOf(offset), - String.valueOf(count))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - final String key, final double max, final double min) { - try { - return toTupleSet( - redis.zrevrangebyscore( - key, String.valueOf(max), String.valueOf(min), "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - final String key, - final double max, - final double min, - final int offset, - final int count) { - try { - return toTupleSet( - redis.zrevrangebyscore( - key, - String.valueOf(max), - String.valueOf(min), - "limit", - String.valueOf(offset), - String.valueOf(count), - "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - final String key, - final String max, - final String min, - final int offset, - final int count) { - try { - return toTupleSet( - redis.zrevrangebyscore( - key, - max, - min, - "limit", - String.valueOf(offset), - String.valueOf(count), - "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScore( - final String key, - final String max, - final String min, - final int offset, - final int count) { - try { - return ZsetPair.members( - redis.zrevrangebyscore( - key, max, min, "limit", String.valueOf(offset), String.valueOf(count))); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - final String key, final String max, final String min) { - try { - return toTupleSet(redis.zrevrangebyscore(key, max, min, "withscores")); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zremrangeByRank(final String key, final long start, final long end) { - try { - return 
redis.zremrangebyrank(key, start, end); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zremrangeByScore(final String key, final double start, final double end) { - try { - return redis.zremrangebyscore(key, String.valueOf(start), String.valueOf(end)); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zremrangeByScore(final String key, final String start, final String end) { - try { - return redis.zremrangebyscore(key, start, end); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public Long zunionstore(final String dstkey, final String... sets) { - try { - return redis.zunionstore(dstkey, sets.length, sets); - } catch (Exception e) { - throw new JedisException(e); - } - } - - @Override - public ScanResult sscan(String key, String cursor, ScanParams params) { - try { - org.rarefiedredis.redis.ScanResult> sr = - redis.sscan(key, Long.parseLong(cursor), "count", "1000000"); - List list = new ArrayList<>(sr.results); - return new ScanResult<>("0", list); - } catch (Exception e) { - throw new JedisException(e); - } - } - - public ScanResult> hscan(final String key, final String cursor) { - try { - org.rarefiedredis.redis.ScanResult> mockr = - redis.hscan(key, Long.parseLong(cursor), "count", "1000000"); - Map results = mockr.results; - List> list = new ArrayList<>(results.entrySet()); - return new ScanResult<>("0", list); - } catch (Exception e) { - throw new JedisException(e); - } - } - - public ScanResult zscan(final String key, final String cursor) { - try { - org.rarefiedredis.redis.ScanResult> sr = - redis.zscan(key, Long.parseLong(cursor), "count", "1000000"); - List list = new ArrayList<>(sr.results); - List tl = new LinkedList<>(); - list.forEach(p -> tl.add(new Tuple(p.member, p.score))); - return new ScanResult<>("0", tl); - } catch (Exception e) { - throw new JedisException(e); - } - } -} diff --git 
a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisProxy.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisProxy.java deleted file mode 100644 index 38abcb964..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisProxy.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.Conditional; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.redis.config.AnyRedisCondition; - -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.commands.JedisCommands; -import redis.clients.jedis.params.ZAddParams; - -import static com.netflix.conductor.redis.config.RedisCommonConfiguration.DEFAULT_CLIENT_INJECTION_NAME; - -/** Proxy for the {@link JedisCommands} object. 
*/ -@Component -@Conditional(AnyRedisCondition.class) -public class JedisProxy { - - private static final Logger LOGGER = LoggerFactory.getLogger(JedisProxy.class); - - protected JedisCommands jedisCommands; - - public JedisProxy(@Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands) { - this.jedisCommands = jedisCommands; - } - - public Set zrange(String key, long start, long end) { - return jedisCommands.zrange(key, start, end); - } - - public Set zrangeByScoreWithScores(String key, double maxScore, int count) { - return jedisCommands.zrangeByScoreWithScores(key, 0, maxScore, 0, count); - } - - public Set zrangeByScore(String key, double maxScore, int count) { - return jedisCommands.zrangeByScore(key, 0, maxScore, 0, count); - } - - public Set zrangeByScore(String key, double minScore, double maxScore, int count) { - return jedisCommands.zrangeByScore(key, minScore, maxScore, 0, count); - } - - public ScanResult zscan(String key, int cursor) { - return jedisCommands.zscan(key, "" + cursor); - } - - public String get(String key) { - return jedisCommands.get(key); - } - - public Long zcard(String key) { - return jedisCommands.zcard(key); - } - - public Long del(String key) { - return jedisCommands.del(key); - } - - public Long zrem(String key, String member) { - return jedisCommands.zrem(key, member); - } - - public long zremrangeByScore(String key, String start, String end) { - return jedisCommands.zremrangeByScore(key, start, end); - } - - public long zcount(String key, double min, double max) { - return jedisCommands.zcount(key, min, max); - } - - public String set(String key, String value) { - return jedisCommands.set(key, value); - } - - public Long setnx(String key, String value) { - return jedisCommands.setnx(key, value); - } - - public Long zadd(String key, double score, String member) { - return jedisCommands.zadd(key, score, member); - } - - public Long zaddnx(String key, double score, String member) { - ZAddParams params = 
ZAddParams.zAddParams().nx(); - return jedisCommands.zadd(key, score, member, params); - } - - public Long hset(String key, String field, String value) { - return jedisCommands.hset(key, field, value); - } - - public Long hsetnx(String key, String field, String value) { - return jedisCommands.hsetnx(key, field, value); - } - - public Long hlen(String key) { - return jedisCommands.hlen(key); - } - - public String hget(String key, String field) { - return jedisCommands.hget(key, field); - } - - public Optional optionalHget(String key, String field) { - return Optional.ofNullable(jedisCommands.hget(key, field)); - } - - public Map hscan(String key, int count) { - Map m = new HashMap<>(); - int cursor = 0; - do { - ScanResult> scanResult = jedisCommands.hscan(key, "" + cursor); - cursor = Integer.parseInt(scanResult.getCursor()); - for (Entry r : scanResult.getResult()) { - m.put(r.getKey(), r.getValue()); - } - if (m.size() > count) { - break; - } - } while (cursor > 0); - - return m; - } - - public Map hgetAll(String key) { - Map m = new HashMap<>(); - int cursor = 0; - do { - ScanResult> scanResult = jedisCommands.hscan(key, "" + cursor); - cursor = Integer.parseInt(scanResult.getCursor()); - for (Entry r : scanResult.getResult()) { - m.put(r.getKey(), r.getValue()); - } - } while (cursor > 0); - - return m; - } - - public List hvals(String key) { - LOGGER.trace("hvals {}", key); - return jedisCommands.hvals(key); - } - - public Set hkeys(String key) { - LOGGER.trace("hkeys {}", key); - Set keys = new HashSet<>(); - int cursor = 0; - do { - ScanResult> sr = jedisCommands.hscan(key, "" + cursor); - cursor = Integer.parseInt(sr.getCursor()); - List> result = sr.getResult(); - for (Entry e : result) { - keys.add(e.getKey()); - } - } while (cursor > 0); - - return keys; - } - - public Long hdel(String key, String... 
fields) { - LOGGER.trace("hdel {} {}", key, fields[0]); - return jedisCommands.hdel(key, fields); - } - - public Long expire(String key, int seconds) { - return jedisCommands.expire(key, seconds); - } - - public Boolean hexists(String key, String field) { - return jedisCommands.hexists(key, field); - } - - public Long sadd(String key, String value) { - LOGGER.trace("sadd {} {}", key, value); - return jedisCommands.sadd(key, value); - } - - public Long srem(String key, String member) { - LOGGER.trace("srem {} {}", key, member); - return jedisCommands.srem(key, member); - } - - public boolean sismember(String key, String member) { - return jedisCommands.sismember(key, member); - } - - public Set smembers(String key) { - LOGGER.trace("smembers {}", key); - Set r = new HashSet<>(); - int cursor = 0; - ScanParams sp = new ScanParams(); - sp.count(50); - - do { - ScanResult scanResult = jedisCommands.sscan(key, "" + cursor, sp); - cursor = Integer.parseInt(scanResult.getCursor()); - r.addAll(scanResult.getResult()); - } while (cursor > 0); - - return r; - } - - public Long scard(String key) { - return jedisCommands.scard(key); - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java deleted file mode 100644 index 50f603228..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java +++ /dev/null @@ -1,1276 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import redis.clients.jedis.BitPosParams; -import redis.clients.jedis.GeoCoordinate; -import redis.clients.jedis.GeoRadiusResponse; -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPoolAbstract; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.StreamConsumersInfo; -import redis.clients.jedis.StreamEntry; -import redis.clients.jedis.StreamEntryID; -import redis.clients.jedis.StreamGroupInfo; -import redis.clients.jedis.StreamInfo; -import redis.clients.jedis.StreamPendingEntry; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.commands.JedisCommands; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; - -public class JedisSentinel implements JedisCommands { - - private final JedisPoolAbstract jedisPool; - - public JedisSentinel(JedisPoolAbstract jedisPool) { - this.jedisPool = jedisPool; - } - - @Override - public String set(String key, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.set(key, value); - } - } - - @Override - public String set(String key, String value, SetParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.set(key, value, params); - } - } - - @Override - public String get(String 
key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.get(key); - } - } - - @Override - public Boolean exists(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.exists(key); - } - } - - @Override - public Long persist(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.persist(key); - } - } - - @Override - public String type(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.type(key); - } - } - - @Override - public byte[] dump(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.dump(key); - } - } - - @Override - public String restore(String key, int ttl, byte[] serializedValue) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.restore(key, ttl, serializedValue); - } - } - - @Override - public String restoreReplace(String key, int ttl, byte[] serializedValue) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.restoreReplace(key, ttl, serializedValue); - } - } - - @Override - public Long expire(String key, int seconds) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.expire(key, seconds); - } - } - - @Override - public Long pexpire(String key, long milliseconds) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.pexpire(key, milliseconds); - } - } - - @Override - public Long expireAt(String key, long unixTime) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.expireAt(key, unixTime); - } - } - - @Override - public Long pexpireAt(String key, long millisecondsTimestamp) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.pexpireAt(key, millisecondsTimestamp); - } - } - - @Override - public Long ttl(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.ttl(key); - } - } - - @Override - public Long pttl(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.pttl(key); - } - } - - @Override - public Long touch(String key) { - 
try (Jedis jedis = jedisPool.getResource()) { - return jedis.touch(key); - } - } - - @Override - public Boolean setbit(String key, long offset, boolean value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.setbit(key, offset, value); - } - } - - @Override - public Boolean setbit(String key, long offset, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.setbit(key, offset, value); - } - } - - @Override - public Boolean getbit(String key, long offset) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.getbit(key, offset); - } - } - - @Override - public Long setrange(String key, long offset, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.setrange(key, offset, value); - } - } - - @Override - public String getrange(String key, long startOffset, long endOffset) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.getrange(key, startOffset, endOffset); - } - } - - @Override - public String getSet(String key, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.getSet(key, value); - } - } - - @Override - public Long setnx(String key, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.setnx(key, value); - } - } - - @Override - public String setex(String key, int seconds, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.setex(key, seconds, value); - } - } - - @Override - public String psetex(String key, long milliseconds, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.psetex(key, milliseconds, value); - } - } - - @Override - public Long decrBy(String key, long integer) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.decrBy(key, integer); - } - } - - @Override - public Long decr(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.decr(key); - } - } - - @Override - public Long incrBy(String key, long integer) { - try 
(Jedis jedis = jedisPool.getResource()) { - return jedis.incrBy(key, integer); - } - } - - @Override - public Double incrByFloat(String key, double value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.incrByFloat(key, value); - } - } - - @Override - public Long incr(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.incr(key); - } - } - - @Override - public Long append(String key, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.append(key, value); - } - } - - @Override - public String substr(String key, int start, int end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.substr(key, start, end); - } - } - - @Override - public Long hset(String key, String field, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hset(key, field, value); - } - } - - @Override - public Long hset(String key, Map hash) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hset(key, hash); - } - } - - @Override - public String hget(String key, String field) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hget(key, field); - } - } - - @Override - public Long hsetnx(String key, String field, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hsetnx(key, field, value); - } - } - - @Override - public String hmset(String key, Map hash) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hmset(key, hash); - } - } - - @Override - public List hmget(String key, String... 
fields) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hmget(key, fields); - } - } - - @Override - public Long hincrBy(String key, String field, long value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hincrBy(key, field, value); - } - } - - @Override - public Double hincrByFloat(String key, String field, double value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hincrByFloat(key, field, value); - } - } - - @Override - public Boolean hexists(String key, String field) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hexists(key, field); - } - } - - @Override - public Long hdel(String key, String... field) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hdel(key, field); - } - } - - @Override - public Long hlen(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hlen(key); - } - } - - @Override - public Set hkeys(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hkeys(key); - } - } - - @Override - public List hvals(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hvals(key); - } - } - - @Override - public Map hgetAll(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hgetAll(key); - } - } - - @Override - public Long rpush(String key, String... string) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.rpush(key, string); - } - } - - @Override - public Long lpush(String key, String... 
string) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lpush(key, string); - } - } - - @Override - public Long llen(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.llen(key); - } - } - - @Override - public List lrange(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lrange(key, start, end); - } - } - - @Override - public String ltrim(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.ltrim(key, start, end); - } - } - - @Override - public String lindex(String key, long index) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lindex(key, index); - } - } - - @Override - public String lset(String key, long index, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lset(key, index, value); - } - } - - @Override - public Long lrem(String key, long count, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lrem(key, count, value); - } - } - - @Override - public String lpop(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lpop(key); - } - } - - @Override - public String rpop(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.rpop(key); - } - } - - @Override - public Long sadd(String key, String... member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sadd(key, member); - } - } - - @Override - public Set smembers(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.smembers(key); - } - } - - @Override - public Long srem(String key, String... 
member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.srem(key, member); - } - } - - @Override - public String spop(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.spop(key); - } - } - - @Override - public Set spop(String key, long count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.spop(key, count); - } - } - - @Override - public Long scard(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.scard(key); - } - } - - @Override - public Boolean sismember(String key, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sismember(key, member); - } - } - - @Override - public String srandmember(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.srandmember(key); - } - } - - @Override - public List srandmember(String key, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.srandmember(key, count); - } - } - - @Override - public Long strlen(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.strlen(key); - } - } - - @Override - public Long zadd(String key, double score, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zadd(key, score, member); - } - } - - @Override - public Long zadd(String key, double score, String member, ZAddParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zadd(key, score, member, params); - } - } - - @Override - public Long zadd(String key, Map scoreMembers) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zadd(key, scoreMembers); - } - } - - @Override - public Long zadd(String key, Map scoreMembers, ZAddParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zadd(key, scoreMembers, params); - } - } - - @Override - public Set zrange(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrange(key, start, end); - } - 
} - - @Override - public Long zrem(String key, String... member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrem(key, member); - } - } - - @Override - public Double zincrby(String key, double score, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zincrby(key, score, member); - } - } - - @Override - public Double zincrby(String key, double score, String member, ZIncrByParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zincrby(key, score, member, params); - } - } - - @Override - public Long zrank(String key, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrank(key, member); - } - } - - @Override - public Long zrevrank(String key, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrank(key, member); - } - } - - @Override - public Set zrevrange(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrange(key, start, end); - } - } - - @Override - public Set zrangeWithScores(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeWithScores(key, start, end); - } - } - - @Override - public Set zrevrangeWithScores(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeWithScores(key, start, end); - } - } - - @Override - public Long zcard(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zcard(key); - } - } - - @Override - public Double zscore(String key, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zscore(key, member); - } - } - - @Override - public Tuple zpopmax(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zpopmax(key); - } - } - - @Override - public Set zpopmax(String key, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zpopmax(key, count); - } - } - - 
@Override - public Tuple zpopmin(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zpopmin(key); - } - } - - @Override - public Set zpopmin(String key, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zpopmin(key, count); - } - } - - @Override - public List sort(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sort(key); - } - } - - @Override - public List sort(String key, SortingParams sortingParameters) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sort(key, sortingParameters); - } - } - - @Override - public Long zcount(String key, double min, double max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zcount(key, min, max); - } - } - - @Override - public Long zcount(String key, String min, String max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zcount(key, min, max); - } - } - - @Override - public Set zrangeByScore(String key, double min, double max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScore(key, min, max); - } - } - - @Override - public Set zrangeByScore(String key, String min, String max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScore(key, min, max); - } - } - - @Override - public Set zrevrangeByScore(String key, double max, double min) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScore(key, max, min); - } - } - - @Override - public Set zrangeByScore(String key, double min, double max, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScore(key, min, max, offset, count); - } - } - - @Override - public Set zrevrangeByScore(String key, String max, String min) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScore(key, max, min); - } - } - - @Override - public Set zrangeByScore(String key, String min, String max, int offset, int count) { - try (Jedis jedis 
= jedisPool.getResource()) { - return jedis.zrangeByScore(key, min, max, offset, count); - } - } - - @Override - public Set zrevrangeByScore(String key, double max, double min, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScore(key, max, min, offset, count); - } - } - - @Override - public Set zrangeByScoreWithScores(String key, double min, double max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScoreWithScores(key, min, max); - } - } - - @Override - public Set zrevrangeByScoreWithScores(String key, double max, double min) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScoreWithScores(key, max, min); - } - } - - @Override - public Set zrangeByScoreWithScores( - String key, double min, double max, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScoreWithScores(key, min, max, offset, count); - } - } - - @Override - public Set zrevrangeByScore(String key, String max, String min, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScore(key, max, min, offset, count); - } - } - - @Override - public Set zrangeByScoreWithScores(String key, String min, String max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScoreWithScores(key, min, max); - } - } - - @Override - public Set zrevrangeByScoreWithScores(String key, String max, String min) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScoreWithScores(key, max, min); - } - } - - @Override - public Set zrangeByScoreWithScores( - String key, String min, String max, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByScoreWithScores(key, min, max, offset, count); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, double max, double min, int offset, int count) { - try (Jedis jedis = 
jedisPool.getResource()) { - return jedis.zrevrangeByScoreWithScores(key, max, min, offset, count); - } - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, String max, String min, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByScoreWithScores(key, max, min, offset, count); - } - } - - @Override - public Long zremrangeByRank(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zremrangeByRank(key, start, end); - } - } - - @Override - public Long zremrangeByScore(String key, double start, double end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zremrangeByScore(key, start, end); - } - } - - @Override - public Long zremrangeByScore(String key, String start, String end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zremrangeByScore(key, start, end); - } - } - - @Override - public Long zlexcount(String key, String min, String max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zlexcount(key, min, max); - } - } - - @Override - public Set zrangeByLex(String key, String min, String max) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByLex(key, min, max); - } - } - - @Override - public Set zrangeByLex(String key, String min, String max, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrangeByLex(key, min, max, offset, count); - } - } - - @Override - public Set zrevrangeByLex(String key, String max, String min) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByLex(key, max, min); - } - } - - @Override - public Set zrevrangeByLex(String key, String max, String min, int offset, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zrevrangeByLex(key, max, min, offset, count); - } - } - - @Override - public Long zremrangeByLex(String key, String min, String max) { - try (Jedis jedis = 
jedisPool.getResource()) { - return jedis.zremrangeByLex(key, min, max); - } - } - - @Override - public Long linsert(String key, ListPosition where, String pivot, String value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.linsert(key, where, pivot, value); - } - } - - @Override - public Long lpushx(String key, String... string) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.lpushx(key, string); - } - } - - @Override - public Long rpushx(String key, String... string) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.rpushx(key, string); - } - } - - @Override - public List blpop(int timeout, String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.blpop(timeout, key); - } - } - - @Override - public List brpop(int timeout, String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.brpop(timeout, key); - } - } - - @Override - public Long del(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.del(key); - } - } - - @Override - public Long unlink(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.unlink(key); - } - } - - @Override - public String echo(String string) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.echo(string); - } - } - - @Override - public Long move(String key, int dbIndex) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.move(key, dbIndex); - } - } - - @Override - public Long bitcount(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitcount(key); - } - } - - @Override - public Long bitcount(String key, long start, long end) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitcount(key, start, end); - } - } - - @Override - public Long bitpos(String key, boolean value) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitpos(key, value); - } - } - - @Override - public Long bitpos(String key, boolean value, BitPosParams 
params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitpos(key, value, params); - } - } - - @Override - public ScanResult> hscan(String key, String cursor) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hscan(key, cursor); - } - } - - @Override - public ScanResult> hscan(String key, String cursor, ScanParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hscan(key, cursor, params); - } - } - - @Override - public ScanResult sscan(String key, String cursor) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sscan(key, cursor); - } - } - - @Override - public ScanResult sscan(String key, String cursor, ScanParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.sscan(key, cursor, params); - } - } - - @Override - public ScanResult zscan(String key, String cursor) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zscan(key, cursor); - } - } - - @Override - public ScanResult zscan(String key, String cursor, ScanParams params) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.zscan(key, cursor, params); - } - } - - @Override - public Long pfadd(String key, String... 
elements) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.pfadd(key, elements); - } - } - - @Override - public long pfcount(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.pfcount(key); - } - } - - @Override - public Long geoadd(String key, double longitude, double latitude, String member) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geoadd(key, longitude, latitude, member); - } - } - - @Override - public Long geoadd(String key, Map memberCoordinateMap) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geoadd(key, memberCoordinateMap); - } - } - - @Override - public Double geodist(String key, String member1, String member2) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geodist(key, member1, member2); - } - } - - @Override - public Double geodist(String key, String member1, String member2, GeoUnit unit) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geodist(key, member1, member2, unit); - } - } - - @Override - public List geohash(String key, String... members) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geohash(key, members); - } - } - - @Override - public List geopos(String key, String... 
members) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.geopos(key, members); - } - } - - @Override - public List georadius( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadius(key, longitude, latitude, radius, unit); - } - } - - @Override - public List georadiusReadonly( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusReadonly(key, longitude, latitude, radius, unit); - } - } - - @Override - public List georadius( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadius(key, longitude, latitude, radius, unit, param); - } - } - - @Override - public List georadiusReadonly( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusReadonly(key, longitude, latitude, radius, unit, param); - } - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusByMember(key, member, radius, unit); - } - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, GeoUnit unit) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusByMemberReadonly(key, member, radius, unit); - } - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusByMember(key, member, radius, unit, param); - } - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, 
GeoUnit unit, GeoRadiusParam param) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.georadiusByMemberReadonly(key, member, radius, unit, param); - } - } - - @Override - public List bitfield(String key, String... arguments) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitfield(key, arguments); - } - } - - @Override - public List bitfieldReadonly(String key, String... arguments) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.bitfieldReadonly(key, arguments); - } - } - - @Override - public Long hstrlen(String key, String field) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.hstrlen(key, field); - } - } - - @Override - public StreamEntryID xadd(String key, StreamEntryID id, Map hash) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xadd(key, id, hash); - } - } - - @Override - public StreamEntryID xadd( - String key, - StreamEntryID id, - Map hash, - long maxLen, - boolean approximateLength) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xadd(key, id, hash, maxLen, approximateLength); - } - } - - @Override - public Long xlen(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xlen(key); - } - } - - @Override - public List xrange(String key, StreamEntryID start, StreamEntryID end, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xrange(key, start, end, count); - } - } - - @Override - public List xrevrange( - String key, StreamEntryID end, StreamEntryID start, int count) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xrevrange(key, end, start, count); - } - } - - @Override - public long xack(String key, String group, StreamEntryID... 
ids) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xack(key, group, ids); - } - } - - @Override - public String xgroupCreate(String key, String groupname, StreamEntryID id, boolean makeStream) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xgroupCreate(key, groupname, id, makeStream); - } - } - - @Override - public String xgroupSetID(String key, String groupname, StreamEntryID id) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xgroupSetID(key, groupname, id); - } - } - - @Override - public long xgroupDestroy(String key, String groupname) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xgroupDestroy(key, groupname); - } - } - - @Override - public Long xgroupDelConsumer(String key, String groupname, String consumername) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xgroupDelConsumer(key, groupname, consumername); - } - } - - @Override - public List xpending( - String key, - String groupname, - StreamEntryID start, - StreamEntryID end, - int count, - String consumername) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xpending(key, groupname, start, end, count, consumername); - } - } - - @Override - public long xdel(String key, StreamEntryID... ids) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xdel(key, ids); - } - } - - @Override - public long xtrim(String key, long maxLen, boolean approximate) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xtrim(key, maxLen, approximate); - } - } - - @Override - public List xclaim( - String key, - String group, - String consumername, - long minIdleTime, - long newIdleTime, - int retries, - boolean force, - StreamEntryID... 
ids) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xclaim( - key, group, consumername, minIdleTime, newIdleTime, retries, force, ids); - } - } - - @Override - public StreamInfo xinfoStream(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xinfoStream(key); - } - } - - @Override - public List xinfoGroup(String key) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xinfoGroup(key); - } - } - - @Override - public List xinfoConsumers(String key, String group) { - try (Jedis jedis = jedisPool.getResource()) { - return jedis.xinfoConsumers(key, group); - } - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisStandalone.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisStandalone.java deleted file mode 100644 index 97b326e44..000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisStandalone.java +++ /dev/null @@ -1,962 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Function; - -import redis.clients.jedis.BitPosParams; -import redis.clients.jedis.GeoCoordinate; -import redis.clients.jedis.GeoRadiusResponse; -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.StreamConsumersInfo; -import redis.clients.jedis.StreamEntry; -import redis.clients.jedis.StreamEntryID; -import redis.clients.jedis.StreamGroupInfo; -import redis.clients.jedis.StreamInfo; -import redis.clients.jedis.StreamPendingEntry; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.commands.JedisCommands; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; - -/** A {@link JedisCommands} implementation that delegates to {@link JedisPool}. 
*/ -public class JedisStandalone implements JedisCommands { - - private final JedisPool jedisPool; - - public JedisStandalone(JedisPool jedisPool) { - this.jedisPool = jedisPool; - } - - private R executeInJedis(Function function) { - try (Jedis jedis = jedisPool.getResource()) { - return function.apply(jedis); - } - } - - @Override - public String set(String key, String value) { - return executeInJedis(jedis -> jedis.set(key, value)); - } - - @Override - public String set(String key, String value, SetParams params) { - return executeInJedis(jedis -> jedis.set(key, value, params)); - } - - @Override - public String get(String key) { - return executeInJedis(jedis -> jedis.get(key)); - } - - @Override - public Boolean exists(String key) { - return executeInJedis(jedis -> jedis.exists(key)); - } - - @Override - public Long persist(String key) { - return executeInJedis(jedis -> jedis.persist(key)); - } - - @Override - public String type(String key) { - return executeInJedis(jedis -> jedis.type(key)); - } - - @Override - public byte[] dump(String key) { - return executeInJedis(jedis -> jedis.dump(key)); - } - - @Override - public String restore(String key, int ttl, byte[] serializedValue) { - return executeInJedis(jedis -> jedis.restore(key, ttl, serializedValue)); - } - - @Override - public String restoreReplace(String key, int ttl, byte[] serializedValue) { - return executeInJedis(jedis -> jedis.restoreReplace(key, ttl, serializedValue)); - } - - @Override - public Long expire(String key, int seconds) { - return executeInJedis(jedis -> jedis.expire(key, seconds)); - } - - @Override - public Long pexpire(String key, long milliseconds) { - return executeInJedis(jedis -> jedis.pexpire(key, milliseconds)); - } - - @Override - public Long expireAt(String key, long unixTime) { - return executeInJedis(jedis -> jedis.expireAt(key, unixTime)); - } - - @Override - public Long pexpireAt(String key, long millisecondsTimestamp) { - return executeInJedis(jedis -> 
jedis.pexpireAt(key, millisecondsTimestamp)); - } - - @Override - public Long ttl(String key) { - return executeInJedis(jedis -> jedis.ttl(key)); - } - - @Override - public Long pttl(String key) { - return executeInJedis(jedis -> jedis.pttl(key)); - } - - @Override - public Long touch(String key) { - return executeInJedis(jedis -> jedis.touch(key)); - } - - @Override - public Boolean setbit(String key, long offset, boolean value) { - return executeInJedis(jedis -> jedis.setbit(key, offset, value)); - } - - @Override - public Boolean setbit(String key, long offset, String value) { - return executeInJedis(jedis -> jedis.setbit(key, offset, value)); - } - - @Override - public Boolean getbit(String key, long offset) { - return executeInJedis(jedis -> jedis.getbit(key, offset)); - } - - @Override - public Long setrange(String key, long offset, String value) { - return executeInJedis(jedis -> jedis.setrange(key, offset, value)); - } - - @Override - public String getrange(String key, long startOffset, long endOffset) { - return executeInJedis(jedis -> jedis.getrange(key, startOffset, endOffset)); - } - - @Override - public String getSet(String key, String value) { - return executeInJedis(jedis -> jedis.getSet(key, value)); - } - - @Override - public Long setnx(String key, String value) { - return executeInJedis(jedis -> jedis.setnx(key, value)); - } - - @Override - public String setex(String key, int seconds, String value) { - return executeInJedis(jedis -> jedis.setex(key, seconds, value)); - } - - @Override - public String psetex(String key, long milliseconds, String value) { - return executeInJedis(jedis -> jedis.psetex(key, milliseconds, value)); - } - - @Override - public Long decrBy(String key, long decrement) { - return executeInJedis(jedis -> jedis.decrBy(key, decrement)); - } - - @Override - public Long decr(String key) { - return executeInJedis(jedis -> jedis.decr(key)); - } - - @Override - public Long incrBy(String key, long increment) { - return 
executeInJedis(jedis -> jedis.incrBy(key, increment)); - } - - @Override - public Double incrByFloat(String key, double increment) { - return executeInJedis(jedis -> jedis.incrByFloat(key, increment)); - } - - @Override - public Long incr(String key) { - return executeInJedis(jedis -> jedis.incr(key)); - } - - @Override - public Long append(String key, String value) { - return executeInJedis(jedis -> jedis.append(key, value)); - } - - @Override - public String substr(String key, int start, int end) { - return executeInJedis(jedis -> jedis.substr(key, start, end)); - } - - @Override - public Long hset(String key, String field, String value) { - return executeInJedis(jedis -> jedis.hset(key, field, value)); - } - - @Override - public Long hset(String key, Map hash) { - return executeInJedis(jedis -> jedis.hset(key, hash)); - } - - @Override - public String hget(String key, String field) { - return executeInJedis(jedis -> jedis.hget(key, field)); - } - - @Override - public Long hsetnx(String key, String field, String value) { - return executeInJedis(jedis -> jedis.hsetnx(key, field, value)); - } - - @Override - public String hmset(String key, Map hash) { - return executeInJedis(jedis -> jedis.hmset(key, hash)); - } - - @Override - public List hmget(String key, String... fields) { - return executeInJedis(jedis -> jedis.hmget(key, fields)); - } - - @Override - public Long hincrBy(String key, String field, long value) { - return executeInJedis(jedis -> jedis.hincrBy(key, field, value)); - } - - @Override - public Double hincrByFloat(String key, String field, double value) { - return executeInJedis(jedis -> jedis.hincrByFloat(key, field, value)); - } - - @Override - public Boolean hexists(String key, String field) { - return executeInJedis(jedis -> jedis.hexists(key, field)); - } - - @Override - public Long hdel(String key, String... 
field) { - return executeInJedis(jedis -> jedis.hdel(key, field)); - } - - @Override - public Long hlen(String key) { - return executeInJedis(jedis -> jedis.hlen(key)); - } - - @Override - public Set hkeys(String key) { - return executeInJedis(jedis -> jedis.hkeys(key)); - } - - @Override - public List hvals(String key) { - return executeInJedis(jedis -> jedis.hvals(key)); - } - - @Override - public Map hgetAll(String key) { - return executeInJedis(jedis -> jedis.hgetAll(key)); - } - - @Override - public Long rpush(String key, String... string) { - return executeInJedis(jedis -> jedis.rpush(key)); - } - - @Override - public Long lpush(String key, String... string) { - return executeInJedis(jedis -> jedis.lpush(key, string)); - } - - @Override - public Long llen(String key) { - return executeInJedis(jedis -> jedis.llen(key)); - } - - @Override - public List lrange(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.lrange(key, start, stop)); - } - - @Override - public String ltrim(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.ltrim(key, start, stop)); - } - - @Override - public String lindex(String key, long index) { - return executeInJedis(jedis -> jedis.lindex(key, index)); - } - - @Override - public String lset(String key, long index, String value) { - return executeInJedis(jedis -> jedis.lset(key, index, value)); - } - - @Override - public Long lrem(String key, long count, String value) { - return executeInJedis(jedis -> jedis.lrem(key, count, value)); - } - - @Override - public String lpop(String key) { - return executeInJedis(jedis -> jedis.lpop(key)); - } - - @Override - public String rpop(String key) { - return executeInJedis(jedis -> jedis.rpop(key)); - } - - @Override - public Long sadd(String key, String... 
member) { - return executeInJedis(jedis -> jedis.sadd(key, member)); - } - - @Override - public Set smembers(String key) { - return executeInJedis(jedis -> jedis.smembers(key)); - } - - @Override - public Long srem(String key, String... member) { - return executeInJedis(jedis -> jedis.srem(key, member)); - } - - @Override - public String spop(String key) { - return executeInJedis(jedis -> jedis.spop(key)); - } - - @Override - public Set spop(String key, long count) { - return executeInJedis(jedis -> jedis.spop(key, count)); - } - - @Override - public Long scard(String key) { - return executeInJedis(jedis -> jedis.scard(key)); - } - - @Override - public Boolean sismember(String key, String member) { - return executeInJedis(jedis -> jedis.sismember(key, member)); - } - - @Override - public String srandmember(String key) { - return executeInJedis(jedis -> jedis.srandmember(key)); - } - - @Override - public List srandmember(String key, int count) { - return executeInJedis(jedis -> jedis.srandmember(key, count)); - } - - @Override - public Long strlen(String key) { - return executeInJedis(jedis -> jedis.strlen(key)); - } - - @Override - public Long zadd(String key, double score, String member) { - return executeInJedis(jedis -> jedis.zadd(key, score, member)); - } - - @Override - public Long zadd(String key, double score, String member, ZAddParams params) { - return executeInJedis(jedis -> jedis.zadd(key, score, member, params)); - } - - @Override - public Long zadd(String key, Map scoreMembers) { - return executeInJedis(jedis -> jedis.zadd(key, scoreMembers)); - } - - @Override - public Long zadd(String key, Map scoreMembers, ZAddParams params) { - return executeInJedis(jedis -> jedis.zadd(key, scoreMembers, params)); - } - - @Override - public Set zrange(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.zrange(key, start, stop)); - } - - @Override - public Long zrem(String key, String... 
members) { - return executeInJedis(jedis -> jedis.zrem(key, members)); - } - - @Override - public Double zincrby(String key, double increment, String member) { - return executeInJedis(jedis -> jedis.zincrby(key, increment, member)); - } - - @Override - public Double zincrby(String key, double increment, String member, ZIncrByParams params) { - return executeInJedis(jedis -> jedis.zincrby(key, increment, member, params)); - } - - @Override - public Long zrank(String key, String member) { - return executeInJedis(jedis -> jedis.zrank(key, member)); - } - - @Override - public Long zrevrank(String key, String member) { - return executeInJedis(jedis -> jedis.zrevrank(key, member)); - } - - @Override - public Set zrevrange(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.zrevrange(key, start, stop)); - } - - @Override - public Set zrangeWithScores(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.zrangeWithScores(key, start, stop)); - } - - @Override - public Set zrevrangeWithScores(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.zrevrangeWithScores(key, start, stop)); - } - - @Override - public Long zcard(String key) { - return executeInJedis(jedis -> jedis.zcard(key)); - } - - @Override - public Double zscore(String key, String member) { - return executeInJedis(jedis -> jedis.zscore(key, member)); - } - - @Override - public Tuple zpopmax(String key) { - return executeInJedis(jedis -> jedis.zpopmax(key)); - } - - @Override - public Set zpopmax(String key, int count) { - return executeInJedis(jedis -> jedis.zpopmax(key, count)); - } - - @Override - public Tuple zpopmin(String key) { - return executeInJedis(jedis -> jedis.zpopmin(key)); - } - - @Override - public Set zpopmin(String key, int count) { - return executeInJedis(jedis -> jedis.zpopmin(key, count)); - } - - @Override - public List sort(String key) { - return executeInJedis(jedis -> jedis.sort(key)); - } - - @Override - public List 
sort(String key, SortingParams sortingParameters) { - return executeInJedis(jedis -> jedis.sort(key, sortingParameters)); - } - - @Override - public Long zcount(String key, double min, double max) { - return executeInJedis(jedis -> jedis.zcount(key, min, max)); - } - - @Override - public Long zcount(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zcount(key, min, max)); - } - - @Override - public Set zrangeByScore(String key, double min, double max) { - return executeInJedis(jedis -> jedis.zrangeByScore(key, min, max)); - } - - @Override - public Set zrangeByScore(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zrangeByScore(key, min, max)); - } - - @Override - public Set zrevrangeByScore(String key, double max, double min) { - return executeInJedis(jedis -> jedis.zrevrangeByScore(key, max, min)); - } - - @Override - public Set zrangeByScore(String key, double min, double max, int offset, int count) { - return executeInJedis(jedis -> jedis.zrangeByScore(key, min, max, offset, count)); - } - - @Override - public Set zrevrangeByScore(String key, String max, String min) { - return executeInJedis(jedis -> jedis.zrevrangeByScore(key, max, min)); - } - - @Override - public Set zrangeByScore(String key, String min, String max, int offset, int count) { - return executeInJedis(jedis -> jedis.zrangeByScore(key, min, max, offset, count)); - } - - @Override - public Set zrevrangeByScore(String key, double max, double min, int offset, int count) { - return executeInJedis(jedis -> jedis.zrevrangeByScore(key, max, min, offset, count)); - } - - @Override - public Set zrangeByScoreWithScores(String key, double min, double max) { - return executeInJedis(jedis -> jedis.zrangeByScoreWithScores(key, min, max)); - } - - @Override - public Set zrevrangeByScoreWithScores(String key, double max, double min) { - return executeInJedis(jedis -> jedis.zrevrangeByScoreWithScores(key, max, min)); - } - - @Override - public Set 
zrangeByScoreWithScores( - String key, double min, double max, int offset, int count) { - return executeInJedis(jedis -> jedis.zrangeByScoreWithScores(key, min, max, offset, count)); - } - - @Override - public Set zrevrangeByScore(String key, String max, String min, int offset, int count) { - return executeInJedis(jedis -> jedis.zrevrangeByScore(key, max, min, offset, count)); - } - - @Override - public Set zrangeByScoreWithScores(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zrangeByScoreWithScores(key, min, max)); - } - - @Override - public Set zrevrangeByScoreWithScores(String key, String max, String min) { - return executeInJedis(jedis -> jedis.zrevrangeByScoreWithScores(key, max, min)); - } - - @Override - public Set zrangeByScoreWithScores( - String key, String min, String max, int offset, int count) { - return executeInJedis(jedis -> jedis.zrangeByScoreWithScores(key, min, max, offset, count)); - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, double max, double min, int offset, int count) { - return executeInJedis( - jedis -> jedis.zrevrangeByScoreWithScores(key, max, min, offset, count)); - } - - @Override - public Set zrevrangeByScoreWithScores( - String key, String max, String min, int offset, int count) { - return executeInJedis( - jedis -> jedis.zrevrangeByScoreWithScores(key, max, min, offset, count)); - } - - @Override - public Long zremrangeByRank(String key, long start, long stop) { - return executeInJedis(jedis -> jedis.zremrangeByRank(key, start, stop)); - } - - @Override - public Long zremrangeByScore(String key, double min, double max) { - return executeInJedis(jedis -> jedis.zremrangeByScore(key, min, max)); - } - - @Override - public Long zremrangeByScore(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zremrangeByScore(key, min, max)); - } - - @Override - public Long zlexcount(String key, String min, String max) { - return executeInJedis(jedis -> 
jedis.zlexcount(key, min, max)); - } - - @Override - public Set zrangeByLex(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zrangeByLex(key, min, max)); - } - - @Override - public Set zrangeByLex(String key, String min, String max, int offset, int count) { - return executeInJedis(jedis -> jedis.zrangeByLex(key, min, max, offset, count)); - } - - @Override - public Set zrevrangeByLex(String key, String max, String min) { - return executeInJedis(jedis -> jedis.zrevrangeByLex(key, max, min)); - } - - @Override - public Set zrevrangeByLex(String key, String max, String min, int offset, int count) { - return executeInJedis(jedis -> jedis.zrevrangeByLex(key, max, min, offset, count)); - } - - @Override - public Long zremrangeByLex(String key, String min, String max) { - return executeInJedis(jedis -> jedis.zremrangeByLex(key, min, max)); - } - - @Override - public Long linsert(String key, ListPosition where, String pivot, String value) { - return executeInJedis(jedis -> jedis.linsert(key, where, pivot, value)); - } - - @Override - public Long lpushx(String key, String... string) { - return executeInJedis(jedis -> jedis.lpushx(key, string)); - } - - @Override - public Long rpushx(String key, String... 
string) { - return executeInJedis(jedis -> jedis.rpushx(key, string)); - } - - @Override - public List blpop(int timeout, String key) { - return executeInJedis(jedis -> jedis.blpop(timeout, key)); - } - - @Override - public List brpop(int timeout, String key) { - return executeInJedis(jedis -> jedis.brpop(timeout, key)); - } - - @Override - public Long del(String key) { - return executeInJedis(jedis -> jedis.del(key)); - } - - @Override - public Long unlink(String key) { - return executeInJedis(jedis -> jedis.unlink(key)); - } - - @Override - public String echo(String string) { - return executeInJedis(jedis -> jedis.echo(string)); - } - - @Override - public Long move(String key, int dbIndex) { - return executeInJedis(jedis -> jedis.move(key, dbIndex)); - } - - @Override - public Long bitcount(String key) { - return executeInJedis(jedis -> jedis.bitcount(key)); - } - - @Override - public Long bitcount(String key, long start, long end) { - return executeInJedis(jedis -> jedis.bitcount(key, start, end)); - } - - @Override - public Long bitpos(String key, boolean value) { - return executeInJedis(jedis -> jedis.bitpos(key, value)); - } - - @Override - public Long bitpos(String key, boolean value, BitPosParams params) { - return executeInJedis(jedis -> jedis.bitpos(key, value, params)); - } - - @Override - public ScanResult> hscan(String key, String cursor) { - return executeInJedis(jedis -> jedis.hscan(key, cursor)); - } - - @Override - public ScanResult> hscan( - String key, String cursor, ScanParams params) { - return executeInJedis(jedis -> jedis.hscan(key, cursor, params)); - } - - @Override - public ScanResult sscan(String key, String cursor) { - return executeInJedis(jedis -> jedis.sscan(key, cursor)); - } - - @Override - public ScanResult zscan(String key, String cursor) { - return executeInJedis(jedis -> jedis.zscan(key, cursor)); - } - - @Override - public ScanResult zscan(String key, String cursor, ScanParams params) { - return executeInJedis(jedis -> 
jedis.zscan(key, cursor, params)); - } - - @Override - public ScanResult sscan(String key, String cursor, ScanParams params) { - return executeInJedis(jedis -> jedis.sscan(key, cursor, params)); - } - - @Override - public Long pfadd(String key, String... elements) { - return executeInJedis(jedis -> jedis.pfadd(key, elements)); - } - - @Override - public long pfcount(String key) { - return executeInJedis(jedis -> jedis.pfcount(key)); - } - - @Override - public Long geoadd(String key, double longitude, double latitude, String member) { - return executeInJedis(jedis -> jedis.geoadd(key, longitude, latitude, member)); - } - - @Override - public Long geoadd(String key, Map memberCoordinateMap) { - return executeInJedis(jedis -> jedis.geoadd(key, memberCoordinateMap)); - } - - @Override - public Double geodist(String key, String member1, String member2) { - return executeInJedis(jedis -> jedis.geodist(key, member1, member2)); - } - - @Override - public Double geodist(String key, String member1, String member2, GeoUnit unit) { - return executeInJedis(jedis -> jedis.geodist(key, member1, member2, unit)); - } - - @Override - public List geohash(String key, String... members) { - return executeInJedis(jedis -> jedis.geohash(key, members)); - } - - @Override - public List geopos(String key, String... 
members) { - return executeInJedis(jedis -> jedis.geopos(key, members)); - } - - @Override - public List georadius( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - return executeInJedis(jedis -> jedis.georadius(key, longitude, latitude, radius, unit)); - } - - @Override - public List georadiusReadonly( - String key, double longitude, double latitude, double radius, GeoUnit unit) { - return executeInJedis( - jedis -> jedis.georadiusReadonly(key, longitude, latitude, radius, unit)); - } - - @Override - public List georadius( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - return executeInJedis( - jedis -> jedis.georadius(key, longitude, latitude, radius, unit, param)); - } - - @Override - public List georadiusReadonly( - String key, - double longitude, - double latitude, - double radius, - GeoUnit unit, - GeoRadiusParam param) { - return executeInJedis( - jedis -> jedis.georadiusReadonly(key, longitude, latitude, radius, unit, param)); - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit) { - return executeInJedis(jedis -> jedis.georadiusByMember(key, member, radius, unit)); - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, GeoUnit unit) { - return executeInJedis(jedis -> jedis.georadiusByMemberReadonly(key, member, radius, unit)); - } - - @Override - public List georadiusByMember( - String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { - return executeInJedis(jedis -> jedis.georadiusByMember(key, member, radius, unit, param)); - } - - @Override - public List georadiusByMemberReadonly( - String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { - return executeInJedis( - jedis -> jedis.georadiusByMemberReadonly(key, member, radius, unit, param)); - } - - @Override - public List bitfield(String key, String... 
arguments) { - return executeInJedis(jedis -> jedis.bitfield(key, arguments)); - } - - @Override - public List bitfieldReadonly(String key, String... arguments) { - return executeInJedis(jedis -> jedis.bitfieldReadonly(key, arguments)); - } - - @Override - public Long hstrlen(String key, String field) { - return executeInJedis(jedis -> jedis.hstrlen(key, field)); - } - - @Override - public StreamEntryID xadd(String key, StreamEntryID id, Map hash) { - return executeInJedis(jedis -> jedis.xadd(key, id, hash)); - } - - @Override - public StreamEntryID xadd( - String key, - StreamEntryID id, - Map hash, - long maxLen, - boolean approximateLength) { - return executeInJedis(jedis -> jedis.xadd(key, id, hash, maxLen, approximateLength)); - } - - @Override - public Long xlen(String key) { - return executeInJedis(jedis -> jedis.xlen(key)); - } - - @Override - public List xrange(String key, StreamEntryID start, StreamEntryID end, int count) { - return executeInJedis(jedis -> jedis.xrange(key, start, end, count)); - } - - @Override - public List xrevrange( - String key, StreamEntryID end, StreamEntryID start, int count) { - return executeInJedis(jedis -> jedis.xrevrange(key, end, start, count)); - } - - @Override - public long xack(String key, String group, StreamEntryID... 
ids) { - return executeInJedis(jedis -> jedis.xack(key, group, ids)); - } - - @Override - public String xgroupCreate(String key, String groupname, StreamEntryID id, boolean makeStream) { - return executeInJedis(jedis -> jedis.xgroupCreate(key, groupname, id, makeStream)); - } - - @Override - public String xgroupSetID(String key, String groupname, StreamEntryID id) { - return executeInJedis(jedis -> jedis.xgroupSetID(key, groupname, id)); - } - - @Override - public long xgroupDestroy(String key, String groupname) { - return executeInJedis(jedis -> jedis.xgroupDestroy(key, groupname)); - } - - @Override - public Long xgroupDelConsumer(String key, String groupname, String consumername) { - return executeInJedis(jedis -> jedis.hsetnx(key, groupname, consumername)); - } - - @Override - public List xpending( - String key, - String groupname, - StreamEntryID start, - StreamEntryID end, - int count, - String consumername) { - return executeInJedis( - jedis -> jedis.xpending(key, groupname, start, end, count, consumername)); - } - - @Override - public long xdel(String key, StreamEntryID... ids) { - return executeInJedis(jedis -> jedis.xdel(key, ids)); - } - - @Override - public long xtrim(String key, long maxLen, boolean approximate) { - return executeInJedis(jedis -> jedis.xtrim(key, maxLen, approximate)); - } - - @Override - public List xclaim( - String key, - String group, - String consumername, - long minIdleTime, - long newIdleTime, - int retries, - boolean force, - StreamEntryID... 
ids) { - return executeInJedis( - jedis -> - jedis.xclaim( - key, - group, - consumername, - minIdleTime, - newIdleTime, - retries, - force, - ids)); - } - - @Override - public StreamInfo xinfoStream(String key) { - return executeInJedis(jedis -> jedis.xinfoStream(key)); - } - - @Override - public List xinfoGroup(String key) { - return executeInJedis(jedis -> jedis.xinfoGroup(key)); - } - - @Override - public List xinfoConsumers(String key, String group) { - return executeInJedis(jedis -> jedis.xinfoConsumers(key, group)); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/config/utils/RedisQueuesShardingStrategyProviderTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/config/utils/RedisQueuesShardingStrategyProviderTest.java deleted file mode 100644 index 2ec4b3470..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/config/utils/RedisQueuesShardingStrategyProviderTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.config.utils; - -import java.util.Collections; - -import org.junit.Test; - -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider; -import com.netflix.dyno.queues.Message; -import com.netflix.dyno.queues.ShardSupplier; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class RedisQueuesShardingStrategyProviderTest { - - @Test - public void testStrategy() { - ShardSupplier shardSupplier = mock(ShardSupplier.class); - doReturn("current").when(shardSupplier).getCurrentShard(); - RedisQueuesShardingStrategyProvider.LocalOnlyStrategy strat = - new RedisQueuesShardingStrategyProvider.LocalOnlyStrategy(shardSupplier); - - assertEquals("current", strat.getNextShard(Collections.emptyList(), new Message("a", "b"))); - } - - @Test - public void testProvider() { - ShardSupplier shardSupplier = mock(ShardSupplier.class); - RedisProperties properties = mock(RedisProperties.class); - when(properties.getQueueShardingStrategy()).thenReturn("localOnly"); - RedisQueuesShardingStrategyProvider stratProvider = - new RedisQueuesShardingStrategyProvider(shardSupplier, properties); - assertTrue( - stratProvider.get() - instanceof RedisQueuesShardingStrategyProvider.LocalOnlyStrategy); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/BaseDynoDAOTest.java 
b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/BaseDynoDAOTest.java deleted file mode 100644 index a3ce44e52..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/BaseDynoDAOTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class BaseDynoDAOTest { - - @Mock private JedisProxy jedisProxy; - - @Mock private ObjectMapper objectMapper; - - private RedisProperties properties; - private ConductorProperties conductorProperties; - - private BaseDynoDAO baseDynoDAO; - - @Before - public void setUp() { - properties = mock(RedisProperties.class); - conductorProperties = mock(ConductorProperties.class); - this.baseDynoDAO = - new BaseDynoDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Test - public void testNsKey() { - assertEquals("", baseDynoDAO.nsKey()); - - String[] keys = {"key1", "key2"}; - assertEquals("key1.key2", baseDynoDAO.nsKey(keys)); - - when(properties.getWorkflowNamespacePrefix()).thenReturn("test"); - assertEquals("test", baseDynoDAO.nsKey()); - - assertEquals("test.key1.key2", baseDynoDAO.nsKey(keys)); - - when(conductorProperties.getStack()).thenReturn("stack"); - assertEquals("test.stack.key1.key2", baseDynoDAO.nsKey(keys)); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/DynoQueueDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/DynoQueueDAOTest.java deleted file mode 100644 index 
6efa7ccd4..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/DynoQueueDAOTest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.queues.ShardSupplier; -import com.netflix.dyno.queues.redis.RedisQueues; -import com.netflix.dyno.queues.redis.sharding.ShardingStrategy; - -import redis.clients.jedis.commands.JedisCommands; - -import static com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider.LOCAL_ONLY_STRATEGY; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class DynoQueueDAOTest { - - private QueueDAO queueDAO; - - @Before - public void init() { - RedisProperties properties = mock(RedisProperties.class); - when(properties.getQueueShardingStrategy()).thenReturn(LOCAL_ONLY_STRATEGY); - JedisCommands jedisMock = new JedisMock(); - ShardSupplier shardSupplier = - new ShardSupplier() { - - @Override - public Set getQueueShards() { - return new HashSet<>(Collections.singletonList("a")); - } - - @Override - public String getCurrentShard() { - return "a"; - } - - @Override - public String getShardForHost(Host host) { - return "a"; 
- } - }; - ShardingStrategy shardingStrategy = - new RedisQueuesShardingStrategyProvider(shardSupplier, properties).get(); - RedisQueues redisQueues = - new RedisQueues( - jedisMock, jedisMock, "", shardSupplier, 60_000, 60_000, shardingStrategy); - queueDAO = new DynoQueueDAO(redisQueues); - } - - @Rule public ExpectedException expected = ExpectedException.none(); - - @Test - public void test() { - String queueName = "TestQueue"; - long offsetTimeInSecond = 0; - - for (int i = 0; i < 10; i++) { - String messageId = "msg" + i; - queueDAO.push(queueName, messageId, offsetTimeInSecond); - } - int size = queueDAO.getSize(queueName); - assertEquals(10, size); - Map details = queueDAO.queuesDetail(); - assertEquals(1, details.size()); - assertEquals(10L, details.get(queueName).longValue()); - - for (int i = 0; i < 10; i++) { - String messageId = "msg" + i; - queueDAO.pushIfNotExists(queueName, messageId, offsetTimeInSecond); - } - - List popped = queueDAO.pop(queueName, 10, 100); - assertNotNull(popped); - assertEquals(10, popped.size()); - - Map>> verbose = queueDAO.queuesDetailVerbose(); - assertEquals(1, verbose.size()); - long shardSize = verbose.get(queueName).get("a").get("size"); - long unackedSize = verbose.get(queueName).get("a").get("uacked"); - assertEquals(0, shardSize); - assertEquals(10, unackedSize); - - popped.forEach(messageId -> queueDAO.ack(queueName, messageId)); - - verbose = queueDAO.queuesDetailVerbose(); - assertEquals(1, verbose.size()); - shardSize = verbose.get(queueName).get("a").get("size"); - unackedSize = verbose.get(queueName).get("a").get("uacked"); - assertEquals(0, shardSize); - assertEquals(0, unackedSize); - - popped = queueDAO.pop(queueName, 10, 100); - assertNotNull(popped); - assertEquals(0, popped.size()); - - for (int i = 0; i < 10; i++) { - String messageId = "msg" + i; - queueDAO.pushIfNotExists(queueName, messageId, offsetTimeInSecond); - } - size = queueDAO.getSize(queueName); - assertEquals(10, size); - - for (int i = 0; i 
< 10; i++) { - String messageId = "msg" + i; - queueDAO.remove(queueName, messageId); - } - - size = queueDAO.getSize(queueName); - assertEquals(0, size); - - for (int i = 0; i < 10; i++) { - String messageId = "msg" + i; - queueDAO.pushIfNotExists(queueName, messageId, offsetTimeInSecond); - } - queueDAO.flush(queueName); - size = queueDAO.getSize(queueName); - assertEquals(0, size); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAOTest.java deleted file mode 100644 index 6b53f00e6..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisEventHandlerDAOTest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.List; -import java.util.UUID; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.events.EventHandler.Action; -import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; -import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import redis.clients.jedis.commands.JedisCommands; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class RedisEventHandlerDAOTest { - - private RedisEventHandlerDAO redisEventHandlerDAO; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void init() { - ConductorProperties conductorProperties = mock(ConductorProperties.class); - RedisProperties properties = mock(RedisProperties.class); - JedisCommands jedisMock = new 
JedisMock(); - JedisProxy jedisProxy = new JedisProxy(jedisMock); - - redisEventHandlerDAO = - new RedisEventHandlerDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Test - public void testEventHandlers() { - String event1 = "SQS::arn:account090:sqstest1"; - String event2 = "SQS::arn:account090:sqstest2"; - - EventHandler eventHandler = new EventHandler(); - eventHandler.setName(UUID.randomUUID().toString()); - eventHandler.setActive(false); - Action action = new Action(); - action.setAction(Type.start_workflow); - action.setStart_workflow(new StartWorkflow()); - action.getStart_workflow().setName("test_workflow"); - eventHandler.getActions().add(action); - eventHandler.setEvent(event1); - - redisEventHandlerDAO.addEventHandler(eventHandler); - List allEventHandlers = redisEventHandlerDAO.getAllEventHandlers(); - assertNotNull(allEventHandlers); - assertEquals(1, allEventHandlers.size()); - assertEquals(eventHandler.getName(), allEventHandlers.get(0).getName()); - assertEquals(eventHandler.getEvent(), allEventHandlers.get(0).getEvent()); - - List byEvents = redisEventHandlerDAO.getEventHandlersForEvent(event1, true); - assertNotNull(byEvents); - assertEquals(0, byEvents.size()); // event is marked as in-active - - eventHandler.setActive(true); - eventHandler.setEvent(event2); - redisEventHandlerDAO.updateEventHandler(eventHandler); - - allEventHandlers = redisEventHandlerDAO.getAllEventHandlers(); - assertNotNull(allEventHandlers); - assertEquals(1, allEventHandlers.size()); - - byEvents = redisEventHandlerDAO.getEventHandlersForEvent(event1, true); - assertNotNull(byEvents); - assertEquals(0, byEvents.size()); - - byEvents = redisEventHandlerDAO.getEventHandlersForEvent(event2, true); - assertNotNull(byEvents); - assertEquals(1, byEvents.size()); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisExecutionDAOTest.java 
b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisExecutionDAOTest.java deleted file mode 100644 index 7bc844f39..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisExecutionDAOTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.time.Duration; -import java.util.Collections; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.ExecutionDAOTest; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import redis.clients.jedis.commands.JedisCommands; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class RedisExecutionDAOTest extends ExecutionDAOTest { - - private RedisExecutionDAO executionDAO; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void init() { - ConductorProperties conductorProperties = mock(ConductorProperties.class); - RedisProperties properties = mock(RedisProperties.class); - 
when(properties.getEventExecutionPersistenceTTL()).thenReturn(Duration.ofSeconds(5)); - JedisCommands jedisMock = new JedisMock(); - JedisProxy jedisProxy = new JedisProxy(jedisMock); - - executionDAO = - new RedisExecutionDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Test - public void testCorrelateTaskToWorkflowInDS() { - String workflowId = "workflowId"; - String taskId = "taskId1"; - String taskDefName = "task1"; - - TaskDef def = new TaskDef(); - def.setName("task1"); - def.setConcurrentExecLimit(1); - - TaskModel task = new TaskModel(); - task.setTaskId(taskId); - task.setWorkflowInstanceId(workflowId); - task.setReferenceTaskName("ref_name"); - task.setTaskDefName(taskDefName); - task.setTaskType(taskDefName); - task.setStatus(TaskModel.Status.IN_PROGRESS); - List tasks = executionDAO.createTasks(Collections.singletonList(task)); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - - executionDAO.correlateTaskToWorkflowInDS(taskId, workflowId); - tasks = executionDAO.getTasksForWorkflow(workflowId); - assertNotNull(tasks); - assertEquals(workflowId, tasks.get(0).getWorkflowInstanceId()); - assertEquals(taskId, tasks.get(0).getTaskId()); - } - - @Override - protected ExecutionDAO getExecutionDAO() { - return executionDAO; - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisMetadataDAOTest.java deleted file mode 100644 index 9a303ce52..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisMetadataDAOTest.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.time.Duration; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.RetryLogic; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import redis.clients.jedis.commands.JedisCommands; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class RedisMetadataDAOTest { - - private RedisMetadataDAO 
redisMetadataDAO; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void init() { - ConductorProperties conductorProperties = mock(ConductorProperties.class); - RedisProperties properties = mock(RedisProperties.class); - when(properties.getTaskDefCacheRefreshInterval()).thenReturn(Duration.ofSeconds(60)); - JedisCommands jedisMock = new JedisMock(); - JedisProxy jedisProxy = new JedisProxy(jedisMock); - - redisMetadataDAO = - new RedisMetadataDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Test(expected = ApplicationException.class) - public void testDup() { - WorkflowDef def = new WorkflowDef(); - def.setName("testDup"); - def.setVersion(1); - - redisMetadataDAO.createWorkflowDef(def); - redisMetadataDAO.createWorkflowDef(def); - } - - @Test - public void testWorkflowDefOperations() { - - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - def.setVersion(1); - def.setDescription("description"); - def.setCreatedBy("unit_test"); - def.setCreateTime(1L); - def.setOwnerApp("ownerApp"); - def.setUpdatedBy("unit_test2"); - def.setUpdateTime(2L); - - redisMetadataDAO.createWorkflowDef(def); - - List all = redisMetadataDAO.getAllWorkflowDefs(); - assertNotNull(all); - assertEquals(1, all.size()); - assertEquals("test", all.get(0).getName()); - assertEquals(1, all.get(0).getVersion()); - - WorkflowDef found = redisMetadataDAO.getWorkflowDef("test", 1).get(); - assertEquals(def, found); - - def.setVersion(2); - redisMetadataDAO.createWorkflowDef(def); - - all = redisMetadataDAO.getAllWorkflowDefs(); - assertNotNull(all); - assertEquals(2, all.size()); - assertEquals("test", all.get(0).getName()); - assertEquals(1, all.get(0).getVersion()); - - found = redisMetadataDAO.getLatestWorkflowDef(def.getName()).get(); - assertEquals(def.getName(), found.getName()); - assertEquals(def.getVersion(), found.getVersion()); - assertEquals(2, found.getVersion()); - - all = redisMetadataDAO.getAllVersions(def.getName()); - 
assertNotNull(all); - assertEquals(2, all.size()); - assertEquals("test", all.get(0).getName()); - assertEquals("test", all.get(1).getName()); - assertEquals(1, all.get(0).getVersion()); - assertEquals(2, all.get(1).getVersion()); - - def.setDescription("updated"); - redisMetadataDAO.updateWorkflowDef(def); - found = redisMetadataDAO.getWorkflowDef(def.getName(), def.getVersion()).get(); - assertEquals(def.getDescription(), found.getDescription()); - - List allnames = redisMetadataDAO.findAll(); - assertNotNull(allnames); - assertEquals(1, allnames.size()); - assertEquals(def.getName(), allnames.get(0)); - - redisMetadataDAO.removeWorkflowDef("test", 1); - Optional deleted = redisMetadataDAO.getWorkflowDef("test", 1); - assertFalse(deleted.isPresent()); - redisMetadataDAO.removeWorkflowDef("test", 2); - Optional latestDef = redisMetadataDAO.getLatestWorkflowDef("test"); - assertFalse(latestDef.isPresent()); - - WorkflowDef[] workflowDefsArray = new WorkflowDef[3]; - for (int i = 1; i <= 3; i++) { - workflowDefsArray[i - 1] = new WorkflowDef(); - workflowDefsArray[i - 1].setName("test"); - workflowDefsArray[i - 1].setVersion(i); - workflowDefsArray[i - 1].setDescription("description"); - workflowDefsArray[i - 1].setCreatedBy("unit_test"); - workflowDefsArray[i - 1].setCreateTime(1L); - workflowDefsArray[i - 1].setOwnerApp("ownerApp"); - workflowDefsArray[i - 1].setUpdatedBy("unit_test2"); - workflowDefsArray[i - 1].setUpdateTime(2L); - redisMetadataDAO.createWorkflowDef(workflowDefsArray[i - 1]); - } - redisMetadataDAO.removeWorkflowDef("test", 1); - redisMetadataDAO.removeWorkflowDef("test", 2); - WorkflowDef workflow = redisMetadataDAO.getLatestWorkflowDef("test").get(); - assertEquals(workflow.getVersion(), 3); - } - - @Test(expected = ApplicationException.class) - public void removeInvalidWorkflowDef() { - redisMetadataDAO.removeWorkflowDef("hello", 1); - } - - @Test - public void testTaskDefOperations() { - - TaskDef def = new TaskDef("taskA"); - 
def.setDescription("description"); - def.setCreatedBy("unit_test"); - def.setCreateTime(1L); - def.setInputKeys(Arrays.asList("a", "b", "c")); - def.setOutputKeys(Arrays.asList("01", "o2")); - def.setOwnerApp("ownerApp"); - def.setRetryCount(3); - def.setRetryDelaySeconds(100); - def.setRetryLogic(RetryLogic.FIXED); - def.setTimeoutPolicy(TimeoutPolicy.ALERT_ONLY); - def.setUpdatedBy("unit_test2"); - def.setUpdateTime(2L); - def.setRateLimitPerFrequency(50); - def.setRateLimitFrequencyInSeconds(1); - - redisMetadataDAO.createTaskDef(def); - - TaskDef found = redisMetadataDAO.getTaskDef(def.getName()); - assertEquals(def, found); - - def.setDescription("updated description"); - redisMetadataDAO.updateTaskDef(def); - found = redisMetadataDAO.getTaskDef(def.getName()); - assertEquals(def, found); - assertEquals("updated description", found.getDescription()); - - for (int i = 0; i < 9; i++) { - TaskDef tdf = new TaskDef("taskA" + i); - redisMetadataDAO.createTaskDef(tdf); - } - - List all = redisMetadataDAO.getAllTaskDefs(); - assertNotNull(all); - assertEquals(10, all.size()); - Set allnames = all.stream().map(TaskDef::getName).collect(Collectors.toSet()); - assertEquals(10, allnames.size()); - List sorted = allnames.stream().sorted().collect(Collectors.toList()); - assertEquals(def.getName(), sorted.get(0)); - - for (int i = 0; i < 9; i++) { - assertEquals(def.getName() + i, sorted.get(i + 1)); - } - - for (int i = 0; i < 9; i++) { - redisMetadataDAO.removeTaskDef(def.getName() + i); - } - all = redisMetadataDAO.getAllTaskDefs(); - assertNotNull(all); - assertEquals(1, all.size()); - assertEquals(def.getName(), all.get(0).getName()); - } - - @Test(expected = ApplicationException.class) - public void testRemoveTaskDef() { - redisMetadataDAO.removeTaskDef("test" + UUID.randomUUID().toString()); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisPollDataDAOTest.java 
b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisPollDataDAOTest.java deleted file mode 100644 index 3553856ce..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisPollDataDAOTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import org.junit.Before; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.dao.PollDataDAO; -import com.netflix.conductor.dao.PollDataDAOTest; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import redis.clients.jedis.commands.JedisCommands; - -import static org.mockito.Mockito.mock; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class RedisPollDataDAOTest extends PollDataDAOTest { - - private PollDataDAO redisPollDataDAO; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void init() { - ConductorProperties conductorProperties = mock(ConductorProperties.class); - RedisProperties properties = mock(RedisProperties.class); - JedisCommands jedisMock = new JedisMock(); - JedisProxy jedisProxy = new JedisProxy(jedisMock); - - redisPollDataDAO = - new RedisPollDataDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Override - protected PollDataDAO getPollDataDAO() { - return redisPollDataDAO; - } -} diff --git 
a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisRateLimitDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisRateLimitDAOTest.java deleted file mode 100644 index 30877fc18..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/dao/RedisRateLimitDAOTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.dao; - -import java.util.UUID; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.config.TestObjectMapperConfiguration; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.config.ConductorProperties; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.jedis.JedisMock; -import com.netflix.conductor.redis.jedis.JedisProxy; - -import com.fasterxml.jackson.databind.ObjectMapper; -import redis.clients.jedis.commands.JedisCommands; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; - -@ContextConfiguration(classes = {TestObjectMapperConfiguration.class}) -@RunWith(SpringRunner.class) -public class RedisRateLimitDAOTest { - - private RedisRateLimitingDAO rateLimitingDao; - - @Autowired private ObjectMapper objectMapper; - - @Before - public void init() { - ConductorProperties conductorProperties = mock(ConductorProperties.class); - RedisProperties properties = mock(RedisProperties.class); - JedisCommands jedisMock = new JedisMock(); - JedisProxy jedisProxy = new JedisProxy(jedisMock); - - rateLimitingDao = - new RedisRateLimitingDAO(jedisProxy, objectMapper, conductorProperties, properties); - } - - @Test - public void 
testExceedsRateLimitWhenNoRateLimitSet() { - TaskDef taskDef = new TaskDef("TestTaskDefinition"); - TaskModel task = new TaskModel(); - task.setTaskId(UUID.randomUUID().toString()); - task.setTaskDefName(taskDef.getName()); - assertFalse(rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef)); - } - - @Test - public void testExceedsRateLimitWithinLimit() { - TaskDef taskDef = new TaskDef("TestTaskDefinition"); - taskDef.setRateLimitFrequencyInSeconds(60); - taskDef.setRateLimitPerFrequency(20); - TaskModel task = new TaskModel(); - task.setTaskId(UUID.randomUUID().toString()); - task.setTaskDefName(taskDef.getName()); - assertFalse(rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef)); - } - - @Test - public void testExceedsRateLimitOutOfLimit() { - TaskDef taskDef = new TaskDef("TestTaskDefinition"); - taskDef.setRateLimitFrequencyInSeconds(60); - taskDef.setRateLimitPerFrequency(1); - TaskModel task = new TaskModel(); - task.setTaskId(UUID.randomUUID().toString()); - task.setTaskDefName(taskDef.getName()); - assertFalse(rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef)); - assertTrue(rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef)); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/ConfigurationHostSupplierTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/ConfigurationHostSupplierTest.java deleted file mode 100644 index 7760c68a3..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/ConfigurationHostSupplierTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.redis.config.RedisProperties; -import com.netflix.conductor.redis.dynoqueue.ConfigurationHostSupplier; -import com.netflix.dyno.connectionpool.Host; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class ConfigurationHostSupplierTest { - - private RedisProperties properties; - - private ConfigurationHostSupplier configurationHostSupplier; - - @Before - public void setUp() { - properties = mock(RedisProperties.class); - configurationHostSupplier = new ConfigurationHostSupplier(properties); - } - - @Test - public void getHost() { - when(properties.getHosts()).thenReturn("dyno1:8102:us-east-1c"); - - List hosts = configurationHostSupplier.getHosts(); - assertEquals(1, hosts.size()); - - Host firstHost = hosts.get(0); - assertEquals("dyno1", firstHost.getHostName()); - assertEquals(8102, firstHost.getPort()); - assertEquals("us-east-1c", firstHost.getRack()); - assertTrue(firstHost.isUp()); - } - - @Test - public void getMultipleHosts() { - when(properties.getHosts()).thenReturn("dyno1:8102:us-east-1c;dyno2:8103:us-east-1c"); - - List hosts = configurationHostSupplier.getHosts(); - assertEquals(2, hosts.size()); - - Host firstHost = hosts.get(0); - assertEquals("dyno1", firstHost.getHostName()); - assertEquals(8102, firstHost.getPort()); - assertEquals("us-east-1c", firstHost.getRack()); - assertTrue(firstHost.isUp()); - - Host secondHost = hosts.get(1); - 
assertEquals("dyno2", secondHost.getHostName()); - assertEquals(8103, secondHost.getPort()); - assertEquals("us-east-1c", secondHost.getRack()); - assertTrue(secondHost.isUp()); - } - - @Test - public void getAuthenticatedHost() { - when(properties.getHosts()).thenReturn("redis1:6432:us-east-1c:password"); - - List hosts = configurationHostSupplier.getHosts(); - assertEquals(1, hosts.size()); - - Host firstHost = hosts.get(0); - assertEquals("redis1", firstHost.getHostName()); - assertEquals(6432, firstHost.getPort()); - assertEquals("us-east-1c", firstHost.getRack()); - assertEquals("password", firstHost.getPassword()); - assertTrue(firstHost.isUp()); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisClusterTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisClusterTest.java deleted file mode 100644 index e69b22a46..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisClusterTest.java +++ /dev/null @@ -1,614 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.AbstractMap; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; - -import org.junit.Test; -import org.mockito.Mockito; - -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class JedisClusterTest { - - private final redis.clients.jedis.JedisCluster mockCluster = - mock(redis.clients.jedis.JedisCluster.class); - private final JedisCluster jedisCluster = new JedisCluster(mockCluster); - - @Test - public void testSet() { - jedisCluster.set("key", "value"); - jedisCluster.set("key", "value", SetParams.setParams()); - } - - @Test - public void testGet() { - jedisCluster.get("key"); - } - - @Test - public void testExists() { - jedisCluster.exists("key"); - } - - @Test - public void testPersist() { - jedisCluster.persist("key"); - } - - @Test - public void testType() { - jedisCluster.type("key"); - } - - @Test - public void testExpire() { - jedisCluster.expire("key", 1337); - } - - @Test - public void testPexpire() { - jedisCluster.pexpire("key", 1337); - } - - @Test - public void testExpireAt() { - jedisCluster.expireAt("key", 1337); - } - - @Test - public 
void testPexpireAt() { - jedisCluster.pexpireAt("key", 1337); - } - - @Test - public void testTtl() { - jedisCluster.ttl("key"); - } - - @Test - public void testPttl() { - jedisCluster.pttl("key"); - } - - @Test - public void testSetbit() { - jedisCluster.setbit("key", 1337, "value"); - jedisCluster.setbit("key", 1337, true); - } - - @Test - public void testGetbit() { - jedisCluster.getbit("key", 1337); - } - - @Test - public void testSetrange() { - jedisCluster.setrange("key", 1337, "value"); - } - - @Test - public void testGetrange() { - jedisCluster.getrange("key", 1337, 1338); - } - - @Test - public void testGetSet() { - jedisCluster.getSet("key", "value"); - } - - @Test - public void testSetnx() { - jedisCluster.setnx("test", "value"); - } - - @Test - public void testSetex() { - jedisCluster.setex("key", 1337, "value"); - } - - @Test - public void testPsetex() { - jedisCluster.psetex("key", 1337, "value"); - } - - @Test - public void testDecrBy() { - jedisCluster.decrBy("key", 1337); - } - - @Test - public void testDecr() { - jedisCluster.decr("key"); - } - - @Test - public void testIncrBy() { - jedisCluster.incrBy("key", 1337); - } - - @Test - public void testIncrByFloat() { - jedisCluster.incrByFloat("key", 1337); - } - - @Test - public void testIncr() { - jedisCluster.incr("key"); - } - - @Test - public void testAppend() { - jedisCluster.append("key", "value"); - } - - @Test - public void testSubstr() { - jedisCluster.substr("key", 1337, 1338); - } - - @Test - public void testHset() { - jedisCluster.hset("key", "field", "value"); - } - - @Test - public void testHget() { - jedisCluster.hget("key", "field"); - } - - @Test - public void testHsetnx() { - jedisCluster.hsetnx("key", "field", "value"); - } - - @Test - public void testHmset() { - jedisCluster.hmset("key", new HashMap<>()); - } - - @Test - public void testHmget() { - jedisCluster.hmget("key", "fields"); - } - - @Test - public void testHincrBy() { - jedisCluster.hincrBy("key", "field", 1337); - } - - 
@Test - public void testHincrByFloat() { - jedisCluster.hincrByFloat("key", "field", 1337); - } - - @Test - public void testHexists() { - jedisCluster.hexists("key", "field"); - } - - @Test - public void testHdel() { - jedisCluster.hdel("key", "field"); - } - - @Test - public void testHlen() { - jedisCluster.hlen("key"); - } - - @Test - public void testHkeys() { - jedisCluster.hkeys("key"); - } - - @Test - public void testHvals() { - jedisCluster.hvals("key"); - } - - @Test - public void testGgetAll() { - jedisCluster.hgetAll("key"); - } - - @Test - public void testRpush() { - jedisCluster.rpush("key", "string"); - } - - @Test - public void testLpush() { - jedisCluster.lpush("key", "string"); - } - - @Test - public void testLlen() { - jedisCluster.llen("key"); - } - - @Test - public void testLrange() { - jedisCluster.lrange("key", 1337, 1338); - } - - @Test - public void testLtrim() { - jedisCluster.ltrim("key", 1337, 1338); - } - - @Test - public void testLindex() { - jedisCluster.lindex("key", 1337); - } - - @Test - public void testLset() { - jedisCluster.lset("key", 1337, "value"); - } - - @Test - public void testLrem() { - jedisCluster.lrem("key", 1337, "value"); - } - - @Test - public void testLpop() { - jedisCluster.lpop("key"); - } - - @Test - public void testRpop() { - jedisCluster.rpop("key"); - } - - @Test - public void testSadd() { - jedisCluster.sadd("key", "member"); - } - - @Test - public void testSmembers() { - jedisCluster.smembers("key"); - } - - @Test - public void testSrem() { - jedisCluster.srem("key", "member"); - } - - @Test - public void testSpop() { - jedisCluster.spop("key"); - jedisCluster.spop("key", 1337); - } - - @Test - public void testScard() { - jedisCluster.scard("key"); - } - - @Test - public void testSismember() { - jedisCluster.sismember("key", "member"); - } - - @Test - public void testSrandmember() { - jedisCluster.srandmember("key"); - jedisCluster.srandmember("key", 1337); - } - - @Test - public void testStrlen() { - 
jedisCluster.strlen("key"); - } - - @Test - public void testZadd() { - jedisCluster.zadd("key", new HashMap<>()); - jedisCluster.zadd("key", new HashMap<>(), ZAddParams.zAddParams()); - jedisCluster.zadd("key", 1337, "members"); - jedisCluster.zadd("key", 1337, "members", ZAddParams.zAddParams()); - } - - @Test - public void testZrange() { - jedisCluster.zrange("key", 1337, 1338); - } - - @Test - public void testZrem() { - jedisCluster.zrem("key", "member"); - } - - @Test - public void testZincrby() { - jedisCluster.zincrby("key", 1337, "member"); - jedisCluster.zincrby("key", 1337, "member", ZIncrByParams.zIncrByParams()); - } - - @Test - public void testZrank() { - jedisCluster.zrank("key", "member"); - } - - @Test - public void testZrevrank() { - jedisCluster.zrevrank("key", "member"); - } - - @Test - public void testZrevrange() { - jedisCluster.zrevrange("key", 1337, 1338); - } - - @Test - public void testZrangeWithScores() { - jedisCluster.zrangeWithScores("key", 1337, 1338); - } - - @Test - public void testZrevrangeWithScores() { - jedisCluster.zrevrangeWithScores("key", 1337, 1338); - } - - @Test - public void testZcard() { - jedisCluster.zcard("key"); - } - - @Test - public void testZscore() { - jedisCluster.zscore("key", "member"); - } - - @Test - public void testSort() { - jedisCluster.sort("key"); - jedisCluster.sort("key", new SortingParams()); - } - - @Test - public void testZcount() { - jedisCluster.zcount("key", "min", "max"); - jedisCluster.zcount("key", 1337, 1338); - } - - @Test - public void testZrangeByScore() { - jedisCluster.zrangeByScore("key", "min", "max"); - jedisCluster.zrangeByScore("key", 1337, 1338); - jedisCluster.zrangeByScore("key", "min", "max", 1337, 1338); - jedisCluster.zrangeByScore("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrevrangeByScore() { - jedisCluster.zrevrangeByScore("key", "max", "min"); - jedisCluster.zrevrangeByScore("key", 1337, 1338); - jedisCluster.zrevrangeByScore("key", "max", "min", 1337, 
1338); - jedisCluster.zrevrangeByScore("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrangeByScoreWithScores() { - jedisCluster.zrangeByScoreWithScores("key", "min", "max"); - jedisCluster.zrangeByScoreWithScores("key", "min", "max", 1337, 1338); - jedisCluster.zrangeByScoreWithScores("key", 1337, 1338); - jedisCluster.zrangeByScoreWithScores("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrevrangeByScoreWithScores() { - jedisCluster.zrevrangeByScoreWithScores("key", "max", "min"); - jedisCluster.zrevrangeByScoreWithScores("key", "max", "min", 1337, 1338); - jedisCluster.zrevrangeByScoreWithScores("key", 1337, 1338); - jedisCluster.zrevrangeByScoreWithScores("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZremrangeByRank() { - jedisCluster.zremrangeByRank("key", 1337, 1338); - } - - @Test - public void testZremrangeByScore() { - jedisCluster.zremrangeByScore("key", "start", "end"); - jedisCluster.zremrangeByScore("key", 1337, 1338); - } - - @Test - public void testZlexcount() { - jedisCluster.zlexcount("key", "min", "max"); - } - - @Test - public void testZrangeByLex() { - jedisCluster.zrangeByLex("key", "min", "max"); - jedisCluster.zrangeByLex("key", "min", "max", 1337, 1338); - } - - @Test - public void testZrevrangeByLex() { - jedisCluster.zrevrangeByLex("key", "max", "min"); - jedisCluster.zrevrangeByLex("key", "max", "min", 1337, 1338); - } - - @Test - public void testZremrangeByLex() { - jedisCluster.zremrangeByLex("key", "min", "max"); - } - - @Test - public void testLinsert() { - jedisCluster.linsert("key", ListPosition.AFTER, "pivot", "value"); - } - - @Test - public void testLpushx() { - jedisCluster.lpushx("key", "string"); - } - - @Test - public void testRpushx() { - jedisCluster.rpushx("key", "string"); - } - - @Test - public void testBlpop() { - jedisCluster.blpop(1337, "arg"); - } - - @Test - public void testBrpop() { - jedisCluster.brpop(1337, "arg"); - } - - @Test - public void testDel() { - 
jedisCluster.del("key"); - } - - @Test - public void testEcho() { - jedisCluster.echo("string"); - } - - @Test(expected = UnsupportedOperationException.class) - public void testMove() { - jedisCluster.move("key", 1337); - } - - @Test - public void testBitcount() { - jedisCluster.bitcount("key"); - jedisCluster.bitcount("key", 1337, 1338); - } - - @Test(expected = UnsupportedOperationException.class) - public void testBitpos() { - jedisCluster.bitpos("key", true); - } - - @Test - public void testHscan() { - jedisCluster.hscan("key", "cursor"); - - ScanResult> scanResult = - new ScanResult<>( - "cursor".getBytes(), - Arrays.asList( - new AbstractMap.SimpleEntry<>("key1".getBytes(), "val1".getBytes()), - new AbstractMap.SimpleEntry<>( - "key2".getBytes(), "val2".getBytes()))); - - when(mockCluster.hscan(Mockito.any(), Mockito.any(), Mockito.any(ScanParams.class))) - .thenReturn(scanResult); - ScanResult> result = - jedisCluster.hscan("key", "cursor", new ScanParams()); - - assertEquals("cursor", result.getCursor()); - assertEquals(2, result.getResult().size()); - assertEquals("val1", result.getResult().get(0).getValue()); - } - - @Test - public void testSscan() { - jedisCluster.sscan("key", "cursor"); - - ScanResult scanResult = - new ScanResult<>( - "sscursor".getBytes(), Arrays.asList("val1".getBytes(), "val2".getBytes())); - - when(mockCluster.sscan(Mockito.any(), Mockito.any(), Mockito.any(ScanParams.class))) - .thenReturn(scanResult); - - ScanResult result = jedisCluster.sscan("key", "cursor", new ScanParams()); - assertEquals("sscursor", result.getCursor()); - assertEquals(2, result.getResult().size()); - assertEquals("val1", result.getResult().get(0)); - } - - @Test - public void testZscan() { - jedisCluster.zscan("key", "cursor"); - jedisCluster.zscan("key", "cursor", new ScanParams()); - } - - @Test - public void testPfadd() { - jedisCluster.pfadd("key", "elements"); - } - - @Test - public void testPfcount() { - jedisCluster.pfcount("key"); - } - - @Test - 
public void testGeoadd() { - jedisCluster.geoadd("key", new HashMap<>()); - jedisCluster.geoadd("key", 1337, 1338, "member"); - } - - @Test - public void testGeodist() { - jedisCluster.geodist("key", "member1", "member2"); - jedisCluster.geodist("key", "member1", "member2", GeoUnit.KM); - } - - @Test - public void testGeohash() { - jedisCluster.geohash("key", "members"); - } - - @Test - public void testGeopos() { - jedisCluster.geopos("key", "members"); - } - - @Test - public void testGeoradius() { - jedisCluster.georadius("key", 1337, 1338, 32, GeoUnit.KM); - jedisCluster.georadius("key", 1337, 1338, 32, GeoUnit.KM, GeoRadiusParam.geoRadiusParam()); - } - - @Test - public void testGeoradiusByMember() { - jedisCluster.georadiusByMember("key", "member", 1337, GeoUnit.KM); - jedisCluster.georadiusByMember( - "key", "member", 1337, GeoUnit.KM, GeoRadiusParam.geoRadiusParam()); - } - - @Test - public void testBitfield() { - jedisCluster.bitfield("key", "arguments"); - } -} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisSentinelTest.java b/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisSentinelTest.java deleted file mode 100644 index b38e13468..000000000 --- a/redis-persistence/src/test/java/com/netflix/conductor/redis/jedis/JedisSentinelTest.java +++ /dev/null @@ -1,588 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.redis.jedis; - -import java.util.HashMap; - -import org.junit.Before; -import org.junit.Test; - -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisSentinelPool; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class JedisSentinelTest { - - private final Jedis jedis = mock(Jedis.class); - private final JedisSentinelPool jedisPool = mock(JedisSentinelPool.class); - private final JedisSentinel jedisSentinel = new JedisSentinel(jedisPool); - - @Before - public void init() { - when(this.jedisPool.getResource()).thenReturn(this.jedis); - } - - @Test - public void testSet() { - jedisSentinel.set("key", "value"); - jedisSentinel.set("key", "value", SetParams.setParams()); - } - - @Test - public void testGet() { - jedisSentinel.get("key"); - } - - @Test - public void testExists() { - jedisSentinel.exists("key"); - } - - @Test - public void testPersist() { - jedisSentinel.persist("key"); - } - - @Test - public void testType() { - jedisSentinel.type("key"); - } - - @Test - public void testExpire() { - jedisSentinel.expire("key", 1337); - } - - @Test - public void testPexpire() { - jedisSentinel.pexpire("key", 1337); - } - - @Test - public void testExpireAt() { - jedisSentinel.expireAt("key", 1337); - } - - @Test - 
public void testPexpireAt() { - jedisSentinel.pexpireAt("key", 1337); - } - - @Test - public void testTtl() { - jedisSentinel.ttl("key"); - } - - @Test - public void testPttl() { - jedisSentinel.pttl("key"); - } - - @Test - public void testSetbit() { - jedisSentinel.setbit("key", 1337, "value"); - jedisSentinel.setbit("key", 1337, true); - } - - @Test - public void testGetbit() { - jedisSentinel.getbit("key", 1337); - } - - @Test - public void testSetrange() { - jedisSentinel.setrange("key", 1337, "value"); - } - - @Test - public void testGetrange() { - jedisSentinel.getrange("key", 1337, 1338); - } - - @Test - public void testGetSet() { - jedisSentinel.getSet("key", "value"); - } - - @Test - public void testSetnx() { - jedisSentinel.setnx("test", "value"); - } - - @Test - public void testSetex() { - jedisSentinel.setex("key", 1337, "value"); - } - - @Test - public void testPsetex() { - jedisSentinel.psetex("key", 1337, "value"); - } - - @Test - public void testDecrBy() { - jedisSentinel.decrBy("key", 1337); - } - - @Test - public void testDecr() { - jedisSentinel.decr("key"); - } - - @Test - public void testIncrBy() { - jedisSentinel.incrBy("key", 1337); - } - - @Test - public void testIncrByFloat() { - jedisSentinel.incrByFloat("key", 1337); - } - - @Test - public void testIncr() { - jedisSentinel.incr("key"); - } - - @Test - public void testAppend() { - jedisSentinel.append("key", "value"); - } - - @Test - public void testSubstr() { - jedisSentinel.substr("key", 1337, 1338); - } - - @Test - public void testHset() { - jedisSentinel.hset("key", "field", "value"); - } - - @Test - public void testHget() { - jedisSentinel.hget("key", "field"); - } - - @Test - public void testHsetnx() { - jedisSentinel.hsetnx("key", "field", "value"); - } - - @Test - public void testHmset() { - jedisSentinel.hmset("key", new HashMap<>()); - } - - @Test - public void testHmget() { - jedisSentinel.hmget("key", "fields"); - } - - @Test - public void testHincrBy() { - 
jedisSentinel.hincrBy("key", "field", 1337); - } - - @Test - public void testHincrByFloat() { - jedisSentinel.hincrByFloat("key", "field", 1337); - } - - @Test - public void testHexists() { - jedisSentinel.hexists("key", "field"); - } - - @Test - public void testHdel() { - jedisSentinel.hdel("key", "field"); - } - - @Test - public void testHlen() { - jedisSentinel.hlen("key"); - } - - @Test - public void testHkeys() { - jedisSentinel.hkeys("key"); - } - - @Test - public void testHvals() { - jedisSentinel.hvals("key"); - } - - @Test - public void testGgetAll() { - jedisSentinel.hgetAll("key"); - } - - @Test - public void testRpush() { - jedisSentinel.rpush("key", "string"); - } - - @Test - public void testLpush() { - jedisSentinel.lpush("key", "string"); - } - - @Test - public void testLlen() { - jedisSentinel.llen("key"); - } - - @Test - public void testLrange() { - jedisSentinel.lrange("key", 1337, 1338); - } - - @Test - public void testLtrim() { - jedisSentinel.ltrim("key", 1337, 1338); - } - - @Test - public void testLindex() { - jedisSentinel.lindex("key", 1337); - } - - @Test - public void testLset() { - jedisSentinel.lset("key", 1337, "value"); - } - - @Test - public void testLrem() { - jedisSentinel.lrem("key", 1337, "value"); - } - - @Test - public void testLpop() { - jedisSentinel.lpop("key"); - } - - @Test - public void testRpop() { - jedisSentinel.rpop("key"); - } - - @Test - public void testSadd() { - jedisSentinel.sadd("key", "member"); - } - - @Test - public void testSmembers() { - jedisSentinel.smembers("key"); - } - - @Test - public void testSrem() { - jedisSentinel.srem("key", "member"); - } - - @Test - public void testSpop() { - jedisSentinel.spop("key"); - jedisSentinel.spop("key", 1337); - } - - @Test - public void testScard() { - jedisSentinel.scard("key"); - } - - @Test - public void testSismember() { - jedisSentinel.sismember("key", "member"); - } - - @Test - public void testSrandmember() { - jedisSentinel.srandmember("key"); - 
jedisSentinel.srandmember("key", 1337); - } - - @Test - public void testStrlen() { - jedisSentinel.strlen("key"); - } - - @Test - public void testZadd() { - jedisSentinel.zadd("key", new HashMap<>()); - jedisSentinel.zadd("key", new HashMap<>(), ZAddParams.zAddParams()); - jedisSentinel.zadd("key", 1337, "members"); - jedisSentinel.zadd("key", 1337, "members", ZAddParams.zAddParams()); - } - - @Test - public void testZrange() { - jedisSentinel.zrange("key", 1337, 1338); - } - - @Test - public void testZrem() { - jedisSentinel.zrem("key", "member"); - } - - @Test - public void testZincrby() { - jedisSentinel.zincrby("key", 1337, "member"); - jedisSentinel.zincrby("key", 1337, "member", ZIncrByParams.zIncrByParams()); - } - - @Test - public void testZrank() { - jedisSentinel.zrank("key", "member"); - } - - @Test - public void testZrevrank() { - jedisSentinel.zrevrank("key", "member"); - } - - @Test - public void testZrevrange() { - jedisSentinel.zrevrange("key", 1337, 1338); - } - - @Test - public void testZrangeWithScores() { - jedisSentinel.zrangeWithScores("key", 1337, 1338); - } - - @Test - public void testZrevrangeWithScores() { - jedisSentinel.zrevrangeWithScores("key", 1337, 1338); - } - - @Test - public void testZcard() { - jedisSentinel.zcard("key"); - } - - @Test - public void testZscore() { - jedisSentinel.zscore("key", "member"); - } - - @Test - public void testSort() { - jedisSentinel.sort("key"); - jedisSentinel.sort("key", new SortingParams()); - } - - @Test - public void testZcount() { - jedisSentinel.zcount("key", "min", "max"); - jedisSentinel.zcount("key", 1337, 1338); - } - - @Test - public void testZrangeByScore() { - jedisSentinel.zrangeByScore("key", "min", "max"); - jedisSentinel.zrangeByScore("key", 1337, 1338); - jedisSentinel.zrangeByScore("key", "min", "max", 1337, 1338); - jedisSentinel.zrangeByScore("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrevrangeByScore() { - jedisSentinel.zrevrangeByScore("key", "max", "min"); 
- jedisSentinel.zrevrangeByScore("key", 1337, 1338); - jedisSentinel.zrevrangeByScore("key", "max", "min", 1337, 1338); - jedisSentinel.zrevrangeByScore("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrangeByScoreWithScores() { - jedisSentinel.zrangeByScoreWithScores("key", "min", "max"); - jedisSentinel.zrangeByScoreWithScores("key", "min", "max", 1337, 1338); - jedisSentinel.zrangeByScoreWithScores("key", 1337, 1338); - jedisSentinel.zrangeByScoreWithScores("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZrevrangeByScoreWithScores() { - jedisSentinel.zrevrangeByScoreWithScores("key", "max", "min"); - jedisSentinel.zrevrangeByScoreWithScores("key", "max", "min", 1337, 1338); - jedisSentinel.zrevrangeByScoreWithScores("key", 1337, 1338); - jedisSentinel.zrevrangeByScoreWithScores("key", 1337, 1338, 1339, 1340); - } - - @Test - public void testZremrangeByRank() { - jedisSentinel.zremrangeByRank("key", 1337, 1338); - } - - @Test - public void testZremrangeByScore() { - jedisSentinel.zremrangeByScore("key", "start", "end"); - jedisSentinel.zremrangeByScore("key", 1337, 1338); - } - - @Test - public void testZlexcount() { - jedisSentinel.zlexcount("key", "min", "max"); - } - - @Test - public void testZrangeByLex() { - jedisSentinel.zrangeByLex("key", "min", "max"); - jedisSentinel.zrangeByLex("key", "min", "max", 1337, 1338); - } - - @Test - public void testZrevrangeByLex() { - jedisSentinel.zrevrangeByLex("key", "max", "min"); - jedisSentinel.zrevrangeByLex("key", "max", "min", 1337, 1338); - } - - @Test - public void testZremrangeByLex() { - jedisSentinel.zremrangeByLex("key", "min", "max"); - } - - @Test - public void testLinsert() { - jedisSentinel.linsert("key", ListPosition.AFTER, "pivot", "value"); - } - - @Test - public void testLpushx() { - jedisSentinel.lpushx("key", "string"); - } - - @Test - public void testRpushx() { - jedisSentinel.rpushx("key", "string"); - } - - @Test - public void testBlpop() { - 
jedisSentinel.blpop(1337, "arg"); - } - - @Test - public void testBrpop() { - jedisSentinel.brpop(1337, "arg"); - } - - @Test - public void testDel() { - jedisSentinel.del("key"); - } - - @Test - public void testEcho() { - jedisSentinel.echo("string"); - } - - @Test - public void testMove() { - jedisSentinel.move("key", 1337); - } - - @Test - public void testBitcount() { - jedisSentinel.bitcount("key"); - jedisSentinel.bitcount("key", 1337, 1338); - } - - @Test - public void testBitpos() { - jedisSentinel.bitpos("key", true); - } - - @Test - public void testHscan() { - jedisSentinel.hscan("key", "cursor"); - jedisSentinel.hscan("key", "cursor", new ScanParams()); - } - - @Test - public void testSscan() { - jedisSentinel.sscan("key", "cursor"); - jedisSentinel.sscan("key", "cursor", new ScanParams()); - } - - @Test - public void testZscan() { - jedisSentinel.zscan("key", "cursor"); - jedisSentinel.zscan("key", "cursor", new ScanParams()); - } - - @Test - public void testPfadd() { - jedisSentinel.pfadd("key", "elements"); - } - - @Test - public void testPfcount() { - jedisSentinel.pfcount("key"); - } - - @Test - public void testGeoadd() { - jedisSentinel.geoadd("key", new HashMap<>()); - jedisSentinel.geoadd("key", 1337, 1338, "member"); - } - - @Test - public void testGeodist() { - jedisSentinel.geodist("key", "member1", "member2"); - jedisSentinel.geodist("key", "member1", "member2", GeoUnit.KM); - } - - @Test - public void testGeohash() { - jedisSentinel.geohash("key", "members"); - } - - @Test - public void testGeopos() { - jedisSentinel.geopos("key", "members"); - } - - @Test - public void testGeoradius() { - jedisSentinel.georadius("key", 1337, 1338, 32, GeoUnit.KM); - jedisSentinel.georadius("key", 1337, 1338, 32, GeoUnit.KM, GeoRadiusParam.geoRadiusParam()); - } - - @Test - public void testGeoradiusByMember() { - jedisSentinel.georadiusByMember("key", "member", 1337, GeoUnit.KM); - jedisSentinel.georadiusByMember( - "key", "member", 1337, GeoUnit.KM, 
GeoRadiusParam.geoRadiusParam()); - } - - @Test - public void testBitfield() { - jedisSentinel.bitfield("key", "arguments"); - } -} diff --git a/rest/build.gradle b/rest/build.gradle deleted file mode 100644 index 97d66d816..000000000 --- a/rest/build.gradle +++ /dev/null @@ -1,11 +0,0 @@ -dependencies { - - implementation project(':conductor-common') - implementation project(':conductor-core') - - implementation 'org.springframework.boot:spring-boot-starter-web' - - implementation "com.netflix.runtime:health-api:${revHealth}" - - implementation "org.springdoc:springdoc-openapi-ui:${revOpenapi}" -} diff --git a/rest/dependencies.lock b/rest/dependencies.lock deleted file mode 100644 index e7edb2fc7..000000000 --- a/rest/dependencies.lock +++ /dev/null @@ -1,399 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.runtime:health-api": { - "locked": "1.1.4" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, 
- "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.runtime:health-api": { - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - } - }, - "testCompileClasspath": { - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.runtime:health-api": { - "locked": "1.1.4" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.9.3" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - 
"com.netflix.runtime:health-api": { - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - } - } -} \ No newline at end of file diff --git a/rest/src/main/java/com/netflix/conductor/rest/config/RequestMappingConstants.java b/rest/src/main/java/com/netflix/conductor/rest/config/RequestMappingConstants.java deleted file mode 100644 index 940d3fb92..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/config/RequestMappingConstants.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.config; - -public interface RequestMappingConstants { - - String API_PREFIX = "/api/"; - - String ADMIN = API_PREFIX + "admin"; - String EVENT = API_PREFIX + "event"; - String METADATA = API_PREFIX + "metadata"; - String QUEUE = API_PREFIX + "queue"; - String TASKS = API_PREFIX + "tasks"; - String WORKFLOW_BULK = API_PREFIX + "workflow/bulk"; - String WORKFLOW = API_PREFIX + "workflow"; -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/config/RestConfiguration.java b/rest/src/main/java/com/netflix/conductor/rest/config/RestConfiguration.java deleted file mode 100644 index e4b001591..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/config/RestConfiguration.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.config; - -import org.springframework.context.annotation.Configuration; -import org.springframework.web.servlet.config.annotation.ContentNegotiationConfigurer; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -import static org.springframework.http.MediaType.APPLICATION_JSON; -import static org.springframework.http.MediaType.TEXT_PLAIN; - -@Configuration -public class RestConfiguration implements WebMvcConfigurer { - - /** - * Disable all 3 (Accept header, url parameter, path extension) strategies of content - * negotiation and only allow application/json and text/plain types. - *
    - * - *

    Any "mapping" that is annotated with produces=TEXT_PLAIN_VALUE will be sent - * as text/plain all others as application/json.
    - * More details on Spring MVC content negotiation can be found at https://spring.io/blog/2013/05/11/content-negotiation-using-spring-mvc - *
    - */ - @Override - public void configureContentNegotiation(ContentNegotiationConfigurer configurer) { - configurer - .favorParameter(false) - .favorPathExtension(false) - .ignoreAcceptHeader(true) - .defaultContentType(APPLICATION_JSON, TEXT_PLAIN); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java deleted file mode 100644 index 4221917c5..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; -import java.util.Map; - -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.service.AdminService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.ADMIN; - -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - -@RestController -@RequestMapping(ADMIN) -public class AdminResource { - - private final AdminService adminService; - - public AdminResource(AdminService adminService) { - this.adminService = adminService; - } - - @Operation(summary = "Get all the configuration parameters") - @GetMapping("/config") - public Map getAllConfig() { - return adminService.getAllConfig(); - } - - @GetMapping("/task/{tasktype}") - @Operation(summary = "Get the list of pending tasks for a given task type") - public List view( - @PathVariable("tasktype") String taskType, - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "count", defaultValue = "100", required = false) int count) { - return adminService.getListOfPendingTask(taskType, start, count); - } - - @PostMapping(value = "/sweep/requeue/{workflowId}", produces = TEXT_PLAIN_VALUE) - 
@Operation(summary = "Queue up all the running workflows for sweep") - public String requeueSweep(@PathVariable("workflowId") String workflowId) { - return adminService.requeueSweep(workflowId); - } - - @PostMapping(value = "/consistency/verifyAndRepair/{workflowId}", produces = TEXT_PLAIN_VALUE) - @Operation(summary = "Verify and repair workflow consistency") - public String verifyAndRepairWorkflowConsistency( - @PathVariable("workflowId") String workflowId) { - return String.valueOf(adminService.verifyAndRepairWorkflowConsistency(workflowId)); - } - - @GetMapping("/queues") - @Operation(summary = "Get registered queues") - public Map getEventQueues( - @RequestParam(value = "verbose", defaultValue = "false", required = false) - boolean verbose) { - return adminService.getEventQueues(verbose); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/ApplicationExceptionMapper.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/ApplicationExceptionMapper.java deleted file mode 100644 index 32eb2e212..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/ApplicationExceptionMapper.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import javax.servlet.http.HttpServletRequest; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.annotation.Order; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.RestControllerAdvice; - -import com.netflix.conductor.common.validation.ErrorResponse; -import com.netflix.conductor.core.exception.ApplicationException; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.metrics.Monitors; - -import com.fasterxml.jackson.databind.exc.InvalidFormatException; - -import static com.netflix.conductor.core.exception.ApplicationException.Code.INTERNAL_ERROR; -import static com.netflix.conductor.core.exception.ApplicationException.Code.INVALID_INPUT; - -@RestControllerAdvice -@Order(ValidationExceptionMapper.ORDER + 1) -public class ApplicationExceptionMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationExceptionMapper.class); - - private final String host = Utils.getServerId(); - - @ExceptionHandler(ApplicationException.class) - public ResponseEntity handleApplicationException( - HttpServletRequest request, ApplicationException ex) { - logException(request, ex); - - Monitors.error("error", String.valueOf(ex.getHttpStatusCode())); - - return new ResponseEntity<>( - toErrorResponse(ex), HttpStatus.valueOf(ex.getHttpStatusCode())); - } - - @ExceptionHandler(Throwable.class) - public ResponseEntity handleAll(HttpServletRequest request, Throwable th) { - 
logException(request, th); - - ApplicationException.Code code = - (th instanceof IllegalArgumentException || th instanceof InvalidFormatException) - ? INVALID_INPUT - : INTERNAL_ERROR; - - ApplicationException ex = new ApplicationException(code, th.getMessage(), th); - - return handleApplicationException(request, ex); - } - - private void logException(HttpServletRequest request, Throwable exception) { - LOGGER.error( - String.format( - "Error %s url: '%s'", - exception.getClass().getSimpleName(), request.getRequestURI()), - exception); - } - - private ErrorResponse toErrorResponse(ApplicationException ex) { - ErrorResponse errorResponse = new ErrorResponse(); - errorResponse.setInstance(host); - errorResponse.setStatus(ex.getHttpStatusCode()); - errorResponse.setMessage(ex.getMessage()); - errorResponse.setRetryable(ex.isRetryable()); - return errorResponse; - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java deleted file mode 100644 index 02b620c1e..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; - -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.service.EventService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.EVENT; - -@RestController -@RequestMapping(EVENT) -public class EventResource { - - private final EventService eventService; - - public EventResource(EventService eventService) { - this.eventService = eventService; - } - - @PostMapping - @Operation(summary = "Add a new event handler.") - public void addEventHandler(@RequestBody EventHandler eventHandler) { - eventService.addEventHandler(eventHandler); - } - - @PutMapping - @Operation(summary = "Update an existing event handler.") - public void updateEventHandler(@RequestBody EventHandler eventHandler) { - eventService.updateEventHandler(eventHandler); - } - - @DeleteMapping("/{name}") - @Operation(summary = "Remove an event handler") - public void removeEventHandlerStatus(@PathVariable("name") String name) { - 
eventService.removeEventHandlerStatus(name); - } - - @GetMapping - @Operation(summary = "Get all the event handlers") - public List getEventHandlers() { - return eventService.getEventHandlers(); - } - - @GetMapping("/{event}") - @Operation(summary = "Get event handlers for a given event") - public List getEventHandlersForEvent( - @PathVariable("event") String event, - @RequestParam(value = "activeOnly", defaultValue = "true", required = false) - boolean activeOnly) { - return eventService.getEventHandlersForEvent(event, activeOnly); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/HealthCheckResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/HealthCheckResource.java deleted file mode 100644 index ffd3767ad..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/HealthCheckResource.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.Collections; - -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.runtime.health.api.HealthCheckStatus; - -@RestController -@RequestMapping("/health") -public class HealthCheckResource { - - // SBMTODO: Move this Spring boot health check - @GetMapping - public HealthCheckStatus doCheck() throws Exception { - return HealthCheckStatus.create(true, Collections.emptyList()); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java deleted file mode 100644 index d4ade04d3..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; - -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.service.MetadataService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.METADATA; - -@RestController -@RequestMapping(value = METADATA) -public class MetadataResource { - - private final MetadataService metadataService; - - public MetadataResource(MetadataService metadataService) { - this.metadataService = metadataService; - } - - @PostMapping("/workflow") - @Operation(summary = "Create a new workflow definition") - public void create(@RequestBody WorkflowDef workflowDef) { - metadataService.registerWorkflowDef(workflowDef); - } - - @PutMapping("/workflow") - @Operation(summary = "Create or update workflow definition") - public void update(@RequestBody List workflowDefs) { - metadataService.updateWorkflowDef(workflowDefs); - } - - @Operation(summary = "Retrieves workflow definition along 
with blueprint") - @GetMapping("/workflow/{name}") - public WorkflowDef get( - @PathVariable("name") String name, - @RequestParam(value = "version", required = false) Integer version) { - return metadataService.getWorkflowDef(name, version); - } - - @Operation(summary = "Retrieves all workflow definition along with blueprint") - @GetMapping("/workflow") - public List getAll() { - return metadataService.getWorkflowDefs(); - } - - @DeleteMapping("/workflow/{name}/{version}") - @Operation( - summary = - "Removes workflow definition. It does not remove workflows associated with the definition.") - public void unregisterWorkflowDef( - @PathVariable("name") String name, @PathVariable("version") Integer version) { - metadataService.unregisterWorkflowDef(name, version); - } - - @PostMapping("/taskdefs") - @Operation(summary = "Create new task definition(s)") - public void registerTaskDef(@RequestBody List taskDefs) { - metadataService.registerTaskDef(taskDefs); - } - - @PutMapping("/taskdefs") - @Operation(summary = "Update an existing task") - public void registerTaskDef(@RequestBody TaskDef taskDef) { - metadataService.updateTaskDef(taskDef); - } - - @GetMapping(value = "/taskdefs") - @Operation(summary = "Gets all task definition") - public List getTaskDefs() { - return metadataService.getTaskDefs(); - } - - @GetMapping("/taskdefs/{tasktype}") - @Operation(summary = "Gets the task definition") - public TaskDef getTaskDef(@PathVariable("tasktype") String taskType) { - return metadataService.getTaskDef(taskType); - } - - @DeleteMapping("/taskdefs/{tasktype}") - @Operation(summary = "Remove a task definition") - public void unregisterTaskDef(@PathVariable("tasktype") String taskType) { - metadataService.unregisterTaskDef(taskType); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/QueueAdminResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/QueueAdminResource.java deleted file mode 100644 index 70eb05872..000000000 --- 
a/rest/src/main/java/com/netflix/conductor/rest/controllers/QueueAdminResource.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.Map; - -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.core.events.queue.DefaultEventQueueProcessor; -import com.netflix.conductor.model.TaskModel.Status; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.QUEUE; - -@RestController -@RequestMapping(QUEUE) -public class QueueAdminResource { - - private final DefaultEventQueueProcessor defaultEventQueueProcessor; - - public QueueAdminResource(DefaultEventQueueProcessor defaultEventQueueProcessor) { - this.defaultEventQueueProcessor = defaultEventQueueProcessor; - } - - @Operation(summary = "Get the queue length") - @GetMapping(value = "/size") - public Map size() { - return defaultEventQueueProcessor.size(); - } - - @Operation(summary = "Get Queue Names") - @GetMapping(value = "/") - public Map names() { - return defaultEventQueueProcessor.queues(); - } - - @Operation(summary = "Publish a message in queue to mark a wait task as completed.") - @PostMapping(value = "/update/{workflowId}/{taskRefName}/{status}") - public void update( - @PathVariable("workflowId") String workflowId, - @PathVariable("taskRefName") String taskRefName, - @PathVariable("status") Status status, - @RequestBody Map output) - throws 
Exception { - defaultEventQueueProcessor.updateByTaskRefName(workflowId, taskRefName, output, status); - } - - @Operation(summary = "Publish a message in queue to mark a wait task (by taskId) as completed.") - @PostMapping("/update/{workflowId}/task/{taskId}/{status}") - public void updateByTaskId( - @PathVariable("workflowId") String workflowId, - @PathVariable("taskId") String taskId, - @PathVariable("status") Status status, - @RequestBody Map output) - throws Exception { - defaultEventQueueProcessor.updateByTaskId(workflowId, taskId, output, status); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java deleted file mode 100644 index 1b1fe6b7a..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.service.TaskService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.TASKS; - -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - -@RestController -@RequestMapping(value = TASKS) -public class TaskResource { - - private final TaskService taskService; - - public TaskResource(TaskService taskService) { - this.taskService = taskService; - } - - @GetMapping("/poll/{tasktype}") - @Operation(summary = "Poll for a task of a certain type") - public ResponseEntity poll( - @PathVariable("tasktype") String 
taskType, - @RequestParam(value = "workerid", required = false) String workerId, - @RequestParam(value = "domain", required = false) String domain) { - // for backwards compatibility with 2.x client which expects a 204 when no Task is found - return Optional.ofNullable(taskService.poll(taskType, workerId, domain)) - .map(ResponseEntity::ok) - .orElse(ResponseEntity.noContent().build()); - } - - @GetMapping("/poll/batch/{tasktype}") - @Operation(summary = "Batch poll for a task of a certain type") - public ResponseEntity> batchPoll( - @PathVariable("tasktype") String taskType, - @RequestParam(value = "workerid", required = false) String workerId, - @RequestParam(value = "domain", required = false) String domain, - @RequestParam(value = "count", defaultValue = "1") int count, - @RequestParam(value = "timeout", defaultValue = "100") int timeout) { - // for backwards compatibility with 2.x client which expects a 204 when no Task is found - return Optional.ofNullable( - taskService.batchPoll(taskType, workerId, domain, count, timeout)) - .map(ResponseEntity::ok) - .orElse(ResponseEntity.noContent().build()); - } - - @PostMapping(produces = TEXT_PLAIN_VALUE) - @Operation(summary = "Update a task") - public String updateTask(@RequestBody TaskResult taskResult) { - return taskService.updateTask(taskResult); - } - - @PostMapping("/{taskId}/log") - @Operation(summary = "Log Task Execution Details") - public void log(@PathVariable("taskId") String taskId, @RequestBody String log) { - taskService.log(taskId, log); - } - - @GetMapping("/{taskId}/log") - @Operation(summary = "Get Task Execution Logs") - public List getTaskLogs(@PathVariable("taskId") String taskId) { - return taskService.getTaskLogs(taskId); - } - - @GetMapping("/{taskId}") - @Operation(summary = "Get task by Id") - public ResponseEntity getTask(@PathVariable("taskId") String taskId) { - // for backwards compatibility with 2.x client which expects a 204 when no Task is found - return 
Optional.ofNullable(taskService.getTask(taskId)) - .map(ResponseEntity::ok) - .orElse(ResponseEntity.noContent().build()); - } - - @GetMapping("/queue/sizes") - @Operation(summary = "Deprecated. Please use /tasks/queue/size endpoint") - @Deprecated - public Map size( - @RequestParam(value = "taskType", required = false) List taskTypes) { - return taskService.getTaskQueueSizes(taskTypes); - } - - @GetMapping("/queue/size") - @Operation(summary = "Get queue size for a task type.") - public Integer taskDepth( - @RequestParam("taskType") String taskType, - @RequestParam(value = "domain", required = false) String domain, - @RequestParam(value = "isolationGroupId", required = false) String isolationGroupId, - @RequestParam(value = "executionNamespace", required = false) - String executionNamespace) { - return taskService.getTaskQueueSize(taskType, domain, executionNamespace, isolationGroupId); - } - - @GetMapping("/queue/all/verbose") - @Operation(summary = "Get the details about each queue") - public Map>> allVerbose() { - return taskService.allVerbose(); - } - - @GetMapping("/queue/all") - @Operation(summary = "Get the details about each queue") - public Map all() { - return taskService.getAllQueueDetails(); - } - - @GetMapping("/queue/polldata") - @Operation(summary = "Get the last poll data for a given task type") - public List getPollData(@RequestParam("taskType") String taskType) { - return taskService.getPollData(taskType); - } - - @GetMapping("/queue/polldata/all") - @Operation(summary = "Get the last poll data for all task types") - public List getAllPollData() { - return taskService.getAllPollData(); - } - - @PostMapping(value = "/queue/requeue/{taskType}", produces = TEXT_PLAIN_VALUE) - @Operation(summary = "Requeue pending tasks") - public String requeuePendingTask(@PathVariable("taskType") String taskType) { - return taskService.requeuePendingTask(taskType); - } - - @Operation( - summary = "Search for tasks based in payload and other parameters", - 
description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." - + " If order is not specified, defaults to ASC") - @GetMapping(value = "/search") - public SearchResult search( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return taskService.search(start, size, sort, freeText, query); - } - - @Operation( - summary = "Search for tasks based in payload and other parameters", - description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." - + " If order is not specified, defaults to ASC") - @GetMapping(value = "/search-v2") - public SearchResult searchV2( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return taskService.searchV2(start, size, sort, freeText, query); - } - - @Operation(summary = "Get the external uri where the task payload is to be stored") - @GetMapping("/externalstoragelocation") - public ExternalStorageLocation getExternalStorageLocation( - @RequestParam("path") String path, - @RequestParam("operation") String operation, - @RequestParam("payloadType") String payloadType) { - return taskService.getExternalStorageLocation(path, operation, payloadType); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/ValidationExceptionMapper.java 
b/rest/src/main/java/com/netflix/conductor/rest/controllers/ValidationExceptionMapper.java deleted file mode 100644 index 704f72e05..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/ValidationExceptionMapper.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import javax.servlet.http.HttpServletRequest; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.validation.ValidationException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.RestControllerAdvice; - -import com.netflix.conductor.common.validation.ErrorResponse; -import com.netflix.conductor.common.validation.ValidationError; -import com.netflix.conductor.core.utils.Utils; -import com.netflix.conductor.metrics.Monitors; - -/** This class converts Hibernate {@link ValidationException} into http response. 
*/ -@RestControllerAdvice -@Order(ValidationExceptionMapper.ORDER) -public class ValidationExceptionMapper { - - private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationExceptionMapper.class); - - public static final int ORDER = Ordered.HIGHEST_PRECEDENCE; - - private final String host = Utils.getServerId(); - - @ExceptionHandler(ValidationException.class) - public ResponseEntity toResponse( - HttpServletRequest request, ValidationException exception) { - logException(request, exception); - - HttpStatus httpStatus; - - if (exception instanceof ConstraintViolationException) { - httpStatus = HttpStatus.BAD_REQUEST; - } else { - httpStatus = HttpStatus.INTERNAL_SERVER_ERROR; - Monitors.error("error", "error"); - } - - return new ResponseEntity<>(toErrorResponse(exception), httpStatus); - } - - private ErrorResponse toErrorResponse(ValidationException ve) { - if (ve instanceof ConstraintViolationException) { - return constraintViolationExceptionToErrorResponse((ConstraintViolationException) ve); - } else { - ErrorResponse result = new ErrorResponse(); - result.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value()); - result.setMessage(ve.getMessage()); - result.setInstance(host); - return result; - } - } - - private ErrorResponse constraintViolationExceptionToErrorResponse( - ConstraintViolationException exception) { - ErrorResponse errorResponse = new ErrorResponse(); - errorResponse.setStatus(HttpStatus.BAD_REQUEST.value()); - errorResponse.setMessage("Validation failed, check below errors for detail."); - - List validationErrors = new ArrayList<>(); - - exception - .getConstraintViolations() - .forEach( - e -> - validationErrors.add( - new ValidationError( - getViolationPath(e), - e.getMessage(), - getViolationInvalidValue(e.getInvalidValue())))); - - errorResponse.setValidationErrors(validationErrors); - return errorResponse; - } - - private String getViolationPath(final ConstraintViolation violation) { - final String propertyPath = 
violation.getPropertyPath().toString(); - return !"".equals(propertyPath) ? propertyPath : ""; - } - - private String getViolationInvalidValue(final Object invalidValue) { - if (invalidValue == null) { - return null; - } - - if (invalidValue.getClass().isArray()) { - if (invalidValue instanceof Object[]) { - // not helpful to return object array, skip it. - return null; - } else if (invalidValue instanceof boolean[]) { - return Arrays.toString((boolean[]) invalidValue); - } else if (invalidValue instanceof byte[]) { - return Arrays.toString((byte[]) invalidValue); - } else if (invalidValue instanceof char[]) { - return Arrays.toString((char[]) invalidValue); - } else if (invalidValue instanceof double[]) { - return Arrays.toString((double[]) invalidValue); - } else if (invalidValue instanceof float[]) { - return Arrays.toString((float[]) invalidValue); - } else if (invalidValue instanceof int[]) { - return Arrays.toString((int[]) invalidValue); - } else if (invalidValue instanceof long[]) { - return Arrays.toString((long[]) invalidValue); - } else if (invalidValue instanceof short[]) { - return Arrays.toString((short[]) invalidValue); - } - } - - // It is only helpful to return invalid value of primitive types - if (invalidValue.getClass().getName().startsWith("java.lang.")) { - return invalidValue.toString(); - } - - return null; - } - - private void logException(HttpServletRequest request, ValidationException exception) { - LOGGER.error( - "Error {} url: '{}'", - exception.getClass().getSimpleName(), - request.getRequestURI(), - exception); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java deleted file mode 100644 index 409328e81..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; - -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.model.BulkResponse; -import com.netflix.conductor.service.WorkflowBulkService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW_BULK; - -/** Synchronous Bulk APIs to process the workflows in batches */ -@RestController -@RequestMapping(WORKFLOW_BULK) -public class WorkflowBulkResource { - - private final WorkflowBulkService workflowBulkService; - - public WorkflowBulkResource(WorkflowBulkService workflowBulkService) { - this.workflowBulkService = workflowBulkService; - } - - /** - * Pause the list of workflows. - * - * @param workflowIds - list of workflow Ids to perform pause operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - @PutMapping("/pause") - @Operation(summary = "Pause the list of workflows") - public BulkResponse pauseWorkflow(@RequestBody List workflowIds) { - return workflowBulkService.pauseWorkflow(workflowIds); - } - - /** - * Resume the list of workflows. 
- * - * @param workflowIds - list of workflow Ids to perform resume operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - @PutMapping("/resume") - @Operation(summary = "Resume the list of workflows") - public BulkResponse resumeWorkflow(@RequestBody List workflowIds) { - return workflowBulkService.resumeWorkflow(workflowIds); - } - - /** - * Restart the list of workflows. - * - * @param workflowIds - list of workflow Ids to perform restart operation on - * @param useLatestDefinitions if true, use latest workflow and task definitions upon restart - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - @PostMapping("/restart") - @Operation(summary = "Restart the list of completed workflow") - public BulkResponse restart( - @RequestBody List workflowIds, - @RequestParam(value = "useLatestDefinitions", defaultValue = "false", required = false) - boolean useLatestDefinitions) { - return workflowBulkService.restart(workflowIds, useLatestDefinitions); - } - - /** - * Retry the last failed task for each workflow from the list. - * - * @param workflowIds - list of workflow Ids to perform retry operation on - * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - @PostMapping("/retry") - @Operation(summary = "Retry the last failed task for each workflow from the list") - public BulkResponse retry(@RequestBody List workflowIds) { - return workflowBulkService.retry(workflowIds); - } - - /** - * Terminate workflows execution. - * - * @param workflowIds - list of workflow Ids to perform terminate operation on - * @param reason - description to be specified for the terminated workflow for future - * references. 
- * @return bulk response object containing a list of succeeded workflows and a list of failed - * ones with errors - */ - @PostMapping("/terminate") - @Operation(summary = "Terminate workflows execution") - public BulkResponse terminate( - @RequestBody List workflowIds, - @RequestParam(value = "reason", required = false) String reason) { - return workflowBulkService.terminate(workflowIds, reason); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java deleted file mode 100644 index bb986592e..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.List; -import java.util.Map; - -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.bind.annotation.RestController; - -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.service.WorkflowService; - -import io.swagger.v3.oas.annotations.Operation; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW; - -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - -@RestController -@RequestMapping(WORKFLOW) -public class WorkflowResource { - - private final WorkflowService workflowService; - - public WorkflowResource(WorkflowService 
workflowService) { - this.workflowService = workflowService; - } - - @PostMapping(produces = TEXT_PLAIN_VALUE) - @Operation( - summary = - "Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain") - public String startWorkflow(@RequestBody StartWorkflowRequest request) { - return workflowService.startWorkflow(request); - } - - @PostMapping(value = "/{name}", produces = TEXT_PLAIN_VALUE) - @Operation( - summary = - "Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking") - public String startWorkflow( - @PathVariable("name") String name, - @RequestParam(value = "version", required = false) Integer version, - @RequestParam(value = "correlationId", required = false) String correlationId, - @RequestParam(value = "priority", defaultValue = "0", required = false) int priority, - @RequestBody Map input) { - return workflowService.startWorkflow(name, version, correlationId, priority, input); - } - - @GetMapping("/{name}/correlated/{correlationId}") - @Operation(summary = "Lists workflows for the given correlation id") - public List getWorkflows( - @PathVariable("name") String name, - @PathVariable("correlationId") String correlationId, - @RequestParam(value = "includeClosed", defaultValue = "false", required = false) - boolean includeClosed, - @RequestParam(value = "includeTasks", defaultValue = "false", required = false) - boolean includeTasks) { - return workflowService.getWorkflows(name, correlationId, includeClosed, includeTasks); - } - - @PostMapping(value = "/{name}/correlated") - @Operation(summary = "Lists workflows for the given correlation id list") - public Map> getWorkflows( - @PathVariable("name") String name, - @RequestParam(value = "includeClosed", defaultValue = "false", required = false) - boolean includeClosed, - @RequestParam(value = "includeTasks", defaultValue = "false", required = false) - boolean includeTasks, - @RequestBody List correlationIds) { - return 
workflowService.getWorkflows(name, includeClosed, includeTasks, correlationIds); - } - - @GetMapping("/{workflowId}") - @Operation(summary = "Gets the workflow by workflow id") - public Workflow getExecutionStatus( - @PathVariable("workflowId") String workflowId, - @RequestParam(value = "includeTasks", defaultValue = "true", required = false) - boolean includeTasks) { - return workflowService.getExecutionStatus(workflowId, includeTasks); - } - - @DeleteMapping("/{workflowId}/remove") - @Operation(summary = "Removes the workflow from the system") - public void delete( - @PathVariable("workflowId") String workflowId, - @RequestParam(value = "archiveWorkflow", defaultValue = "true", required = false) - boolean archiveWorkflow) { - workflowService.deleteWorkflow(workflowId, archiveWorkflow); - } - - @GetMapping("/running/{name}") - @Operation(summary = "Retrieve all the running workflows") - public List getRunningWorkflow( - @PathVariable("name") String workflowName, - @RequestParam(value = "version", defaultValue = "1", required = false) int version, - @RequestParam(value = "startTime", required = false) Long startTime, - @RequestParam(value = "endTime", required = false) Long endTime) { - return workflowService.getRunningWorkflows(workflowName, version, startTime, endTime); - } - - @PutMapping("/decide/{workflowId}") - @Operation(summary = "Starts the decision task for a workflow") - public void decide(@PathVariable("workflowId") String workflowId) { - workflowService.decideWorkflow(workflowId); - } - - @PutMapping("/{workflowId}/pause") - @Operation(summary = "Pauses the workflow") - public void pauseWorkflow(@PathVariable("workflowId") String workflowId) { - workflowService.pauseWorkflow(workflowId); - } - - @PutMapping("/{workflowId}/resume") - @Operation(summary = "Resumes the workflow") - public void resumeWorkflow(@PathVariable("workflowId") String workflowId) { - workflowService.resumeWorkflow(workflowId); - } - - 
@PutMapping("/{workflowId}/skiptask/{taskReferenceName}") - @Operation(summary = "Skips a given task from a current running workflow") - public void skipTaskFromWorkflow( - @PathVariable("workflowId") String workflowId, - @PathVariable("taskReferenceName") String taskReferenceName, - SkipTaskRequest skipTaskRequest) { - workflowService.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest); - } - - @PostMapping(value = "/{workflowId}/rerun", produces = TEXT_PLAIN_VALUE) - @Operation(summary = "Reruns the workflow from a specific task") - public String rerun( - @PathVariable("workflowId") String workflowId, - @RequestBody RerunWorkflowRequest request) { - return workflowService.rerunWorkflow(workflowId, request); - } - - @PostMapping("/{workflowId}/restart") - @Operation(summary = "Restarts a completed workflow") - @ResponseStatus( - value = HttpStatus.NO_CONTENT) // for backwards compatibility with 2.x client which - // expects a 204 for this request - public void restart( - @PathVariable("workflowId") String workflowId, - @RequestParam(value = "useLatestDefinitions", defaultValue = "false", required = false) - boolean useLatestDefinitions) { - workflowService.restartWorkflow(workflowId, useLatestDefinitions); - } - - @PostMapping("/{workflowId}/retry") - @Operation(summary = "Retries the last failed task") - @ResponseStatus( - value = HttpStatus.NO_CONTENT) // for backwards compatibility with 2.x client which - // expects a 204 for this request - public void retry( - @PathVariable("workflowId") String workflowId, - @RequestParam( - value = "resumeSubworkflowTasks", - defaultValue = "false", - required = false) - boolean resumeSubworkflowTasks) { - workflowService.retryWorkflow(workflowId, resumeSubworkflowTasks); - } - - @PostMapping("/{workflowId}/resetcallbacks") - @Operation(summary = "Resets callback times of all non-terminal SIMPLE tasks to 0") - @ResponseStatus( - value = HttpStatus.NO_CONTENT) // for backwards compatibility with 2.x client 
which - // expects a 204 for this request - public void resetWorkflow(@PathVariable("workflowId") String workflowId) { - workflowService.resetWorkflow(workflowId); - } - - @DeleteMapping("/{workflowId}") - @Operation(summary = "Terminate workflow execution") - public void terminate( - @PathVariable("workflowId") String workflowId, - @RequestParam(value = "reason", required = false) String reason) { - workflowService.terminateWorkflow(workflowId, reason); - } - - @Operation( - summary = "Search for workflows based on payload and other parameters", - description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." - + " If order is not specified, defaults to ASC.") - @GetMapping(value = "/search") - public SearchResult search( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return workflowService.searchWorkflows(start, size, sort, freeText, query); - } - - @Operation( - summary = "Search for workflows based on payload and other parameters", - description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." 
- + " If order is not specified, defaults to ASC.") - @GetMapping(value = "/search-v2") - public SearchResult searchV2( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return workflowService.searchWorkflowsV2(start, size, sort, freeText, query); - } - - @Operation( - summary = "Search for workflows based on task parameters", - description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." - + " If order is not specified, defaults to ASC") - @GetMapping(value = "/search-by-tasks") - public SearchResult searchWorkflowsByTasks( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return workflowService.searchWorkflowsByTasks(start, size, sort, freeText, query); - } - - @Operation( - summary = "Search for workflows based on task parameters", - description = - "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." 
- + " If order is not specified, defaults to ASC") - @GetMapping(value = "/search-by-tasks-v2") - public SearchResult searchWorkflowsByTasksV2( - @RequestParam(value = "start", defaultValue = "0", required = false) int start, - @RequestParam(value = "size", defaultValue = "100", required = false) int size, - @RequestParam(value = "sort", required = false) String sort, - @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, - @RequestParam(value = "query", required = false) String query) { - return workflowService.searchWorkflowsByTasksV2(start, size, sort, freeText, query); - } - - @Operation( - summary = - "Get the uri and path of the external storage where the workflow payload is to be stored") - @GetMapping("/externalstoragelocation") - public ExternalStorageLocation getExternalStorageLocation( - @RequestParam("path") String path, - @RequestParam("operation") String operation, - @RequestParam("payloadType") String payloadType) { - return workflowService.getExternalStorageLocation(path, operation, payloadType); - } -} diff --git a/rest/src/main/java/com/netflix/conductor/rest/startup/KitchenSinkInitializer.java b/rest/src/main/java/com/netflix/conductor/rest/startup/KitchenSinkInitializer.java deleted file mode 100644 index e69d9cada..000000000 --- a/rest/src/main/java/com/netflix/conductor/rest/startup/KitchenSinkInitializer.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.startup; - -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.boot.web.client.RestTemplateBuilder; -import org.springframework.context.event.EventListener; -import org.springframework.core.io.Resource; -import org.springframework.http.HttpEntity; -import org.springframework.stereotype.Component; -import org.springframework.util.FileCopyUtils; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; -import org.springframework.web.client.RestTemplate; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; - -import static org.springframework.http.HttpHeaders.CONTENT_TYPE; -import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; - -@Component -public class KitchenSinkInitializer { - - private static final Logger LOGGER = LoggerFactory.getLogger(KitchenSinkInitializer.class); - - private final RestTemplate restTemplate; - - @Value("${loadSample:false}") - private boolean loadSamples; - - @Value("${server.port:8080}") - private int port; - - @Value("classpath:./kitchensink/kitchensink.json") - private Resource kitchenSink; - - @Value("classpath:./kitchensink/sub_flow_1.json") - private Resource subFlow; - - @Value("classpath:./kitchensink/kitchenSink-ephemeralWorkflowWithStoredTasks.json") - private Resource 
ephemeralWorkflowWithStoredTasks; - - @Value("classpath:./kitchensink/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json") - private Resource ephemeralWorkflowWithEphemeralTasks; - - public KitchenSinkInitializer(RestTemplateBuilder restTemplateBuilder) { - this.restTemplate = restTemplateBuilder.build(); - } - - @EventListener(ApplicationReadyEvent.class) - public void setupKitchenSink() { - try { - if (loadSamples) { - LOGGER.info("Loading Kitchen Sink examples"); - createKitchenSink(); - } - } catch (Exception e) { - LOGGER.error("Error initializing kitchen sink", e); - } - } - - private void createKitchenSink() throws Exception { - List taskDefs = new LinkedList<>(); - TaskDef taskDef; - for (int i = 0; i < 40; i++) { - taskDef = new TaskDef("task_" + i, "task_" + i, 1, 0); - taskDef.setOwnerEmail("example@email.com"); - taskDefs.add(taskDef); - } - - taskDef = new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0); - taskDef.setOwnerEmail("example@email.com"); - taskDefs.add(taskDef); - - restTemplate.postForEntity(url("/api/metadata/taskdefs"), taskDefs, Object.class); - - /* - * Kitchensink example (stored workflow with stored tasks) - */ - MultiValueMap headers = new LinkedMultiValueMap<>(); - headers.add(CONTENT_TYPE, APPLICATION_JSON_VALUE); - HttpEntity request = new HttpEntity<>(readToString(kitchenSink), headers); - restTemplate.postForEntity(url("/api/metadata/workflow/"), request, Map.class); - - request = new HttpEntity<>(readToString(subFlow), headers); - restTemplate.postForEntity(url("/api/metadata/workflow/"), request, Map.class); - - restTemplate.postForEntity( - url("/api/workflow/kitchensink"), - Collections.singletonMap("task2Name", "task_5"), - String.class); - LOGGER.info("Kitchen sink workflow is created!"); - - /* - * Kitchensink example with ephemeral workflow and stored tasks - */ - request = new HttpEntity<>(readToString(ephemeralWorkflowWithStoredTasks), headers); - restTemplate.postForEntity(url("/api/workflow/"), 
request, String.class); - LOGGER.info("Ephemeral Kitchen sink workflow with stored tasks is created!"); - - /* - * Kitchensink example with ephemeral workflow and ephemeral tasks - */ - request = new HttpEntity<>(readToString(ephemeralWorkflowWithEphemeralTasks), headers); - restTemplate.postForEntity(url("/api/workflow/"), request, String.class); - LOGGER.info("Ephemeral Kitchen sink workflow with ephemeral tasks is created!"); - } - - private String readToString(Resource resource) throws IOException { - return FileCopyUtils.copyToString(new InputStreamReader(resource.getInputStream())); - } - - private String url(String path) { - return "http://localhost:" + port + path; - } -} diff --git a/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json b/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json deleted file mode 100644 index d7f3000c6..000000000 --- a/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "name": "kitchenSink-ephemeralWorkflowWithEphemeralTasks", - "workflowDef": { - "name": "ephemeralKitchenSinkEphemeralTasks", - "description": "Kitchensink ephemeral workflow with ephemeral tasks", - "version": 1, - "tasks": [ - { - "name": "task_10001", - "taskReferenceName": "task_10001", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "task_10001", - "description": "task_10001", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "event_task", - "taskReferenceName": "event_0", - 
"inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "EVENT", - "sink": "conductor" - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${task_2.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "task_10004", - "taskReferenceName": "task_10004", - "inputParameters": { - "mod": "${task_2.output.mod}", - "oddEven": "${task_2.output.oddEven}" - }, - "type": "SIMPLE", - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "task_10004", - "description": "task_10004", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${task_10004.output.dynamicTasks}", - "input": "${task_10004.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input" - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN" - } - ], - "1": [ - { - "name": "fork_join", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_100010", - "taskReferenceName": "task_100010", - "type": "SIMPLE", - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "task_100010", - "description": "task_100010", - 
"retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${task_10001.output.mod}", - "oddEven": "${task_10001.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - [ - { - "name": "task_100011", - "taskReferenceName": "task_100011", - "type": "SIMPLE", - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - "createdBy": null, - "updatedBy": null, - "name": "task_100011", - "description": "task_100011", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${task_10001.output.mod}", - "oddEven": "${task_10001.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "join2", - "type": "JOIN", - "joinOn": [ - "wf3", - "wf4" - ] - } - ] - } - }, - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_1", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_100030", - "taskReferenceName": "task_100030", - "inputParameters": { - "statuses": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "type": "SIMPLE", - "taskDefinition": { - "ownerApp": null, - "createTime": null, - "updateTime": null, - 
"createdBy": null, - "updatedBy": null, - "name": "task_100030", - "description": "task_100030", - "retryCount": 1, - "timeoutSeconds": 0, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "concurrentExecLimit": null, - "inputTemplate": {} - } - } - ], - "outputParameters": { - "statues": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "schemaVersion": 2, - "ownerEmail": "example@email.com" - }, - "input": { - "task2Name": "task_10005" - } -} diff --git a/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithStoredTasks.json b/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithStoredTasks.json deleted file mode 100644 index 4392f740f..000000000 --- a/rest/src/main/resources/kitchensink/kitchenSink-ephemeralWorkflowWithStoredTasks.json +++ /dev/null @@ -1,163 +0,0 @@ -{ - "name": "kitchenSink-ephemeralWorkflowWithStoredTasks", - "workflowDef": { - "name": "ephemeralKitchenSinkStoredTasks", - "description": "kitchensink workflow definition", - "version": 1, - "tasks": [ - { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "event_task", - "taskReferenceName": "event_0", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "EVENT", - "sink": "conductor" - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${task_2.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - 
"name": "task_4", - "taskReferenceName": "task_4", - "inputParameters": { - "mod": "${task_2.output.mod}", - "oddEven": "${task_2.output.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${task_4.output.dynamicTasks}", - "input": "${task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input" - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN" - } - ], - "1": [ - { - "name": "fork_join", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_10", - "taskReferenceName": "task_10", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - [ - { - "name": "task_11", - "taskReferenceName": "task_11", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "join2", - "type": "JOIN", - "joinOn": [ - "wf3", - "wf4" - ] - } - ] - } - }, - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_1", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "inputParameters": { - "statuses": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "type": "SIMPLE" - } - ], - "outputParameters": { - "statues": 
"${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "schemaVersion": 2, - "ownerEmail": "example@email.com" - }, - "input": { - "task2Name": "task_5" - } -} diff --git a/rest/src/main/resources/kitchensink/kitchensink.json b/rest/src/main/resources/kitchensink/kitchensink.json deleted file mode 100644 index 2b74589dd..000000000 --- a/rest/src/main/resources/kitchensink/kitchensink.json +++ /dev/null @@ -1,157 +0,0 @@ -{ - "name": "kitchensink", - "description": "kitchensink workflow", - "version": 1, - "tasks": [ - { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "event_task", - "taskReferenceName": "event_0", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "EVENT", - "sink": "conductor" - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute" - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${task_2.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "task_4", - "taskReferenceName": "task_4", - "inputParameters": { - "mod": "${task_2.output.mod}", - "oddEven": "${task_2.output.oddEven}" - }, - "type": "SIMPLE" - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${task_4.output.dynamicTasks}", - "input": "${task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input" - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN" - } - ], - "1": [ - { - "name": "fork_join", - 
"taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_10", - "taskReferenceName": "task_10", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - [ - { - "name": "task_11", - "taskReferenceName": "task_11", - "type": "SIMPLE" - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${task_1.output.mod}", - "oddEven": "${task_1.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "join2", - "type": "JOIN", - "joinOn": [ - "wf3", - "wf4" - ] - } - ] - } - }, - { - "name": "search_elasticsearch", - "taskReferenceName": "get_es_1", - "inputParameters": { - "http_request": { - "uri": "http://localhost:9200/conductor/_search?size=10", - "method": "GET" - } - }, - "type": "HTTP" - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "inputParameters": { - "statuses": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "type": "SIMPLE" - } - ], - "outputParameters": { - "statues": "${get_es_1.output..status}", - "workflowIds": "${get_es_1.output..workflowId}" - }, - "ownerEmail": "example@email.com", - "schemaVersion": 2 -} diff --git a/rest/src/main/resources/kitchensink/sub_flow_1.json b/rest/src/main/resources/kitchensink/sub_flow_1.json deleted file mode 100644 index 4b3dd81ab..000000000 --- a/rest/src/main/resources/kitchensink/sub_flow_1.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "sub_flow_1", - "description": "A Simple sub-workflow with 2 tasks", - "version": 1, - "tasks": [ - { - "name": "task_5", - "taskReferenceName": "task_5", - "inputParameters": {}, - "type": "SIMPLE" - }, 
- { - "name": "task_6", - "taskReferenceName": "task_6", - "type": "SIMPLE" - } - ], - "outputParameters": {}, - "schemaVersion": 2, - "ownerEmail": "example@email.com" -} \ No newline at end of file diff --git a/rest/src/main/resources/kitchensink/wf1.json b/rest/src/main/resources/kitchensink/wf1.json deleted file mode 100644 index 7684c1f42..000000000 --- a/rest/src/main/resources/kitchensink/wf1.json +++ /dev/null @@ -1,372 +0,0 @@ -{ - "createTime": 1477681181098, - "updateTime": 1478835878290, - "name": "main_workflow", - "description": "Kitchensink workflow", - "version": 1, - "tasks": [ - { - "name": "task_1", - "taskReferenceName": "task_1", - "inputParameters": { - "mod": "workflow.input.mod", - "oddEven": "workflow.input.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "dyntask", - "taskReferenceName": "task_2", - "inputParameters": { - "taskToExecute": "workflow.input.task2Name" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_3", - "taskReferenceName": "task_3", - "inputParameters": { - "mod": "task_2.output.mod", - "oddEven": "task_2.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "task_3.output.oddEven" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "task_4", - "taskReferenceName": "task_4", - "inputParameters": { - "mod": "task_3.output.mod", - "oddEven": "task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "task_4.output.dynamicTasks", - "input": "task_4.output.inputs" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": 
"dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_5", - "taskReferenceName": "task_5", - "inputParameters": { - "mod": "task_4.output.mod", - "oddEven": "task_4.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_6", - "taskReferenceName": "task_6", - "inputParameters": { - "mod": "task_5.output.mod", - "oddEven": "task_5.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - "1": [ - { - "name": "task_7", - "taskReferenceName": "task_7", - "inputParameters": { - "mod": "task_3.output.mod", - "oddEven": "task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_8", - "taskReferenceName": "task_8", - "inputParameters": { - "mod": "task_7.output.mod", - "oddEven": "task_7.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_9", - "taskReferenceName": "task_9", - "inputParameters": { - "mod": "task_8.output.mod", - "oddEven": "task_8.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "task_8.output.mod" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "task_12", - "taskReferenceName": "task_12", - "inputParameters": { - "mod": "task_9.output.mod", - "oddEven": "task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_13", - "taskReferenceName": "task_13", - "inputParameters": { - "mod": "task_12.output.mod", - "oddEven": "task_12.output.oddEven" - }, - "type": "SIMPLE", - 
"startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "task_12.output.mod", - "oddEven": "task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "callbackFromWorker": true, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "1": [ - { - "name": "task_15", - "taskReferenceName": "task_15", - "inputParameters": { - "mod": "task_9.output.mod", - "oddEven": "task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_16", - "taskReferenceName": "task_16", - "inputParameters": { - "mod": "task_15.output.mod", - "oddEven": "task_15.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "task_12.output.mod", - "oddEven": "task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "callbackFromWorker": true, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "4": [ - { - "name": "task_18", - "taskReferenceName": "task_18", - "inputParameters": { - "mod": "task_9.output.mod", - "oddEven": "task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_19", - "taskReferenceName": "task_19", - "inputParameters": { - "mod": "task_18.output.mod", - "oddEven": "task_18.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - "5": [ - { - "name": "task_21", - "taskReferenceName": "task_21", - "inputParameters": { - "mod": "task_9.output.mod", - "oddEven": "task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "task_12.output.mod", - "oddEven": "task_12.output.oddEven" - }, 
- "type": "SUB_WORKFLOW", - "startDelay": 0, - "callbackFromWorker": true, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "task_22", - "taskReferenceName": "task_22", - "inputParameters": { - "mod": "task_21.output.mod", - "oddEven": "task_21.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ] - }, - "defaultCase": [ - { - "name": "task_24", - "taskReferenceName": "task_24", - "inputParameters": { - "mod": "task_9.output.mod", - "oddEven": "task_9.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "task_12.output.mod", - "oddEven": "task_12.output.oddEven" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "callbackFromWorker": true, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "task_25", - "taskReferenceName": "task_25", - "inputParameters": { - "mod": "task_24.output.mod", - "oddEven": "task_24.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - "startDelay": 0, - "callbackFromWorker": true - } - ] - }, - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_28", - "taskReferenceName": "task_28", - "inputParameters": { - "mod": "task_3.output.mod", - "oddEven": "task_3.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_29", - "taskReferenceName": "task_29", - "inputParameters": { - "mod": "task_28.output.mod", - "oddEven": "task_28.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "inputParameters": { - "mod": "task_29.output.mod", - "oddEven": "task_29.output.oddEven" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - "schemaVersion": 1 -} \ 
No newline at end of file diff --git a/rest/src/main/resources/kitchensink/wf2.json b/rest/src/main/resources/kitchensink/wf2.json deleted file mode 100644 index b07115d77..000000000 --- a/rest/src/main/resources/kitchensink/wf2.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "createTime": 1477681181098, - "updateTime": 1478837752600, - "name": "sub_flow_1", - "description": "sub workflow", - "version": 1, - "tasks": [ - { - "name": "task_5", - "taskReferenceName": "task_5", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_28", - "taskReferenceName": "task_28", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "fork_join", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "task_10", - "taskReferenceName": "task_10", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_11", - "taskReferenceName": "task_11", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - [ - { - "name": "task_20", - "taskReferenceName": "task_20", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "task_21", - "taskReferenceName": "task_21", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ] - ], - "startDelay": 0, - "callbackFromWorker": true - }, - { - "name": "join", - "taskReferenceName": "join", - "type": "JOIN", - "startDelay": 0, - "joinOn": [ - "task_21", - "task_11" - ], - "callbackFromWorker": true - }, - { - "name": "task_30", - "taskReferenceName": "task_30", - "type": "SIMPLE", - "startDelay": 0, - "callbackFromWorker": true - } - ], - "outputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "schemaVersion": 2 -} \ No newline at end of file diff --git a/rest/src/main/resources/static/favicon.ico 
b/rest/src/main/resources/static/favicon.ico deleted file mode 100644 index b08367220..000000000 Binary files a/rest/src/main/resources/static/favicon.ico and /dev/null differ diff --git a/rest/src/main/resources/static/index.html b/rest/src/main/resources/static/index.html deleted file mode 100644 index ef25cbcf6..000000000 --- a/rest/src/main/resources/static/index.html +++ /dev/null @@ -1,60 +0,0 @@ - - - - - - Netflix Conductor - - - - - -

    -
    - Conductor Logo -
    -

    - - -
    - - diff --git a/rest/src/main/resources/static/logo.png b/rest/src/main/resources/static/logo.png deleted file mode 100644 index 132d52cde..000000000 Binary files a/rest/src/main/resources/static/logo.png and /dev/null differ diff --git a/rest/src/test/java/com/netflix/conductor/rest/controllers/AdminResourceTest.java b/rest/src/test/java/com/netflix/conductor/rest/controllers/AdminResourceTest.java deleted file mode 100644 index 83639be47..000000000 --- a/rest/src/test/java/com/netflix/conductor/rest/controllers/AdminResourceTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.service.AdminService; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class AdminResourceTest { - - @Mock private AdminService mockAdminService; - - @Mock private AdminResource adminResource; - - @Before - public void before() { - this.mockAdminService = mock(AdminService.class); - this.adminResource = new AdminResource(mockAdminService); - } - - @Test - public void testGetAllConfig() { - Map configs = new HashMap<>(); - configs.put("config1", "test"); - when(mockAdminService.getAllConfig()).thenReturn(configs); - assertEquals(configs, adminResource.getAllConfig()); - } - - @Test - public void testView() { - Task task = new Task(); - task.setReferenceTaskName("test"); - List listOfTask = new ArrayList<>(); - listOfTask.add(task); - when(mockAdminService.getListOfPendingTask(anyString(), anyInt(), anyInt())) - .thenReturn(listOfTask); - assertEquals(listOfTask, adminResource.view("testTask", 0, 100)); - } - - @Test - public void testRequeueSweep() { - String workflowId = "w123"; - 
when(mockAdminService.requeueSweep(anyString())).thenReturn(workflowId); - assertEquals(workflowId, adminResource.requeueSweep(workflowId)); - } - - @Test - public void testGetEventQueues() { - adminResource.getEventQueues(false); - verify(mockAdminService, times(1)).getEventQueues(anyBoolean()); - } -} diff --git a/rest/src/test/java/com/netflix/conductor/rest/controllers/EventResourceTest.java b/rest/src/test/java/com/netflix/conductor/rest/controllers/EventResourceTest.java deleted file mode 100644 index ae1adffb6..000000000 --- a/rest/src/test/java/com/netflix/conductor/rest/controllers/EventResourceTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.service.EventService; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class EventResourceTest { - - private EventResource eventResource; - - @Mock private EventService mockEventService; - - @Before - public void setUp() { - this.mockEventService = mock(EventService.class); - this.eventResource = new EventResource(this.mockEventService); - } - - @Test - public void testAddEventHandler() { - EventHandler eventHandler = new EventHandler(); - eventResource.addEventHandler(eventHandler); - verify(mockEventService, times(1)).addEventHandler(any(EventHandler.class)); - } - - @Test - public void testUpdateEventHandler() { - EventHandler eventHandler = new EventHandler(); - eventResource.updateEventHandler(eventHandler); - verify(mockEventService, times(1)).updateEventHandler(any(EventHandler.class)); - } - - @Test - public void testRemoveEventHandlerStatus() { - eventResource.removeEventHandlerStatus("testEvent"); - verify(mockEventService, times(1)).removeEventHandlerStatus(anyString()); - } - - @Test - public void testGetEventHandlersForEvent() 
{ - EventHandler eventHandler = new EventHandler(); - eventResource.addEventHandler(eventHandler); - List listOfEventHandler = new ArrayList<>(); - listOfEventHandler.add(eventHandler); - when(mockEventService.getEventHandlersForEvent(anyString(), anyBoolean())) - .thenReturn(listOfEventHandler); - assertEquals(listOfEventHandler, eventResource.getEventHandlersForEvent("testEvent", true)); - } - - @Test - public void testGetEventHandlers() { - EventHandler eventHandler = new EventHandler(); - eventResource.addEventHandler(eventHandler); - List listOfEventHandler = new ArrayList<>(); - listOfEventHandler.add(eventHandler); - when(mockEventService.getEventHandlers()).thenReturn(listOfEventHandler); - assertEquals(listOfEventHandler, eventResource.getEventHandlers()); - } -} diff --git a/rest/src/test/java/com/netflix/conductor/rest/controllers/MetadataResourceTest.java b/rest/src/test/java/com/netflix/conductor/rest/controllers/MetadataResourceTest.java deleted file mode 100644 index 803933af1..000000000 --- a/rest/src/test/java/com/netflix/conductor/rest/controllers/MetadataResourceTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.service.MetadataService; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyList; -import static org.mockito.Mockito.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class MetadataResourceTest { - - private MetadataResource metadataResource; - - private MetadataService mockMetadataService; - - @Before - public void before() { - this.mockMetadataService = mock(MetadataService.class); - this.metadataResource = new MetadataResource(this.mockMetadataService); - } - - @Test - public void testCreateWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - metadataResource.create(workflowDef); - verify(mockMetadataService, times(1)).registerWorkflowDef(any(WorkflowDef.class)); - } - - @Test - public void testUpdateWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - List listOfWorkflowDef = new ArrayList<>(); - listOfWorkflowDef.add(workflowDef); - metadataResource.update(listOfWorkflowDef); - verify(mockMetadataService, times(1)).updateWorkflowDef(anyList()); - } - - @Test - public void testGetWorkflowDef() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - workflowDef.setVersion(1); - 
workflowDef.setDescription("test"); - - when(mockMetadataService.getWorkflowDef(anyString(), any())).thenReturn(workflowDef); - assertEquals(workflowDef, metadataResource.get("test", 1)); - } - - @Test - public void testGetAllWorkflowDef() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - workflowDef.setVersion(1); - workflowDef.setDescription("test"); - - List listOfWorkflowDef = new ArrayList<>(); - listOfWorkflowDef.add(workflowDef); - - when(mockMetadataService.getWorkflowDefs()).thenReturn(listOfWorkflowDef); - assertEquals(listOfWorkflowDef, metadataResource.getAll()); - } - - @Test - public void testUnregisterWorkflowDef() throws Exception { - metadataResource.unregisterWorkflowDef("test", 1); - verify(mockMetadataService, times(1)).unregisterWorkflowDef(anyString(), any()); - } - - @Test - public void testRegisterListOfTaskDef() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setDescription("desc"); - List listOfTaskDefs = new ArrayList<>(); - listOfTaskDefs.add(taskDef); - - metadataResource.registerTaskDef(listOfTaskDefs); - verify(mockMetadataService, times(1)).registerTaskDef(listOfTaskDefs); - } - - @Test - public void testRegisterTaskDef() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setDescription("desc"); - metadataResource.registerTaskDef(taskDef); - verify(mockMetadataService, times(1)).updateTaskDef(taskDef); - } - - @Test - public void testGetAllTaskDefs() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setDescription("desc"); - List listOfTaskDefs = new ArrayList<>(); - listOfTaskDefs.add(taskDef); - - when(mockMetadataService.getTaskDefs()).thenReturn(listOfTaskDefs); - assertEquals(listOfTaskDefs, metadataResource.getTaskDefs()); - } - - @Test - public void testGetTaskDef() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("test"); - taskDef.setDescription("desc"); - - 
when(mockMetadataService.getTaskDef(anyString())).thenReturn(taskDef); - assertEquals(taskDef, metadataResource.getTaskDef("test")); - } - - @Test - public void testUnregisterTaskDef() { - metadataResource.unregisterTaskDef("test"); - verify(mockMetadataService, times(1)).unregisterTaskDef(anyString()); - } -} diff --git a/rest/src/test/java/com/netflix/conductor/rest/controllers/TaskResourceTest.java b/rest/src/test/java/com/netflix/conductor/rest/controllers/TaskResourceTest.java deleted file mode 100644 index b9dd18d23..000000000 --- a/rest/src/test/java/com/netflix/conductor/rest/controllers/TaskResourceTest.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.http.ResponseEntity; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.service.TaskService; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class TaskResourceTest { - - private TaskService mockTaskService; - - private TaskResource taskResource; - - @Before - public void before() { - this.mockTaskService = mock(TaskService.class); - this.taskResource = new TaskResource(this.mockTaskService); - } - - @Test - public void testPoll() { - Task task = new Task(); - task.setTaskType("SIMPLE"); - task.setWorkerId("123"); - task.setDomain("test"); - - 
when(mockTaskService.poll(anyString(), anyString(), anyString())).thenReturn(task); - assertEquals(ResponseEntity.ok(task), taskResource.poll("SIMPLE", "123", "test")); - } - - @Test - public void testBatchPoll() { - Task task = new Task(); - task.setTaskType("SIMPLE"); - task.setWorkerId("123"); - task.setDomain("test"); - List listOfTasks = new ArrayList<>(); - listOfTasks.add(task); - - when(mockTaskService.batchPoll(anyString(), anyString(), anyString(), anyInt(), anyInt())) - .thenReturn(listOfTasks); - assertEquals( - ResponseEntity.ok(listOfTasks), - taskResource.batchPoll("SIMPLE", "123", "test", 1, 100)); - } - - @Test - public void testUpdateTask() { - TaskResult taskResult = new TaskResult(); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId("123"); - when(mockTaskService.updateTask(any(TaskResult.class))).thenReturn("123"); - assertEquals("123", taskResource.updateTask(taskResult)); - } - - @Test - public void testLog() { - taskResource.log("123", "test log"); - verify(mockTaskService, times(1)).log(anyString(), anyString()); - } - - @Test - public void testGetTaskLogs() { - List listOfLogs = new ArrayList<>(); - listOfLogs.add(new TaskExecLog("test log")); - when(mockTaskService.getTaskLogs(anyString())).thenReturn(listOfLogs); - assertEquals(listOfLogs, taskResource.getTaskLogs("123")); - } - - @Test - public void testGetTask() { - Task task = new Task(); - task.setTaskType("SIMPLE"); - task.setWorkerId("123"); - task.setDomain("test"); - task.setStatus(Task.Status.IN_PROGRESS); - when(mockTaskService.getTask(anyString())).thenReturn(task); - ResponseEntity entity = taskResource.getTask("123"); - assertNotNull(entity); - assertEquals(task, entity.getBody()); - } - - @Test - public void testSize() { - Map map = new HashMap<>(); - map.put("test1", 1); - map.put("test2", 2); - - List list = new ArrayList<>(); - list.add("test1"); - list.add("test2"); - - when(mockTaskService.getTaskQueueSizes(anyList())).thenReturn(map); - 
assertEquals(map, taskResource.size(list)); - } - - @Test - public void testAllVerbose() { - Map map = new HashMap<>(); - map.put("queue1", 1L); - map.put("queue2", 2L); - - Map> mapOfMap = new HashMap<>(); - mapOfMap.put("queue", map); - - Map>> queueSizeMap = new HashMap<>(); - queueSizeMap.put("queue", mapOfMap); - - when(mockTaskService.allVerbose()).thenReturn(queueSizeMap); - assertEquals(queueSizeMap, taskResource.allVerbose()); - } - - @Test - public void testQueueDetails() { - Map map = new HashMap<>(); - map.put("queue1", 1L); - map.put("queue2", 2L); - - when(mockTaskService.getAllQueueDetails()).thenReturn(map); - assertEquals(map, taskResource.all()); - } - - @Test - public void testGetPollData() { - PollData pollData = new PollData("queue", "test", "w123", 100); - List listOfPollData = new ArrayList<>(); - listOfPollData.add(pollData); - - when(mockTaskService.getPollData(anyString())).thenReturn(listOfPollData); - assertEquals(listOfPollData, taskResource.getPollData("w123")); - } - - @Test - public void testGetAllPollData() { - PollData pollData = new PollData("queue", "test", "w123", 100); - List listOfPollData = new ArrayList<>(); - listOfPollData.add(pollData); - - when(mockTaskService.getAllPollData()).thenReturn(listOfPollData); - assertEquals(listOfPollData, taskResource.getAllPollData()); - } - - @Test - public void testRequeueTaskType() { - when(mockTaskService.requeuePendingTask(anyString())).thenReturn("1"); - assertEquals("1", taskResource.requeuePendingTask("SIMPLE")); - } - - @Test - public void testSearch() { - Task task = new Task(); - task.setTaskType("SIMPLE"); - task.setWorkerId("123"); - task.setDomain("test"); - task.setStatus(Task.Status.IN_PROGRESS); - TaskSummary taskSummary = new TaskSummary(task); - List listOfTaskSummary = Collections.singletonList(taskSummary); - SearchResult searchResult = new SearchResult<>(100, listOfTaskSummary); - - when(mockTaskService.search(0, 100, "asc", "*", "*")).thenReturn(searchResult); - 
assertEquals(searchResult, taskResource.search(0, 100, "asc", "*", "*")); - } - - @Test - public void testSearchV2() { - Task task = new Task(); - task.setTaskType("SIMPLE"); - task.setWorkerId("123"); - task.setDomain("test"); - task.setStatus(Task.Status.IN_PROGRESS); - List listOfTasks = Collections.singletonList(task); - SearchResult searchResult = new SearchResult<>(100, listOfTasks); - - when(mockTaskService.searchV2(0, 100, "asc", "*", "*")).thenReturn(searchResult); - assertEquals(searchResult, taskResource.searchV2(0, 100, "asc", "*", "*")); - } - - @Test - public void testGetExternalStorageLocation() { - ExternalStorageLocation externalStorageLocation = mock(ExternalStorageLocation.class); - when(mockTaskService.getExternalStorageLocation("path", "operation", "payloadType")) - .thenReturn(externalStorageLocation); - assertEquals( - externalStorageLocation, - taskResource.getExternalStorageLocation("path", "operation", "payloadType")); - } -} diff --git a/rest/src/test/java/com/netflix/conductor/rest/controllers/WorkflowResourceTest.java b/rest/src/test/java/com/netflix/conductor/rest/controllers/WorkflowResourceTest.java deleted file mode 100644 index d12896214..000000000 --- a/rest/src/test/java/com/netflix/conductor/rest/controllers/WorkflowResourceTest.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.rest.controllers; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.service.WorkflowService; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class WorkflowResourceTest { - - @Mock private WorkflowService mockWorkflowService; - - private WorkflowResource workflowResource; - - @Before - public void before() { - this.mockWorkflowService = mock(WorkflowService.class); - this.workflowResource = new WorkflowResource(this.mockWorkflowService); - } - - @Test - public void testStartWorkflow() { - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName("w123"); - Map input = new HashMap<>(); - input.put("1", "abc"); - 
startWorkflowRequest.setInput(input); - String workflowID = "w112"; - when(mockWorkflowService.startWorkflow(any(StartWorkflowRequest.class))) - .thenReturn(workflowID); - assertEquals("w112", workflowResource.startWorkflow(startWorkflowRequest)); - } - - @Test - public void testStartWorkflowParam() { - Map input = new HashMap<>(); - input.put("1", "abc"); - String workflowID = "w112"; - when(mockWorkflowService.startWorkflow( - anyString(), anyInt(), anyString(), anyInt(), anyMap())) - .thenReturn(workflowID); - assertEquals("w112", workflowResource.startWorkflow("test1", 1, "c123", 0, input)); - } - - @Test - public void getWorkflows() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("123"); - ArrayList listOfWorkflows = - new ArrayList<>() { - { - add(workflow); - } - }; - when(mockWorkflowService.getWorkflows(anyString(), anyString(), anyBoolean(), anyBoolean())) - .thenReturn(listOfWorkflows); - assertEquals(listOfWorkflows, workflowResource.getWorkflows("test1", "123", true, true)); - } - - @Test - public void testGetWorklfowsMultipleCorrelationId() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - List workflowArrayList = - new ArrayList<>() { - { - add(workflow); - } - }; - - List correlationIdList = - new ArrayList<>() { - { - add("c123"); - } - }; - - Map> workflowMap = new HashMap<>(); - workflowMap.put("c123", workflowArrayList); - - when(mockWorkflowService.getWorkflows(anyString(), anyBoolean(), anyBoolean(), anyList())) - .thenReturn(workflowMap); - assertEquals( - workflowMap, workflowResource.getWorkflows("test", true, true, correlationIdList)); - } - - @Test - public void testGetExecutionStatus() { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("c123"); - - when(mockWorkflowService.getExecutionStatus(anyString(), anyBoolean())) - .thenReturn(workflow); - assertEquals(workflow, workflowResource.getExecutionStatus("w123", true)); - } - - @Test - public void testDelete() { - 
workflowResource.delete("w123", true); - verify(mockWorkflowService, times(1)).deleteWorkflow(anyString(), anyBoolean()); - } - - @Test - public void testGetRunningWorkflow() { - List listOfWorklfows = - new ArrayList<>() { - { - add("w123"); - } - }; - when(mockWorkflowService.getRunningWorkflows(anyString(), anyInt(), anyLong(), anyLong())) - .thenReturn(listOfWorklfows); - assertEquals(listOfWorklfows, workflowResource.getRunningWorkflow("w123", 1, 12L, 13L)); - } - - @Test - public void testDecide() { - workflowResource.decide("w123"); - verify(mockWorkflowService, times(1)).decideWorkflow(anyString()); - } - - @Test - public void testPauseWorkflow() { - workflowResource.pauseWorkflow("w123"); - verify(mockWorkflowService, times(1)).pauseWorkflow(anyString()); - } - - @Test - public void testResumeWorkflow() { - workflowResource.resumeWorkflow("test"); - verify(mockWorkflowService, times(1)).resumeWorkflow(anyString()); - } - - @Test - public void testSkipTaskFromWorkflow() { - workflowResource.skipTaskFromWorkflow("test", "testTask", null); - verify(mockWorkflowService, times(1)) - .skipTaskFromWorkflow(anyString(), anyString(), isNull()); - } - - @Test - public void testRerun() { - RerunWorkflowRequest request = new RerunWorkflowRequest(); - workflowResource.rerun("test", request); - verify(mockWorkflowService, times(1)) - .rerunWorkflow(anyString(), any(RerunWorkflowRequest.class)); - } - - @Test - public void restart() { - workflowResource.restart("w123", false); - verify(mockWorkflowService, times(1)).restartWorkflow(anyString(), anyBoolean()); - } - - @Test - public void testRetry() { - workflowResource.retry("w123", false); - verify(mockWorkflowService, times(1)).retryWorkflow(anyString(), anyBoolean()); - } - - @Test - public void testResetWorkflow() { - workflowResource.resetWorkflow("w123"); - verify(mockWorkflowService, times(1)).resetWorkflow(anyString()); - } - - @Test - public void testTerminate() { - workflowResource.terminate("w123", "test"); - 
verify(mockWorkflowService, times(1)).terminateWorkflow(anyString(), anyString()); - } - - @Test - public void testSearch() { - workflowResource.search(0, 100, "asc", "*", "*"); - verify(mockWorkflowService, times(1)) - .searchWorkflows(anyInt(), anyInt(), anyString(), anyString(), anyString()); - } - - @Test - public void testSearchV2() { - workflowResource.searchV2(0, 100, "asc", "*", "*"); - verify(mockWorkflowService).searchWorkflowsV2(0, 100, "asc", "*", "*"); - } - - @Test - public void testSearchWorkflowsByTasks() { - workflowResource.searchWorkflowsByTasks(0, 100, "asc", "*", "*"); - verify(mockWorkflowService, times(1)) - .searchWorkflowsByTasks(anyInt(), anyInt(), anyString(), anyString(), anyString()); - } - - @Test - public void testSearchWorkflowsByTasksV2() { - workflowResource.searchWorkflowsByTasksV2(0, 100, "asc", "*", "*"); - verify(mockWorkflowService).searchWorkflowsByTasksV2(0, 100, "asc", "*", "*"); - } - - @Test - public void testGetExternalStorageLocation() { - workflowResource.getExternalStorageLocation("path", "operation", "payloadType"); - verify(mockWorkflowService).getExternalStorageLocation("path", "operation", "payloadType"); - } -} diff --git a/server/build.gradle b/server/build.gradle deleted file mode 100644 index 5a9183081..000000000 --- a/server/build.gradle +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -plugins { - id 'org.springframework.boot' -} - -dependencies { - - implementation project(':conductor-rest') - implementation project(':conductor-core') - implementation project(':conductor-redis-persistence') - implementation project(':conductor-cassandra-persistence') - implementation project(':conductor-es6-persistence') - implementation project(':conductor-grpc-server') - implementation project(':conductor-redis-lock') - implementation project(':conductor-redis-concurrency-limit') - implementation project(':conductor-http-task') - implementation project(':conductor-json-jq-task') - implementation project(':conductor-awss3-storage') - implementation project(':conductor-awssqs-event-queue') - - implementation 'org.springframework.boot:spring-boot-starter' - implementation 'org.springframework.boot:spring-boot-starter-validation' - implementation 'org.springframework.boot:spring-boot-starter-web' - implementation 'org.springframework.retry:spring-retry' - - implementation 'org.springframework.boot:spring-boot-starter-log4j2' - implementation 'org.apache.logging.log4j:log4j-web' - - implementation 'org.springframework.boot:spring-boot-starter-actuator' - - implementation "org.springdoc:springdoc-openapi-ui:${revOpenapi}" - - runtimeOnly "org.glassfish.jaxb:jaxb-runtime:${revJAXB}" - - testImplementation project(':conductor-rest') - testImplementation project(':conductor-common') - testImplementation "io.grpc:grpc-testing:${revGrpc}" - testImplementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - testImplementation "io.grpc:grpc-protobuf:${revGrpc}" - testImplementation "io.grpc:grpc-stub:${revGrpc}" -} - -jar { - enabled = true -} 
- -bootJar { - mainClass = 'com.netflix.conductor.Conductor' - classifier = 'boot' -} - -springBoot { - buildInfo() -} diff --git a/server/dependencies.lock b/server/dependencies.lock deleted file mode 100644 index 664e41897..000000000 --- a/server/dependencies.lock +++ /dev/null @@ -1,1481 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "com.netflix.conductor:conductor-awss3-storage": { - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - "com.netflix.conductor:conductor-redis-concurrency-limit": { - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": 
{ - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - }, - "productionRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue" - ], - "locked": "1.11.86" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.10.2" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "31.0.1-android" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.19.2" - }, - "com.jayway.jsonpath:json-path": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-awss3-storage": { - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - "com.netflix.conductor:conductor-redis-concurrency-limit": { - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.20" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.thoughtworks.xstream:xstream": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "1.4.19" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-http-task" - ], - "locked": "1.1.1" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - 
"com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - 
"org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "locked": "2.3.3" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.13.3" - }, - "org.springdoc:springdoc-openapi-ui": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.7.1" - } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-awssqs-event-queue" - ], - "locked": "1.11.86" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.10.2" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "31.0.1-android" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.19.2" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-awss3-storage": { - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - "com.netflix.conductor:conductor-redis-concurrency-limit": { - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - 
"project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.20" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.thoughtworks.xstream:xstream": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "1.4.19" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - 
"javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-http-task" - ], - "locked": "1.1.1" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - 
"org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "locked": "2.3.3" - }, - 
"org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.13.3" - }, - "org.springdoc:springdoc-openapi-ui": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.7.1" - } - }, - "testCompileClasspath": { - "com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-awss3-storage": { - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - 
"com.netflix.conductor:conductor-redis-concurrency-limit": { - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "io.grpc:grpc-protobuf": { - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "locked": "1.47.0" - }, - "io.grpc:grpc-testing": { - "locked": "1.47.0" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.springdoc:springdoc-openapi-ui": { - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue" - ], - "locked": "1.11.86" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.10.2" - }, - 
"com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "31.0.1-android" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-awss3-storage": { - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-core", - 
"com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - "com.netflix.conductor:conductor-redis-concurrency-limit": { - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": 
"2.0.20" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.thoughtworks.xstream:xstream": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "1.4.19" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-testing": { - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "javax.ws.rs:jsr311-api": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-http-task" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - 
"com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - 
"com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.17.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "locked": "2.3.3" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.13.3" - }, - "org.springdoc:springdoc-openapi-ui": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.7.1" - } - } -} \ No newline at end of file diff --git a/server/src/main/java/com/netflix/conductor/Conductor.java b/server/src/main/java/com/netflix/conductor/Conductor.java deleted file mode 100644 index de253a7e2..000000000 --- a/server/src/main/java/com/netflix/conductor/Conductor.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor; - -import java.io.IOException; -import java.util.Properties; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; -import org.springframework.core.io.FileSystemResource; - -// Prevents from the datasource beans to be loaded, AS they are needed only for specific databases. -// In case that SQL database is selected this class will be imported back in the appropriate -// database persistence module. -@SpringBootApplication(exclude = DataSourceAutoConfiguration.class) -public class Conductor { - - private static final Logger log = LoggerFactory.getLogger(Conductor.class); - - public static void main(String[] args) throws IOException { - loadExternalConfig(); - - SpringApplication.run(Conductor.class, args); - } - - /** - * Reads properties from the location specified in CONDUCTOR_CONFIG_FILE and sets - * them as system properties so they override the default properties. - * - *

    Spring Boot property hierarchy is documented here, - * https://docs.spring.io/spring-boot/docs/current/reference/html/spring-boot-features.html#boot-features-external-config - * - * @throws IOException if file can't be read. - */ - private static void loadExternalConfig() throws IOException { - String configFile = System.getProperty("CONDUCTOR_CONFIG_FILE"); - if (StringUtils.isNotBlank(configFile)) { - FileSystemResource resource = new FileSystemResource(configFile); - if (resource.exists()) { - Properties properties = new Properties(); - properties.load(resource.getInputStream()); - properties.forEach( - (key, value) -> System.setProperty((String) key, (String) value)); - log.info("Loaded {} properties from {}", properties.size(), configFile); - } else { - log.warn("Ignoring {} since it does not exist", configFile); - } - } - } -} diff --git a/server/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/server/src/main/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index 76a8097fe..000000000 --- a/server/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "properties": [ - { - "name": "conductor.db.type", - "type": "java.lang.String", - "description": "The type of database to be used while running the Conductor application." - }, - { - "name": "conductor.indexing.enabled", - "type": "java.lang.Boolean", - "description": "Enable indexing to elasticsearch. If set to false, a no-op implementation will be used." - }, - { - "name": "conductor.grpc-server.enabled", - "type": "java.lang.Boolean", - "description": "Enable the gRPC server." - } - ], - "hints": [ - { - "name": "conductor.db.type", - "values": [ - { - "value": "memory", - "description": "Use in-memory redis as the database implementation." - }, - { - "value": "cassandra", - "description": "Use cassandra as the database implementation." 
- }, - { - "value": "mysql", - "description": "Use MySQL as the database implementation." - }, - { - "value": "postgres", - "description": "Use Postgres as the database implementation." - }, - { - "value": "dynomite", - "description": "Use Dynomite as the database implementation." - }, - { - "value": "redis_cluster", - "description": "Use Redis Cluster configuration as the database implementation." - }, - { - "value": "redis_sentinel", - "description": "Use Redis Sentinel configuration as the database implementation." - }, - { - "value": "redis_standalone", - "description": "Use Redis Standalone configuration as the database implementation." - } - ] - } - ] -} diff --git a/server/src/main/resources/application.properties b/server/src/main/resources/application.properties deleted file mode 100644 index 4ffb2cd26..000000000 --- a/server/src/main/resources/application.properties +++ /dev/null @@ -1,125 +0,0 @@ -# -# Copyright 2021 Netflix, Inc. -#

    -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -#

    -# http://www.apache.org/licenses/LICENSE-2.0 -#

    -# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. -# - -spring.application.name=conductor -springdoc.api-docs.path=/api-docs - -conductor.db.type=memory - -conductor.indexing.enabled=false - -#Redis configuration details. -#format is host:port:rack separated by semicolon -#Auth is supported. Password is taken from host[0]. format: host:port:rack:password -conductor.redis.hosts=host1:port:rack;host2:port:rack:host3:port:rack - -#namespace for the keys stored in Dynomite/Redis -conductor.redis.workflowNamespacePrefix= - -#namespace prefix for the dyno queues -conductor.redis.queueNamespacePrefix= - -#no. of threads allocated to dyno-queues -queues.dynomite.threads=10 - -# By default with dynomite, we want the repair service enabled -conductor.workflow-repair-service.enabled=true - -#non-quorum port used to connect to local redis. Used by dyno-queues -conductor.redis.queuesNonQuorumPort=22122 - -# For a single node dynomite or redis server, make sure the value below is set to same as rack specified in the "workflow.dynomite.cluster.hosts" property. -conductor.redis.availabilityZone=us-east-1c -#conductor.redis.maxIdleConnections=8 -#conductor.redis.minIdleConnections=5 -#conductor.redis.minEvictableIdleTimeMillis = 1800000 -#conductor.redis.timeBetweenEvictionRunsMillis = -1L -#conductor.redis.testWhileIdle = false -#conductor.redis.numTestsPerEvictionRun = 3 - -#Transport address to elasticsearch -conductor.elasticsearch.url=localhost:9300 - -#Name of the elasticsearch cluster -conductor.elasticsearch.indexName=conductor - -#Elasticsearch major release version. 
-conductor.elasticsearch.version=6 -#conductor.elasticsearch.version=7 - -# Default event queue type to listen on for wait task -conductor.default-event-queue.type=sqs - -#zookeeper -# conductor.zookeeper-lock.connectionString=host1.2181,host2:2181,host3:2181 -# conductor.zookeeper-lock.sessionTimeoutMs -# conductor.zookeeper-lock.connectionTimeoutMs -# conductor.zookeeper-lock.namespace - -#disable locking during workflow execution -conductor.app.workflow-execution-lock-enabled=false -conductor.workflow-execution-lock.type=noop_lock - -#Redis cluster settings for locking module -# conductor.redis-lock.serverType=single -#Comma separated list of server nodes -# conductor.redis-lock.serverAddress=redis://127.0.0.1:6379 -#Redis sentinel master name -# conductor.redis-lock.serverMasterName=master -# conductor.redis-lock.namespace - -#Following properties set for using AMQP events and tasks with conductor: -#(To enable support of AMQP queues) -#conductor.event-queues.amqp.enabled=true - -# Here are the settings with default values: -#conductor.event-queues.amqp.hosts= -#conductor.event-queues.amqp.username= -#conductor.event-queues.amqp.password= - -#conductor.event-queues.amqp.virtualHost=/ -#conductor.event-queues.amqp.port=5672 -#conductor.event-queues.amqp.useNio=false -#conductor.event-queues.amqp.batchSize=1 -#conductor.event-queues.amqp.pollTimeDuration=100ms -#conductor.event-queues.amqp.queueType=classic -#conductor.event-queues.amqp.sequentialMsgProcessing=true -#conductor.event-queues.amqp.connectionTimeoutInMilliSecs=180000 -#conductor.event-queues.amqp.networkRecoveryIntervalInMilliSecs=5000 -#conductor.event-queues.amqp.requestHeartbeatTimeoutInSecs=30 -#conductor.event-queues.amqp.handshakeTimeoutInMilliSecs=180000 -#conductor.event-queues.amqp.maxChannelCount=5000 -#conductor.event-queues.amqp.limit=50 -#conductor.event-queues.amqp.duration=1000 -#conductor.event-queues.amqp.retryType=REGULARINTERVALS - -#conductor.event-queues.amqp.useExchange=true( 
exchange or queue) -#conductor.event-queues.amqp.listenerQueuePrefix=myqueue -# Use durable queue ? -#conductor.event-queues.amqp.durable=false -# Use exclusive queue ? -#conductor.event-queues.amqp.exclusive=false -# Enable support of priorities on queue. Set the max priority on message. -# Setting is ignored if the value is lower or equals to 0 -#conductor.event-queues.amqp.maxPriority=-1 - -# To enable Workflow/Task Summary Input/Output JSON Serialization, use the following: -# conductor.app.summary-input-output-json-serialization.enabled=true - -# Additional modules for metrics collection exposed to Prometheus (optional) -# conductor.metrics-prometheus.enabled=true -# management.endpoints.web.exposure.include=prometheus - -# Additional modules for metrics collection exposed to Datadog (optional) -management.metrics.export.datadog.enabled=${conductor.metrics-datadog.enabled:false} -management.metrics.export.datadog.api-key=${conductor.metrics-datadog.api-key:} diff --git a/server/src/main/resources/banner.txt b/server/src/main/resources/banner.txt deleted file mode 100644 index 3f3501878..000000000 --- a/server/src/main/resources/banner.txt +++ /dev/null @@ -1,7 +0,0 @@ - ______ ______ .__ __. _______ __ __ ______ .___________. ______ .______ - / | / __ \ | \ | | | \ | | | | / || | / __ \ | _ \ -| ,----'| | | | | \| | | .--. || | | | | ,----'`---| |----`| | | | | |_) | -| | | | | | | . ` | | | | || | | | | | | | | | | | | / -| `----.| `--' | | |\ | | '--' || `--' | | `----. | | | `--' | | |\ \----. 
- \______| \______/ |__| \__| |_______/ \______/ \______| |__| \______/ | _| `._____| -${application.formatted-version} :::Spring Boot:::${spring-boot.formatted-version} diff --git a/server/src/main/resources/log4j2.xml b/server/src/main/resources/log4j2.xml deleted file mode 100644 index cab346657..000000000 --- a/server/src/main/resources/log4j2.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/server/src/test/java/com/netflix/conductor/common/config/ConductorObjectMapperTest.java b/server/src/test/java/com/netflix/conductor/common/config/ConductorObjectMapperTest.java deleted file mode 100644 index 9fe058d20..000000000 --- a/server/src/test/java/com/netflix/conductor/common/config/ConductorObjectMapperTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.common.config; - -import java.io.IOException; -import java.io.StringWriter; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.protobuf.Any; -import com.google.protobuf.Struct; -import com.google.protobuf.Value; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -/** - * Tests the customized {@link ObjectMapper} that is used by {@link com.netflix.conductor.Conductor} - * application. 
- */ -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE) -@RunWith(SpringRunner.class) -public class ConductorObjectMapperTest { - - @Autowired ObjectMapper objectMapper; - - @Test - public void testSimpleMapping() throws IOException { - assertTrue(objectMapper.canSerialize(Any.class)); - - Struct struct1 = - Struct.newBuilder() - .putFields( - "some-key", Value.newBuilder().setStringValue("some-value").build()) - .build(); - - Any source = Any.pack(struct1); - - StringWriter buf = new StringWriter(); - objectMapper.writer().writeValue(buf, source); - - Any dest = objectMapper.reader().forType(Any.class).readValue(buf.toString()); - assertEquals(source.getTypeUrl(), dest.getTypeUrl()); - - Struct struct2 = dest.unpack(Struct.class); - assertTrue(struct2.containsFields("some-key")); - assertEquals( - struct1.getFieldsOrThrow("some-key").getStringValue(), - struct2.getFieldsOrThrow("some-key").getStringValue()); - } - - @Test - public void testNullOnWrite() throws JsonProcessingException { - Map data = new HashMap<>(); - data.put("someKey", null); - data.put("someId", "abc123"); - String result = objectMapper.writeValueAsString(data); - assertTrue(result.contains("null")); - } - - @Test - public void testWorkflowSerDe() throws IOException { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("testDef"); - workflowDef.setVersion(2); - - Workflow workflow = new Workflow(); - workflow.setWorkflowDefinition(workflowDef); - workflow.setWorkflowId("test-workflow-id"); - workflow.setStatus(Workflow.WorkflowStatus.RUNNING); - workflow.setStartTime(10L); - workflow.setInput(null); - - Map data = new HashMap<>(); - data.put("someKey", null); - data.put("someId", "abc123"); - workflow.setOutput(data); - - String workflowPayload = objectMapper.writeValueAsString(workflow); - Workflow workflow1 = objectMapper.readValue(workflowPayload, Workflow.class); - - assertTrue(workflow1.getOutput().containsKey("someKey")); - 
assertNull(workflow1.getOutput().get("someKey")); - assertNotNull(workflow1.getInput()); - } -} diff --git a/settings.gradle b/settings.gradle deleted file mode 100644 index ba3cc1fe5..000000000 --- a/settings.gradle +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -plugins { - id "com.gradle.enterprise" version "3.10.1" -} - -gradleEnterprise { - buildScan { - termsOfServiceUrl = "https://gradle.com/terms-of-service" - termsOfServiceAgree = "yes" - publishAlways() - - buildScanPublished { scan -> - file("buildscan.log") << "${new Date()} - ${scan.buildScanUri}\n" - } - } -} - -rootProject.name = 'conductor' - -include 'annotations' -include 'annotations-processor' - -include 'server' -include 'common' -include 'core' -include 'client' -include 'client-spring' - -include 'cassandra-persistence' -include 'redis-persistence' - -include 'es6-persistence' - -include 'redis-lock' - -include 'awss3-storage' -include 'awssqs-event-queue' - -include 'redis-concurrency-limit' - -include 'json-jq-task' -include 'http-task' - -include 'rest' -include 'grpc' -include 'grpc-server' -include 'grpc-client' - -include 'java-sdk' - -include 'test-harness' - -rootProject.children.each {it.name="conductor-${it.name}"} diff --git a/shared-libraries.html b/shared-libraries.html new file mode 100644 index 000000000..afed9bbdb --- /dev/null +++ b/shared-libraries.html @@ -0,0 +1,94 @@ + + + + + + + + + + + + + + + + + + + + + MSANose Report + + + +

    + + +
    +
    +
    + View More on GitHub +

    Shared Libraries (SL)

    +
    +

    If microservices are coupled with a common library, that library should be refactored into a separate module. This reduces the fragility of the application by migrating the shared functionality behind a common, unchanging interface. This will make the system resistant to ripples from changes within the library.

    +
    +
    + +
    +
    + + + + + + + + + + + + + +
    LibraryDependenciesMicroservices
    + +
    +
    +
    + + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/shared-persistence.html b/shared-persistence.html new file mode 100644 index 000000000..b3711b3b7 --- /dev/null +++ b/shared-persistence.html @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + + + + + MSANose Report + + + + + + +
    +
    +
    + View More on GitHub +

    Shared Persistence (SP)

    +
    +

    When two microservice application modules access the same database, it breaks the microservice definition. Each microservice should have autonomy and control over its data and database.

    +
    +
    + +
    +
    + + +
    +
    +
    + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + + diff --git a/springboot-bom-overrides.gradle b/springboot-bom-overrides.gradle deleted file mode 100644 index 33853586f..000000000 --- a/springboot-bom-overrides.gradle +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ - -// Contains overrides for Spring Boot Dependency Management plugin -// Dependency version override properties can be found at https://docs.spring.io/spring-boot/docs/2.6.7/reference/htmlsingle/#appendix.dependency-versions.properties - -// Conductor's default is ES6, but SB brings in ES7 -ext['elasticsearch.version'] = revElasticSearch6 - -// SB brings groovy 3.0.x which is not compatible with Spock -ext['groovy.version'] = revGroovy diff --git a/test-harness/build.gradle b/test-harness/build.gradle deleted file mode 100644 index 2a249ac0b..000000000 --- a/test-harness/build.gradle +++ /dev/null @@ -1,39 +0,0 @@ -apply plugin: 'groovy' - -dependencies { - testImplementation project(':conductor-server') - testImplementation project(':conductor-common') - testImplementation project(':conductor-rest') - testImplementation project(':conductor-core') - testImplementation project(':conductor-redis-persistence') - testImplementation project(':conductor-cassandra-persistence') - testImplementation project(':conductor-es6-persistence') - testImplementation project(':conductor-grpc-server') - testImplementation project(':conductor-client') - testImplementation project(':conductor-grpc-client') - testImplementation project(':conductor-json-jq-task') - testImplementation project(':conductor-http-task') - - testImplementation "org.springframework.retry:spring-retry" - - testImplementation "com.fasterxml.jackson.core:jackson-databind" - testImplementation "com.fasterxml.jackson.core:jackson-core" - - testImplementation "org.apache.commons:commons-lang3" - - testImplementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - testImplementation 
"com.google.guava:guava:${revGuava}" - testImplementation "org.springframework:spring-web" - - testImplementation "redis.clients:jedis:${revJedis}" - testImplementation "com.netflix.dyno-queues:dyno-queues-redis:${revDynoQueues}" - - testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" - testImplementation "org.spockframework:spock-core:${revSpock}" - testImplementation "org.spockframework:spock-spring:${revSpock}" - - testImplementation "org.elasticsearch.client:elasticsearch-rest-client" - testImplementation "org.elasticsearch.client:elasticsearch-rest-high-level-client" - - testImplementation "org.testcontainers:elasticsearch:${revTestContainer}" -} diff --git a/test-harness/dependencies.lock b/test-harness/dependencies.lock deleted file mode 100644 index 5a578c3ff..000000000 --- a/test-harness/dependencies.lock +++ /dev/null @@ -1,728 +0,0 @@ -{ - "annotationProcessor": { - "org.springframework.boot:spring-boot-configuration-processor": { - "locked": "2.6.7" - } - }, - "compileClasspath": { - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "runtimeClasspath": { - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - } - }, - "testCompileClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "locked": "2.13.2.1" - }, - "com.google.guava:guava": { - "locked": "30.0-jre" - }, - 
"com.google.protobuf:protobuf-java": { - "locked": "3.13.0" - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "locked": "2.0.20" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "org.apache.commons:commons-lang3": { - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "6.8.12" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "locked": "2.6.7" - }, - 
"org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "locked": "1.3.3" - }, - "org.springframework:spring-web": { - "locked": "5.3.19" - }, - "org.testcontainers:elasticsearch": { - "locked": "1.15.3" - }, - "redis.clients:jedis": { - "locked": "3.3.0" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue" - ], - "locked": "1.11.86" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.10.2" - }, - "com.fasterxml.jackson.core:jackson-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.13.2.1" - }, - "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.13.2" - }, - "com.github.ben-manes.caffeine:caffeine": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core", - 
"com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "2.9.3" - }, - "com.google.guava:guava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-client" - ], - "locked": "30.0-jre" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client" - ], - "locked": "3.13.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.6.0" - }, - "com.netflix.conductor:conductor-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "project": true - }, - "com.netflix.conductor:conductor-awss3-storage": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-awssqs-event-queue": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - 
"com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-http-task": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-json-jq-task": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-concurrency-limit": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - 
"com.netflix.conductor:conductor-redis-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-rest": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.20" - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.10.10" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest" - ], - "locked": "1.1.4" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core" - ], - "locked": "0.122.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.3" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "com.thoughtworks.xstream:xstream": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "1.4.19" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.7" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - 
"io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.47.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client" - ], - "locked": "1.47.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-core" - ], - "locked": "1.3.8" - }, - "jakarta.activation:jakarta.activation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "jakarta.xml.bind:jakarta.xml.bind-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.3.3" - }, - "javax.annotation:javax.annotation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.3.2" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-http-task" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.13.2" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-json-jq-task" - ], - "locked": "0.0.13" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.5" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - 
"com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.12.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - 
"com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-jul": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-slf4j-impl": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - 
"com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.17.1" - }, - "org.apache.logging.log4j:log4j-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-annotations", - "com.netflix.conductor:conductor-awss3-storage", - "com.netflix.conductor:conductor-awssqs-event-queue", - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-http-task", - "com.netflix.conductor:conductor-json-jq-task", - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.17.1" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.5.13" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "6.8.12" - }, - "org.glassfish.jaxb:jaxb-runtime": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "2.3.6" - }, - 
"org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.13.3" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-grpc-client" - ], - "locked": "1.7.36" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.5" - }, - "org.spockframework:spock-spring": { - "locked": "1.3-groovy-2.5" - }, - "org.springdoc:springdoc-openapi-ui": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "1.6.9" - }, - "org.springframework.boot:spring-boot-starter": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-actuator": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-log4j2": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-test": { - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-validation": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "2.6.7" - }, - "org.springframework.boot:spring-boot-starter-web": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-rest", - "com.netflix.conductor:conductor-server" - ], - "locked": "2.6.7" - }, - "org.springframework.retry:spring-retry": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.3.3" - }, - "org.springframework:spring-web": { - "locked": "5.3.19" - }, - "org.testcontainers:elasticsearch": { - "locked": "1.15.3" - }, - 
"redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-concurrency-limit", - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.3.0" - } - } -} \ No newline at end of file diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy deleted file mode 100644 index d3270c447..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.base - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty -import org.springframework.context.annotation.Bean -import org.springframework.context.annotation.Configuration -import org.springframework.context.annotation.Primary -import org.springframework.test.context.TestPropertySource - -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.redis.dao.DynoQueueDAO -import com.netflix.conductor.redis.jedis.JedisMock -import com.netflix.dyno.connectionpool.Host -import com.netflix.dyno.queues.ShardSupplier -import com.netflix.dyno.queues.redis.RedisQueues - -import redis.clients.jedis.commands.JedisCommands -import spock.mock.DetachedMockFactory - -@TestPropertySource(properties = [ - "conductor.system-task-workers.enabled=false", - "conductor.workflow-repair-service.enabled=true", - "conductor.workflow-reconciler.enabled=false", - "conductor.integ-test.queue-spy.enabled=true" -]) -abstract class AbstractResiliencySpecification extends AbstractSpecification { - - @Configuration - static class TestQueueConfiguration { - - @Primary - @Bean - @ConditionalOnProperty(name = "conductor.integ-test.queue-spy.enabled", havingValue = "true") - QueueDAO SpyQueueDAO() { - DetachedMockFactory detachedMockFactory = new DetachedMockFactory() - JedisCommands jedisMock = new JedisMock() - ShardSupplier shardSupplier = new ShardSupplier() { - @Override - Set getQueueShards() { - return new HashSet<>(Collections.singletonList("a")) - } - - @Override - String getCurrentShard() { - return "a" - } - - @Override - String 
getShardForHost(Host host) { - return "a" - } - } - RedisQueues redisQueues = new RedisQueues(jedisMock, jedisMock, "mockedQueues", shardSupplier, 60000, 120000) - DynoQueueDAO dynoQueueDAO = new DynoQueueDAO(redisQueues) - - return detachedMockFactory.Spy(dynoQueueDAO) - } - } - - @Autowired - QueueDAO queueDAO -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy deleted file mode 100644 index 81a927a3a..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.base - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.test.context.TestPropertySource - -import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.core.reconciliation.WorkflowSweeper -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil - -import spock.lang.Specification - -@SpringBootTest -@TestPropertySource(locations = "classpath:application-integrationtest.properties") -abstract class AbstractSpecification extends Specification { - - @Autowired - ExecutionService workflowExecutionService - - @Autowired - MetadataService metadataService - - @Autowired - WorkflowExecutor workflowExecutor - - @Autowired - WorkflowTestUtil workflowTestUtil - - @Autowired - WorkflowSweeper workflowSweeper - - @Autowired - AsyncSystemTaskExecutor asyncSystemTaskExecutor - - def cleanup() { - workflowTestUtil.clearWorkflows() - } - - void sweep(String workflowId) { - workflowSweeper.sweep(workflowId) - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy deleted file mode 100644 index 66012e6fb..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Copyright 2022 Netflix, 
Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared -import spock.lang.Unroll - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class DecisionTaskSpec extends AbstractSpecification { - - @Shared - def DECISION_WF = "DecisionWorkflow" - - @Shared - def FORK_JOIN_DECISION_WF = "ForkConditionalTest" - - @Shared - def COND_TASK_WF = "ConditionalTaskWF" - - def setup() { - //initialization code for each feature - workflowTestUtil.registerWorkflows('simple_decision_task_integration_test.json', - 'decision_and_fork_join_integration_test.json', - 'conditional_task_workflow_integration_test.json') - } - - def "Test simple decision workflow"() { - given: "Workflow an input of a workflow with decision task" - Map input = new HashMap() - input['param1'] = 'p1' - input['param2'] = 'p2' - input['case'] = 'c' - - when: "A decision workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(DECISION_WF, 1, - 'decision_workflow', input, - null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DECISION' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and 
completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'DECISION' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_20' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_20' is polled and completed" - def polledAndCompletedTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask20Try1) - - and: "verify that the 'integration_task_20' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[3].taskType == 'integration_task_20' - tasks[3].status == Task.Status.COMPLETED - } - } - - def "Test a workflow that has a decision task that leads to a fork join"() { - given: "Workflow an input of a workflow with decision task" - Map input = new HashMap() - input['param1'] = 'p1' - input['param2'] = 'p2' - input['case'] = 'c' - - when: "A decision workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_DECISION_WF, 1, - 'decision_forkjoin', input, - null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - } - - when: "the tasks 'integration_task_1' and 'integration_task_10' are polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - def polledAndCompletedTask10Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_10', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - verifyPolledAndAcknowledgedTask(polledAndCompletedTask10Try1) - - and: "verify that the 'integration_task_1' and 'integration_task_10' are COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - 
tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_20' - tasks[6].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_20' is polled and completed" - def polledAndCompletedTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask20Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_20' - tasks[6].status == 
Task.Status.COMPLETED - } - } - - def "Test default case condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the default case is executed" - Map input = new HashMap() - input['param1'] = 'xxx' - input['param2'] = 'two' - - when: "A conditional workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - 'conditional_default', input, - null, null, null) - - then: "verify that the workflow is running and the default condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DECISION' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['caseOutput'] == ['xxx'] - tasks[1].taskType == 'integration_task_10' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_10' is polled and completed" - def polledAndCompletedTask10Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_10', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask10Try1) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_10' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'DECISION' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['caseOutput'] == ['null'] - } - } - - @Unroll - def "Test case 'nested' and '#caseValue' condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the 'nested' and '#caseValue' decision tree is executed" - Map input = new HashMap() - input['param1'] = 'nested' - input['param2'] = caseValue - - when: "A conditional workflow is 
started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - workflowCorrelationId, input, - null, null, null) - - then: "verify that the workflow is running and the 'nested' and '#caseValue' condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'DECISION' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['caseOutput'] == ['nested'] - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[1].outputData['caseOutput'] == [caseValue] - tasks[2].taskType == expectedTaskName - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task '#expectedTaskName' is polled and completed" - def polledAndCompletedTaskTry1 = workflowTestUtil.pollAndCompleteTask(expectedTaskName, 'task.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTaskTry1) - - and: - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[2].taskType == expectedTaskName - tasks[2].status == endTaskStatus - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[3].outputData['caseOutput'] == ['null'] - } - - where: - caseValue | expectedTaskName | workflowCorrelationId || endTaskStatus - 'two' | 'integration_task_2' | 'conditional_nested_two' || Task.Status.COMPLETED - 'one' | 'integration_task_1' | 'conditional_nested_one' || Task.Status.COMPLETED - } - - def "Test 'three' case condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the default case is executed" - Map input = new HashMap() - input['param1'] = 'three' - input['param2'] = 'two' - input['finalCase'] = 'notify' - - when: "A conditional workflow is started with the 
workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - 'conditional_three', input, - null, null, null) - - then: "verify that the workflow is running and the 'three' condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DECISION' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['caseOutput'] == ['three'] - tasks[1].taskType == 'integration_task_3' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_3' is polled and completed" - def polledAndCompletedTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask3Try1) - - and: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[1].taskType == 'integration_task_3' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'DECISION' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['caseOutput'] == ['notify'] - tasks[3].taskType == 'integration_task_4' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_4' is polled and completed" - def polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[1].taskType == 'integration_task_3' - 
tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'DECISION' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['caseOutput'] == ['notify'] - tasks[3].taskType == 'integration_task_4' - tasks[3].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy deleted file mode 100644 index 6b2f556dd..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy +++ /dev/null @@ -1,900 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.common.utils.TaskUtils -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.test.base.AbstractSpecification - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class DoWhileSpec extends AbstractSpecification { - - @Autowired - SubWorkflow subWorkflowTask - - def setup() { - workflowTestUtil.registerWorkflows("do_while_integration_test.json", - "do_while_multiple_integration_test.json", - "do_while_as_subtask_integration_test.json", - 'simple_one_task_sub_workflow_integration_test.json', - 'do_while_iteration_fix_test.json', - "do_while_sub_workflow_integration_test.json") - } - - def "Test workflow with a single iteration Do While task"() { - given: "Number of iterations of the loop is set to 1" - def workflowInput = new HashMap() - workflowInput['loop'] = 1 - - when: "A do_while workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Workflow", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == 
Task.Status.SCHEDULED - } - - when: "Polling and completing first task" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second task" - Tuple polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType 
== 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - verifyTaskIteration(polledAndCompletedTask2[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - } - } - - def "Test workflow with a single iteration Do While task with Sub workflow"() { - given: "Number of iterations of the loop is set to 1" - def workflowInput = new HashMap() - workflowInput['loop'] = 1 - - when: "A do_while workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Sub_Workflow", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Polling and completing first task" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify 
that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second task" - Tuple polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 
'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - verifyTaskIteration(polledAndCompletedTask2[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'SUB_WORKFLOW' - tasks[6].status == Task.Status.SCHEDULED - } - - when: "the sub workflow is started by issuing a system task call" - def parentWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = parentWorkflow.getTaskByRefName('st1__1').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - - then: "verify that the sub workflow task is in a IN PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 
'SUB_WORKFLOW' - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "sub workflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowInstanceId = workflow.getTaskByRefName('st1__1').subWorkflowId - - then: "verify that the sub workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the 'simple_task_in_sub_wf' belonging to the sub workflow is polled and completed" - def polledAndCompletedSubWorkflowTask = workflowTestUtil.pollAndCompleteTask('simple_task_in_sub_wf', 'subworkflow.task.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndCompletedSubWorkflowTask) - - and: "verify that the sub workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - and: "the parent workflow is swept" - sweep(workflowInstanceId) - - and: "verify that the workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == 
Task.Status.COMPLETED - tasks[6].taskType == 'SUB_WORKFLOW' - tasks[6].status == Task.Status.COMPLETED - } - } - - def "Test workflow with multiple Do While tasks with multiple iterations"() { - given: "Number of iterations of the first loop is set to 2 and second loop is set to 1" - def workflowInput = new HashMap() - workflowInput['loop'] = 2 - workflowInput['loop2'] = 1 - - when: "A workflow with multiple do while tasks with multiple iterations is started" - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Multiple", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Polling and completing first task" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing 
second task" - Tuple polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - verifyTaskIteration(polledAndCompletedTask2[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - 
tasks[6].taskType == 'integration_task_0' - tasks[6].status == Task.Status.SCHEDULED - } - - when: "Polling and completing second iteration of first task" - Tuple polledAndCompletedSecondIterationTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedSecondIterationTask0, [:]) - verifyTaskIteration(polledAndCompletedSecondIterationTask0[0] as Task, 2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 11 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_0' - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == 'FORK' - tasks[7].status == Task.Status.COMPLETED - tasks[8].taskType == 'integration_task_1' - tasks[8].status == Task.Status.SCHEDULED - tasks[9].taskType == 'integration_task_2' - tasks[9].status == Task.Status.SCHEDULED - tasks[10].taskType == 'JOIN' - tasks[10].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second iteration of second task" - Tuple polledAndCompletedSecondIterationTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedSecondIterationTask1) - 
verifyTaskIteration(polledAndCompletedSecondIterationTask1[0] as Task, 2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 11 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_0' - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == 'FORK' - tasks[7].status == Task.Status.COMPLETED - tasks[8].taskType == 'integration_task_1' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'integration_task_2' - tasks[9].status == Task.Status.SCHEDULED - tasks[10].taskType == 'JOIN' - tasks[10].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second iteration of third task" - Tuple polledAndCompletedSecondIterationTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedSecondIterationTask2) - verifyTaskIteration(polledAndCompletedSecondIterationTask2[0] as Task, 2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 13 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - 
tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_0' - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == 'FORK' - tasks[7].status == Task.Status.COMPLETED - tasks[8].taskType == 'integration_task_1' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'integration_task_2' - tasks[9].status == Task.Status.COMPLETED - tasks[10].taskType == 'JOIN' - tasks[10].status == Task.Status.COMPLETED - tasks[11].taskType == 'DO_WHILE' - tasks[11].status == Task.Status.IN_PROGRESS - tasks[12].taskType == 'integration_task_3' - tasks[12].status == Task.Status.SCHEDULED - } - - when: "Polling and completing task within the second do while" - Tuple polledAndCompletedIntegrationTask3 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedIntegrationTask3) - verifyTaskIteration(polledAndCompletedIntegrationTask3[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 13 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_1' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_0' - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == 'FORK' - tasks[7].status == 
Task.Status.COMPLETED - tasks[8].taskType == 'integration_task_1' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'integration_task_2' - tasks[9].status == Task.Status.COMPLETED - tasks[10].taskType == 'JOIN' - tasks[10].status == Task.Status.COMPLETED - tasks[11].taskType == 'DO_WHILE' - tasks[11].status == Task.Status.COMPLETED - tasks[12].taskType == 'integration_task_3' - tasks[12].status == Task.Status.COMPLETED - } - } - - def "Test retrying a failed do while workflow"() { - setup: "Update the task definition with no retries" - def taskName = 'integration_task_0' - def persistedTaskDefinition = workflowTestUtil.getPersistedTaskDefinition(taskName).get() - def modifiedTaskDefinition = new TaskDef(persistedTaskDefinition.name, persistedTaskDefinition.description, - persistedTaskDefinition.ownerEmail, 0, persistedTaskDefinition.timeoutSeconds, - persistedTaskDefinition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "A do while workflow is started" - def workflowInput = new HashMap() - workflowInput['loop'] = 1 - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Workflow", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Polling and failing first task" - Tuple polledAndFailedTask0 = workflowTestUtil.pollAndFailTask('integration_task_0', 'integration.test.worker', "induced..failure") - - then: "Verify that the task was polled and acknowledged and workflow is in failed state" - verifyPolledAndAcknowledgedTask(polledAndFailedTask0) - verifyTaskIteration(polledAndFailedTask0[0] as Task, 1) - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.CANCELED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - } - - when: "The workflow is retried" - workflowExecutor.retry(workflowInstanceId, false) - - then: "Verify that workflow is running" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "Polling and completing first task" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second task" - Tuple 
polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - verifyTaskIteration(polledAndCompletedTask2[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.COMPLETED - 
tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.COMPLETED - } - - cleanup: "Reset the task definition" - metadataService.updateTaskDef(persistedTaskDefinition) - } - - def "Test auto retrying a failed do while workflow"() { - setup: "Update the task definition with retryCount to 1 and retryDelaySeconds to 0" - def taskName = 'integration_task_0' - def persistedTaskDefinition = workflowTestUtil.getPersistedTaskDefinition(taskName).get() - def modifiedTaskDefinition = new TaskDef(persistedTaskDefinition.name, persistedTaskDefinition.description, - persistedTaskDefinition.ownerEmail, 1, persistedTaskDefinition.timeoutSeconds, - persistedTaskDefinition.responseTimeoutSeconds) - modifiedTaskDefinition.setRetryDelaySeconds(0) - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "A do while workflow is started" - def workflowInput = new HashMap() - workflowInput['loop'] = 1 - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Workflow", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Polling and failing first task" - Tuple polledAndFailedTask0 = workflowTestUtil.pollAndFailTask('integration_task_0', 'integration.test.worker', "induced..failure") - - then: "Verify that the task was polled and acknowledged and retried task was generated and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndFailedTask0) - verifyTaskIteration(polledAndFailedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retryCount == 1 - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "Polling and completing first task" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing second task" - Tuple polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.IN_PROGRESS - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - verifyTaskIteration(polledAndCompletedTask2[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'FORK' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_1' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.COMPLETED - } - - cleanup: "Reset the task definition" - metadataService.updateTaskDef(persistedTaskDefinition) - } - - def "Test workflow with a iteration Do While task as subtask of a forkjoin task"() { - given: "Number of iterations of the loop is set to 1" - def workflowInput = 
new HashMap() - workflowInput['loop'] = 1 - - when: "A do_while workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_SubTask", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DO_WHILE' - tasks[1].status == Task.Status.IN_PROGRESS - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_0' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - } - - when: "Polling and completing first task in DO While" - Tuple polledAndCompletedTask0 = workflowTestUtil.pollAndCompleteTask('integration_task_0', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask0) - verifyTaskIteration(polledAndCompletedTask0[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DO_WHILE' - tasks[1].status == Task.Status.IN_PROGRESS - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_0' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_1' - tasks[5].status == Task.Status.SCHEDULED - } - - when: "Polling and completing second task in DO While" - Tuple polledAndCompletedTask1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 
'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in running state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1) - verifyTaskIteration(polledAndCompletedTask1[0] as Task, 1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DO_WHILE' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_0' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_1' - tasks[5].status == Task.Status.COMPLETED - } - - when: "Polling and completing third task" - Tuple polledAndCompletedTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'integration.test.worker') - - then: "Verify that the task was polled and acknowledged and workflow is in completed state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DO_WHILE' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_0' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_1' - tasks[5].status == Task.Status.COMPLETED - } - } - - def "Test workflow with Do While task contains loop over task that use iteration in script expression"() { - given: "Number of iterations of the loop is set to 
2" - def workflowInput = new HashMap() - workflowInput['loop'] = 2 - - when: "A do_while workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow("Do_While_Workflow_Iteration_Fix", 1, "looptest", workflowInput, null, null) - - then: "Verify that the workflow has competed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'DO_WHILE' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'LAMBDA' - tasks[1].status == Task.Status.COMPLETED - tasks[1].outputData.get("result") == 0 - tasks[2].taskType == 'LAMBDA' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData.get("result") == 1 - } - } - - void verifyTaskIteration(Task task, int iteration) { - assert task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())) - assert task.iteration == iteration - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy deleted file mode 100644 index 782996a48..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy +++ /dev/null @@ -1,676 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -class DynamicForkJoinSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - @Shared - def DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest" - - def setup() { - workflowTestUtil.registerWorkflows('dynamic_fork_join_integration_test.json', - 'simple_workflow_3_integration_test.json') - } - - def "Test dynamic fork join success flow"() { - when: " a dynamic fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, - 'dynamic_fork_join_workflow', [:], - null, null, null) - - then: "verify that the workflow has been successfully started and the first task is in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: " the first task is 'integration_task_1' output 
has a list of dynamic tasks" - WorkflowTask workflowTask2 = new WorkflowTask() - workflowTask2.name = 'integration_task_2' - workflowTask2.taskReferenceName = 'xdt1' - - WorkflowTask workflowTask3 = new WorkflowTask() - workflowTask3.name = 'integration_task_3' - workflowTask3.taskReferenceName = 'xdt2' - - def dynamicTasksInput = ['xdt1': ['k1': 'v1'], 'xdt2': ['k2': 'v2']] - - and: "The 'integration_task_1' is polled and completed" - def pollAndCompleteTask1Try = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker', - ['dynamicTasks': [workflowTask2, workflowTask3], 'dynamicTasksInput': dynamicTasksInput]) - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try) - - and: "verify that workflow has progressed further ahead and new dynamic tasks have been scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "Poll and complete 'integration_task_2' and 'integration_task_3'" - def pollAndCompleteTask2Try = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.worker', - ['ok1': 'ov1']) - def pollAndCompleteTask3Try = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3.worker', - ['ok1': 'ov1']) - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try, ['k1': 'v1']) - 
workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask3Try, ['k2': 'v2']) - - and: "verify that the workflow has progressed and the 'integration_task_2' and 'integration_task_3' are complete" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['xdt1', 'xdt2'] - tasks[4].status == Task.Status.COMPLETED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - tasks[4].outputData['xdt1']['ok1'] == 'ov1' - tasks[4].outputData['xdt2']['ok1'] == 'ov1' - tasks[5].taskType == 'integration_task_4' - tasks[5].status == Task.Status.SCHEDULED - } - - when: "Poll and complete 'integration_task_4'" - def pollAndCompleteTask4Try = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4.worker') - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask4Try) - - and: "verify that the workflow is complete" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[5].taskType == 'integration_task_4' - tasks[5].status == Task.Status.COMPLETED - } - } - - - def "Test dynamic fork join failure of dynamic forked task flow"() { - setup: "Make sure that the integration_task_2 does not have any retry count" - def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, - persistedTask2Definition.description, persistedTask2Definition.ownerEmail, 0, - persistedTask2Definition.timeoutSeconds, persistedTask2Definition.responseTimeoutSeconds) - 
metadataService.updateTaskDef(modifiedTask2Definition) - - when: " a dynamic fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, - 'dynamic_fork_join_workflow', [:], - null, null, null) - - then: "verify that the workflow has been successfully started and the first task is in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: " the first task is 'integration_task_1' output has a list of dynamic tasks" - WorkflowTask workflowTask2 = new WorkflowTask() - workflowTask2.name = 'integration_task_2' - workflowTask2.taskReferenceName = 'xdt1' - - WorkflowTask workflowTask3 = new WorkflowTask() - workflowTask3.name = 'integration_task_3' - workflowTask3.taskReferenceName = 'xdt2' - - def dynamicTasksInput = ['xdt1': ['k1': 'v1'], 'xdt2': ['k2': 'v2']] - - and: "The 'integration_task_1' is polled and completed" - def pollAndCompleteTask1Try = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker', - ['dynamicTasks': [workflowTask2, workflowTask3], 'dynamicTasksInput': dynamicTasksInput]) - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try) - - and: "verify that workflow has progressed further ahead and new dynamic tasks have been scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.SCHEDULED - 
tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "Poll and fail 'integration_task_2'" - def pollAndCompleteTask2Try = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.worker', 'it is a failure..') - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try, ['k1': 'v1']) - - and: "verify that the workflow is in failed state and 'integration_task_2' has also failed and other tasks are canceled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 5 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.CANCELED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['xdt1', 'xdt2'] - tasks[4].status == Task.Status.CANCELED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - cleanup: "roll back the change made to integration_task_2 definition" - metadataService.updateTaskDef(persistedTask2Definition) - } - - - def "Retry a failed dynamic fork join workflow"() { - setup: "Make sure that the integration_task_2 does not have any retry count" - def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, - persistedTask2Definition.description, persistedTask2Definition.ownerEmail, 0, - persistedTask2Definition.timeoutSeconds, persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - when: " a dynamic fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, - 'dynamic_fork_join_workflow', [:], - null, null, null) - - then: "verify that the workflow has been 
successfully started and the first task is in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: " the first task is 'integration_task_1' output has a list of dynamic tasks" - WorkflowTask workflowTask2 = new WorkflowTask() - workflowTask2.name = 'integration_task_2' - workflowTask2.taskReferenceName = 'xdt1' - - WorkflowTask workflowTask3 = new WorkflowTask() - workflowTask3.name = 'integration_task_3' - workflowTask3.taskReferenceName = 'xdt2' - - def dynamicTasksInput = ['xdt1': ['k1': 'v1'], 'xdt2': ['k2': 'v2']] - - and: "The 'integration_task_1' is polled and completed" - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker', - ['dynamicTasks': [workflowTask2, workflowTask3], 'dynamicTasksInput': dynamicTasksInput]) - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that workflow has progressed further ahead and new dynamic tasks have been scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "Poll and fail 'integration_task_2'" - def pollAndCompleteTask2Try1 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.worker', 'it is a 
failure..') - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try1, ['k1': 'v1']) - - and: "verify that the workflow is in failed state and 'integration_task_2' has also failed and other tasks are canceled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 5 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.CANCELED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['xdt1', 'xdt2'] - tasks[4].status == Task.Status.CANCELED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "The workflow is retried" - workflowExecutor.retry(workflowInstanceId, false) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.CANCELED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['xdt1', 'xdt2'] - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'integration_task_3' - tasks[6].status == Task.Status.SCHEDULED - } - - when: "Poll and complete 'integration_task_2' and 'integration_task_3'" - def pollAndCompleteTask2Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.worker', - ['ok1': 'ov1']) - def pollAndCompleteTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3.worker', - ['ok1': 'ov1']) - - then: "verify that the tasks were polled and acknowledged" 
- workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try2, ['k1': 'v1']) - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask3Try1, ['k2': 'v2']) - - and: "verify that the workflow has progressed and the 'integration_task_2' and 'integration_task_3' are complete" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 8 - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['xdt1', 'xdt2'] - tasks[4].status == Task.Status.COMPLETED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - tasks[4].outputData['xdt1']['ok1'] == 'ov1' - tasks[4].outputData['xdt2']['ok1'] == 'ov1' - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_3' - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == 'integration_task_4' - tasks[7].status == Task.Status.SCHEDULED - } - - when: "Poll and complete 'integration_task_4'" - def pollAndCompleteTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4.worker') - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask4Try1) - - and: "verify that the workflow is complete" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 8 - tasks[7].taskType == 'integration_task_4' - tasks[7].status == Task.Status.COMPLETED - } - - cleanup: "roll back the change made to integration_task_2 definition" - metadataService.updateTaskDef(persistedTask2Definition) - } - - def "Retry a failed dynamic fork join workflow with forked subworkflow"() { - setup: "Make sure that the integration_task_2 does not have any retry count" - def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = 
new TaskDef(persistedTask2Definition.name, - persistedTask2Definition.description, persistedTask2Definition.ownerEmail, 0, - persistedTask2Definition.timeoutSeconds, persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - when: "the dynamic fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, - 'dynamic_fork_join_wf_subwf', [:], null, null, null) - - then: "verify that the workflow is started and first task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - } - - when: "the first task's output has a list of dynamically forked tasks including a subworkflow" - WorkflowTask workflowTask2 = new WorkflowTask() - workflowTask2.name = 'sub_wf_task' - workflowTask2.taskReferenceName = 'xdt1' - workflowTask2.workflowTaskType = TaskType.SUB_WORKFLOW - SubWorkflowParams subWorkflowParams = new SubWorkflowParams() - subWorkflowParams.setName("integration_test_wf3") - subWorkflowParams.setVersion(1) - workflowTask2.subWorkflowParam = subWorkflowParams - - WorkflowTask workflowTask3 = new WorkflowTask() - workflowTask3.name = 'integration_task_10' - workflowTask3.taskReferenceName = 'xdt10' - - def dynamicTasksInput = ['xdt1': ['p1': 'q1', 'p2': 'q2'], 'xdt10': ['k2': 'v2']] - - and: "The 'integration_task_1' is polled and completed" - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker', - ['dynamicTasks': [workflowTask2, workflowTask3], 'dynamicTasksInput': dynamicTasksInput]) - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that workflow has progressed further ahead and new dynamic tasks have been scheduled" - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "the subworkflow is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - String subworkflowTaskId = polledTaskIds.get(0) - asyncSystemTaskExecutor.execute(subWorkflowTask, subworkflowTaskId) - - then: "verify that the sub workflow task is in a IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "subworkflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.tasks[2].subWorkflowId - - then: "verify that the sub workflow is RUNNING, and first task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - 
tasks[0].status == Task.Status.SCHEDULED - } - - and: "The 'integration_task_10' is polled and completed" - def pollAndCompleteTask10Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_10', 'task10.worker') - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask10Try1) - - and: "verify that the workflow is updated" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "The task within sub workflow is polled and completed" - pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker') - - then: "verify that the task was completed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "the next task in the subworkflow is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Poll and fail 'integration_task_2'" - def pollAndCompleteTask2Try1 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.worker', "failure") - - and: "the workflow is evaluated" - sweep(workflowInstanceId) - - then: "verify that the task was polled and acknowledged" - 
workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try1) - - and: "the subworkflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - and: "the workflow is also in FAILED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "The workflow is retried" - workflowExecutor.retry(workflowInstanceId, true) - - then: "verify that the workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.IN_PROGRESS - tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - and: "the subworkflow is retried and in RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { 
- status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the workflow is evaluated" - sweep(workflowInstanceId) - - then: "verify that the JOIN is updated" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.IN_PROGRESS - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - } - - when: "Poll and complete 'integration_task_2'" - def pollAndCompleteTask2Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try2) - - and: "the sub workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "Poll and complete 'integration_task_3'" - def pollAndCompleteTask3Try1 = 
workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask3Try1) - - and: "the sub workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.COMPLETED - } - - when: "the workflow is evaluated" - sweep(workflowInstanceId) - - then: "the workflow has progressed beyond the join task" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.COMPLETED - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - tasks[5].taskType == 'integration_task_4' - tasks[5].status == Task.Status.SCHEDULED - } - - when: "Poll and complete 'integration_task_4'" - def pollAndCompleteTask4Try = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4.worker') - - then: "verify that the tasks were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(pollAndCompleteTask4Try) - - and: "verify that the workflow is complete" - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.COMPLETED - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[4].referenceTaskName == 'dynamicfanouttask_join' - tasks[5].taskType == 'integration_task_4' - tasks[5].status == Task.Status.COMPLETED - } - - cleanup: "roll back the change made to integration_task_2 definition" - metadataService.updateTaskDef(persistedTask2Definition) - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy deleted file mode 100644 index 43c227640..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.Event -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class EventTaskSpec extends AbstractSpecification { - - def EVENT_BASED_WORKFLOW = 'test_event_workflow' - - @Autowired - Event eventTask - - @Autowired - QueueDAO queueDAO - - def setup() { - workflowTestUtil.registerWorkflows('event_workflow_integration_test.json') - } - - def "Verify that a event based simple workflow is executed"() { - when: "Start a event based workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(EVENT_BASED_WORKFLOW, 1, - '', [:], null, null, null) - - then: "Retrieve the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == TaskType.EVENT.name() - tasks[0].status == Task.Status.COMPLETED - } - - then: "Retrieve the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == TaskType.EVENT.name() - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['event_produced'] - tasks[1].taskType == 'integration_task_1' - 
tasks[1].status == Task.Status.SCHEDULED - } - - when: "The integration_task_1 is polled and completed" - def polledAndCompletedTry1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task was polled and completed and the workflow is in a complete state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTry1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy deleted file mode 100644 index c445daf46..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class ExclusiveJoinSpec extends AbstractSpecification { - - @Shared - def EXCLUSIVE_JOIN_WF = "ExclusiveJoinTestWorkflow" - - def setup() { - workflowTestUtil.registerWorkflows('exclusive_join_integration_test.json') - } - - def setTaskResult(String workflowInstanceId, String taskId, TaskResult.Status status, - Map output) { - TaskResult taskResult = new TaskResult(); - taskResult.setTaskId(taskId) - taskResult.setWorkflowInstanceId(workflowInstanceId) - taskResult.setStatus(status) - taskResult.setOutputData(output) - return taskResult - } - - def "Test that the default decision is run"() { - given: "The input parameter required to make decision_1 is null to ensure that the default decision is run" - def input = ["decision_1": "null"] - - when: "An exclusive join workflow is started with then workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', - input, null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the 
task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + - '.integration.worker', ["taskReferenceName": "task1"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'EXCLUSIVE_JOIN' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['taskReferenceName'] == 'task1' - } - } - - def "Test when the one decision is true and the other is decision null"() { - given: "The input parameter required to make decision_1 true and decision_2 null" - def input = ["decision_1": "true", "decision_2": "null"] - - when: "An exclusive join workflow is started with then workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', - input, null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + - '.integration.worker', ["taskReferenceName": "task1"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 
'integration_task_1' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2' + - '.integration.worker', ["taskReferenceName": "task2"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'EXCLUSIVE_JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[4].outputData['taskReferenceName'] == 'task2' - } - } - - def "Test when both the decisions, decision_1 and decision_2 are true"() { - given: "The input parameters to ensure that both the decisions are true" - def input = ["decision_1": "true", "decision_2": "true"] - - when: "An exclusive join workflow is started with then workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', - input, null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, 
true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + - '.integration.worker', ["taskReferenceName": "task1"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2' + - '.integration.worker', ["taskReferenceName": "task2"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_3' - tasks[4].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_3' is polled and completed" - 
def polledAndCompletedTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3' + - '.integration.worker', ["taskReferenceName": "task3"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask3Try1) - - and: "verify that the 'integration_task_3' is COMPLETED and the workflow has COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_3' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'EXCLUSIVE_JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[5].outputData['taskReferenceName'] == 'task3' - } - } - - def "Test when decision_1 is false and decision_3 is default"() { - given: "The input parameter required to make decision_1 false and decision_3 default" - def input = ["decision_1": "false", "decision_3": "null"] - - when: "An exclusive join workflow is started with then workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', - input, null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + - '.integration.worker', 
["taskReferenceName": "task1"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_4' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_4' is polled and completed" - def polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4' + - '.integration.worker', ["taskReferenceName": "task4"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) - - and: "verify that the 'integration_task_4' is COMPLETED and the workflow has COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_4' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'EXCLUSIVE_JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[4].outputData['taskReferenceName'] == 'task4' - } - } - - def "Test when decision_1 is false and decision_3 is true"() { - given: "The input parameter required to make decision_1 false and decision_3 true" - def input = ["decision_1": "false", "decision_3": "true"] - - when: "An exclusive join workflow is started with then workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 
'exclusive_join_workflow', - input, null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + - '.integration.worker', ["taskReferenceName": "task1"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_4' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_4' is polled and completed" - def polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4' + - '.integration.worker', ["taskReferenceName": "task4"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) - - and: "verify that the 'integration_task_4' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_4' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status 
== Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_5' - tasks[4].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_5' is polled and completed" - def polledAndCompletedTask5Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_5', 'task5' + - '.integration.worker', ["taskReferenceName": "task5"]) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask5Try1) - - and: "verify that the 'integration_task_4' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_4' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'DECISION' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_5' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'EXCLUSIVE_JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[5].outputData['taskReferenceName'] == 'task5' - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy deleted file mode 100644 index 44907f3df..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy +++ /dev/null @@ -1,765 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.test.base.AbstractSpecification -import com.netflix.conductor.test.utils.MockExternalPayloadStorage -import com.netflix.conductor.test.utils.UserTask - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedLargePayloadTask -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class ExternalPayloadStorageSpec extends AbstractSpecification { - - @Shared - def LINEAR_WORKFLOW_T1_T2 = 'integration_test_wf' - - @Shared - def CONDITIONAL_SYSTEM_TASK_WORKFLOW = 'ConditionalSystemWorkflow' - - @Shared - def FORK_JOIN_WF = 'FanInOutTest' - - @Shared - def DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest" - - @Shared - def WORKFLOW_WITH_INLINE_SUB_WF = 'WorkflowWithInlineSubWorkflow' - - @Shared - def WORKFLOW_WITH_DECISION_AND_TERMINATE = 'ConditionalTerminateWorkflow' - - @Shared - def WORKFLOW_WITH_SYNCHRONOUS_SYSTEM_TASK = 'workflow_with_synchronous_system_task' - - @Autowired - UserTask userTask - - @Autowired - SubWorkflow subWorkflowTask - - @Autowired - MockExternalPayloadStorage mockExternalPayloadStorage - - def setup() { - workflowTestUtil.registerWorkflows('simple_workflow_1_integration_test.json', - 
'conditional_system_task_workflow_integration_test.json', - 'fork_join_integration_test.json', - 'simple_workflow_with_sub_workflow_inline_def_integration_test.json', - 'decision_and_terminate_integration_test.json', - 'workflow_with_synchronous_system_task.json', - 'dynamic_fork_join_integration_test.json' - ) - } - - def "Test simple workflow using external payload storage"() { - - given: "An existing simple workflow definition" - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - and: "input required to start large payload workflow" - def correlationId = 'wf_external_storage' - String workflowInputPath = uploadInitialWorkflowInput() - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - tasks[1].taskType == 'integration_task_2' - tasks[1].status == 
Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_2' with external payload storage" - pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask("integration_task_2", "task2.integration.worker", "") - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - then: "verify that the 'integration_task_2' is complete and the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - output.isEmpty() - - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - - } - } - - def "Test workflow with synchronous system task using external payload storage"() { - given: "An existing workflow definition with sync system task followed by a simple task" - metadataService.getWorkflowDef(WORKFLOW_WITH_SYNCHRONOUS_SYSTEM_TASK, 1) - - and: "input required to start large payload workflow" - def correlationId = 'wf_external_storage' - String workflowInputPath = uploadInitialWorkflowInput() - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SYNCHRONOUS_SYSTEM_TASK, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = 
workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == 'JSON_JQ_TRANSFORM' - tasks[1].status == Task.Status.COMPLETED - - tasks[1].outputData['result'] == 104 // output of .tp2.TEST_SAMPLE | length expression from output.json. On assertion failure, check workflow definition and output.json - } - } - - def "Test conditional workflow with system task using external payload storage"() { - - given: "An existing workflow definition" - metadataService.getWorkflowDef(CONDITIONAL_SYSTEM_TASK_WORKFLOW, 1) - - and: "input required to start large payload workflow" - String workflowInputPath = uploadInitialWorkflowInput() - def correlationId = "conditional_system_external_storage" - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(CONDITIONAL_SYSTEM_TASK_WORKFLOW, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 
'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == "DECISION" - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == "USER_TASK" - tasks[2].status == Task.Status.SCHEDULED - tasks[2].inputData.isEmpty() - - } - - when: "the system task 'USER_TASK' is started by issuing a system task call" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def taskId = workflow.getTaskByRefName('user_task').taskId - asyncSystemTaskExecutor.execute(userTask, taskId) - - then: "verify that the user task is in a COMPLETED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == "DECISION" - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == "USER_TASK" - tasks[2].status == Task.Status.COMPLETED - tasks[2].inputData.isEmpty() - - tasks[2].outputData.get("size") == 104 - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "poll and complete and 'integration_task_3'" - def pollAndCompleteTask3 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3.integration.worker', - ['op': 'success_task3']) - - then: "verify that the 'integration_task_3' was polled and acknowledged" - 
verifyPolledAndAcknowledgedTask(pollAndCompleteTask3) - - then: "verify that the 'integration_task_3' is complete and the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - output.isEmpty() - - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == "DECISION" - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == "USER_TASK" - tasks[2].status == Task.Status.COMPLETED - tasks[2].inputData.isEmpty() - - tasks[2].outputData.get("size") == 104 - tasks[3].taskType == 'integration_task_3' - tasks[3].status == Task.Status.COMPLETED - } - } - - def "Test fork join workflow using external payload storage"() { - - given: "An existing fork join workflow definition" - metadataService.getWorkflowDef(FORK_JOIN_WF, 1) - - and: "input required to start large payload workflow" - def correlationId = 'fork_join_external_storage' - String workflowInputPath = uploadInitialWorkflowInput() - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - } - - when: "the first task of the left fork is polled and completed" - def polledAndAckTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the 
'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndAckTask) - - and: "task is completed and the next task in the fork is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_3' - } - - when: "the first task of the right fork is polled and completed with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def polledAndAckLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_2', 'task2.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(polledAndAckLargePayloadTask) - - and: "task is completed and the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_3' - } - - when: "the second task of the left fork is polled and completed with external payload storage" - polledAndAckLargePayloadTask = 
workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_3', 'task3.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_3' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(polledAndAckLargePayloadTask) - - and: "task is completed and the next task after join in scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].outputData.isEmpty() - - tasks[3].status == Task.Status.COMPLETED - tasks[3].taskType == 'JOIN' - tasks[3].outputData.isEmpty() - - tasks[4].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_3' - tasks[4].outputData.isEmpty() - - tasks[5].status == Task.Status.SCHEDULED - tasks[5].taskType == 'integration_task_4' - } - - when: "the task 'integration_task_4' is polled and completed" - polledAndAckTask = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4.integration.worker') - - then: "verify that the 'integration_task_4' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndAckTask) - - and: "task is completed and the workflow is in completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].outputData.isEmpty() - - tasks[3].status == Task.Status.COMPLETED - tasks[3].taskType == 'JOIN' - tasks[3].outputData.isEmpty() - - 
tasks[4].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_3' - tasks[4].outputData.isEmpty() - - tasks[5].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_4' - } - } - - def "Test workflow with subworkflow using external payload storage"() { - - given: "An existing workflow definition" - metadataService.getWorkflowDef(WORKFLOW_WITH_INLINE_SUB_WF, 1) - - and: "input required to start large payload workflow" - String workflowInputPath = uploadInitialWorkflowInput() - def correlationId = "workflow_with_inline_sub_wf_external_storage" - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_INLINE_SUB_WF, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.SCHEDULED - tasks[1].inputData.isEmpty() - - } - - when: 
"the subworkflow is started by issuing a system task call" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = workflow.getTaskByRefName('swt').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - - then: "verify that the sub workflow task is in a IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].inputData.isEmpty() - - } - - when: "sub workflow is retrieved" - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowInstanceId = workflow.getTaskByRefName('swt').subWorkflowId - - then: "verify that the sub workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - input.isEmpty() - - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'integration_task_3' - } - - when: "poll and complete the 'integration_task_3' with external payload storage" - pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_3', 'task3.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_3' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the sub workflow is completed" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - input.isEmpty() - - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_3' - 
tasks[0].outputData.isEmpty() - - output.isEmpty() - - } - - and: "the subworkflow task is completed and the workflow is in running state" - sweep(workflowInstanceId) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.COMPLETED - tasks[1].inputData.isEmpty() - - tasks[1].outputData.isEmpty() - - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].inputData.isEmpty() - - } - - when: "poll and complete the 'integration_task_2' with external payload storage" - pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_2', 'task2.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the task is completed and the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - output.isEmpty() - - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.COMPLETED - tasks[1].inputData.isEmpty() - - tasks[1].outputData.isEmpty() - - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].inputData.isEmpty() - - tasks[2].outputData.isEmpty() - - } - } - - def "Test retry workflow using external payload storage"() { - - setup: "Modify the task definition" - def persistedTask2Definition = 
workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 2, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - modifiedTask2Definition.setRetryDelaySeconds(0) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing simple workflow definition" - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - and: "input required to start large payload workflow" - def correlationId = 'retry_wf_external_storage' - String workflowInputPath = uploadInitialWorkflowInput() - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == 
'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - - } - - when: "poll and fail the 'integration_task_2'" - def pollAndFailTask2Try1 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failed') - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndFailTask2Try1) - - and: "verify that task is retried and workflow is still running" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].inputData.isEmpty() - - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].inputData.isEmpty() - - } - - when: "poll and complete the retried 'integration_task_2'" - def pollAndCompleteTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'success_task2']) - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask2) - - and: "verify that the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - output.isEmpty() - - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].inputData.isEmpty() - - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].inputData.isEmpty() - - } - - cleanup: - metadataService.updateTaskDef(persistedTask2Definition) - } - - def "Test workflow with terminate in decision branch using external payload storage"() { 
- - given: "An existing workflow definition" - metadataService.getWorkflowDef(WORKFLOW_WITH_DECISION_AND_TERMINATE, 1) - - and: "input required to start large payload workflow" - String workflowInputPath = uploadInitialWorkflowInput() - def correlationId = "decision_terminate_external_storage" - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_DECISION_AND_TERMINATE, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - tasks[0].seq == 1 - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = uploadLargeTaskOutput() - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has FAILED due to terminate task" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 3 - output.isEmpty() - - reasonForIncompletion.contains('Workflow is FAILED by TERMINATE task') - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.isEmpty() - - tasks[0].seq == 1 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[1].seq == 2 - tasks[2].taskType == 'TERMINATE' - tasks[2].status == Task.Status.COMPLETED - tasks[2].inputData.isEmpty() - - tasks[2].seq == 3 - tasks[2].outputData.isEmpty() - } - } - 
- def "Test dynamic fork join workflow with subworkflow using external payload storage"() { - given: "An existing dynamic fork join workflow definition" - metadataService.getWorkflowDef(DYNAMIC_FORK_JOIN_WF, 1) - - and: "input required to start large payload workflow" - def correlationId = "dynamic_fork_join_subworkflow_external_storage" - String workflowInputPath = uploadInitialWorkflowInput() - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, correlationId, null, workflowInputPath, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - input.isEmpty() - - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_1' with external payload storage" - String taskOutputPath = "${UUID.randomUUID()}.json" - mockExternalPayloadStorage.upload(taskOutputPath, mockExternalPayloadStorage.curateDynamicForkLargePayload()) - def pollAndCompleteLargePayloadTask = workflowTestUtil.pollAndCompleteLargePayloadTask('integration_task_1', 'task1.integration.worker', taskOutputPath) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedLargePayloadTask(pollAndCompleteLargePayloadTask) - - and: "verify that workflow has progressed further ahead and new dynamic tasks have been scheduled with externalized payloads" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - with(workflow) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - !tasks[0].outputData - tasks[1].taskType == 'FORK' - !tasks[1].inputData - - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SUB_WORKFLOW' - 
!tasks[2].inputData - - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].referenceTaskName == 'dynamicfanouttask_join' - } - } - - private String uploadLargeTaskOutput() { - String taskOutputPath = "${UUID.randomUUID()}.json" - mockExternalPayloadStorage.upload(taskOutputPath, mockExternalPayloadStorage.readOutputDotJson(), 0) - return taskOutputPath - } - - private String uploadInitialWorkflowInput() { - String workflowInputPath = "${UUID.randomUUID()}.json" - mockExternalPayloadStorage.upload(workflowInputPath, ['param1': 'p1 value', 'param2': 'p2 value', 'case': 'two']) - return workflowInputPath - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/FailureWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/FailureWorkflowSpec.groovy deleted file mode 100644 index 7b453c134..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/FailureWorkflowSpec.groovy +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -class FailureWorkflowSpec extends AbstractSpecification { - - @Shared - def WORKFLOW_WITH_TERMINATE_TASK_FAILED = 'test_terminate_task_failed_wf' - - @Shared - def PARENT_WORKFLOW_WITH_FAILURE_TASK = 'test_task_failed_parent_wf' - - @Autowired - SubWorkflow subWorkflowTask - - def setup() { - workflowTestUtil.registerWorkflows( - 'failure_workflow_for_terminate_task_workflow.json', - 'terminate_task_failed_workflow_integration.json', - 'test_task_failed_parent_workflow.json', - 'test_task_failed_sub_workflow.json' - ) - } - - def "Test workflow with a task that failed"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the failed task" - def testId = 'testId' - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_TERMINATE_TASK_FAILED, 1, - testId, workflowInput, null, null, null) - - then: "Verify that the workflow has failed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - reasonForIncompletion == "Early exit in terminate" - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 
'TERMINATE' - tasks[1].seq == 2 - output - def failedWorkflowId = output['conductor.failure_workflow'] as String - def workflowCorrelationId = correlationId - def workflowFailureTaskId = tasks[1].taskId - with(workflowExecutionService.getExecutionStatus(failedWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - correlationId == workflowCorrelationId - input['workflowId'] == workflowInstanceId - input['failureTaskId'] == workflowFailureTaskId - tasks.size() == 1 - tasks[0].taskType == 'LAMBDA' - } - } - } - - def "Test workflow with a task failed in subworkflow"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the subworkflow task" - def workflowInstanceId = workflowExecutor.startWorkflow(PARENT_WORKFLOW_WITH_FAILURE_TASK, 1, - '', workflowInput, null, null, null) - - then: "verify that the workflow has started and the tasks are as expected" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].referenceTaskName == 'lambdaTask1' - tasks[0].seq == 1 - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].seq == 2 - } - - when: "subworkflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = workflow.getTaskByRefName("test_task_failed_sub_wf").getTaskId() - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.getTaskByRefName("test_task_failed_sub_wf").subWorkflowId - - then: "verify that the sub workflow has failed" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - 
reasonForIncompletion.contains('Workflow is FAILED by TERMINATE task') - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'TERMINATE' - tasks[1].seq == 2 - } - - then: "Verify that the workflow has failed and correct inputs passed into the failure workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].referenceTaskName == 'lambdaTask1' - tasks[0].seq == 1 - tasks[1].status == Task.Status.FAILED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].seq == 2 - def failedWorkflowId = output['conductor.failure_workflow'] as String - def workflowCorrelationId = correlationId - def workflowFailureTaskId = tasks[1].taskId - with(workflowExecutionService.getExecutionStatus(failedWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - correlationId == workflowCorrelationId - input['workflowId'] == workflowInstanceId - input['failureTaskId'] == workflowFailureTaskId - tasks.size() == 1 - tasks[0].taskType == 'LAMBDA' - } - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy deleted file mode 100644 index 2351badb2..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy +++ /dev/null @@ -1,1045 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -class ForkJoinSpec extends AbstractSpecification { - - @Shared - def FORK_JOIN_WF = 'FanInOutTest' - - @Shared - def FORK_JOIN_NESTED_WF = 'FanInOutNestedTest' - - @Shared - def FORK_JOIN_NESTED_SUB_WF = 'FanInOutNestedSubWorkflowTest' - - @Shared - def WORKFLOW_FORK_JOIN_OPTIONAL_SW = "integration_test_fork_join_optional_sw" - - @Shared - def FORK_JOIN_SUB_WORKFLOW = 'integration_test_fork_join_sw' - - @Autowired - SubWorkflow subWorkflowTask - - def setup() { - workflowTestUtil.registerWorkflows('fork_join_integration_test.json', - 'fork_join_with_no_task_retry_integration_test.json', - 'nested_fork_join_integration_test.json', - 'simple_workflow_1_integration_test.json', - 'nested_fork_join_with_sub_workflow_integration_test.json', - 'simple_one_task_sub_workflow_integration_test.json', - 'fork_join_with_optional_sub_workflow_forks_integration_test.json', - 'fork_join_sub_workflow.json' - ) - } - - /** - * start - * | - * fork - * / \ - * task1 task2 - * | / - * task3 / - * \ / - * \ / - * join - * | - * task4 - * | - * End - */ - def "Test a simple workflow with fork join success flow"() { - when: "A fork join workflow is started" - def 
workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, - 'fanoutTest', [:], - null, null, null) - - then: "verify that the workflow has started and the starting nodes of the each fork are in scheduled state" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - } - - when: "The first task of the fork is polled and completed" - def polledAndAckTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker') - - then: "verify that the 'integration_task_1' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask1Try1) - - and: "The workflow has been updated and has all the required tasks in the right status to move forward" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_3' - } - - when: "The 'integration_task_3' is polled and completed" - def polledAndAckTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task1.worker') - - then: "verify that the 'integration_task_3' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask3Try1) - - and: "The workflow has been updated with the task 
status and task list" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_3' - } - - when: "The other node of the fork is completed by completing 'integration_task_2'" - def polledAndAckTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.worker') - - then: "verify that the 'integration_task_2' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask2Try1) - - and: "The workflow has been updated with the task status and task list" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[3].taskType == 'JOIN' - tasks[5].status == Task.Status.SCHEDULED - tasks[5].taskType == 'integration_task_4' - } - - when: "The last task of the workflow is then polled and completed integration_task_4'" - def polledAndAckTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task1.worker') - - then: "verify that the 'integration_task_4' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask4Try1) - - and: "Then verify that the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 6 - tasks[5].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_4' - } - } - - def "Test a simple workflow with fork join failure flow"() { - setup: "Ensure that 'integration_task_2' has a retry 
count of 0" - def persistedIntegrationTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedIntegrationTask2Definition = new TaskDef(persistedIntegrationTask2Definition.name, - persistedIntegrationTask2Definition.description, persistedIntegrationTask2Definition.ownerEmail, 0, - 0, persistedIntegrationTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedIntegrationTask2Definition) - - when: "A fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, - 'fanoutTest', [:], - null, null, null) - - then: "verify that the workflow has started and the starting nodes of the each fork are in scheduled state" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - } - - when: "The first task of the fork is polled and completed" - def polledAndAckTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker') - - then: "verify that the 'integration_task_1' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask1Try1) - - and: "The workflow has been updated and has all the required tasks in the right status to move forward" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == 
Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_3' - } - - when: "The other node of the fork is completed by completing 'integration_task_2'" - def polledAndAckTask2Try1 = workflowTestUtil.pollAndFailTask('integration_task_2', - 'task1.worker', 'Failed....') - - then: "verify that the 'integration_task_2' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask2Try1) - - and: "the workflow is in the failed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 5 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[2].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[3].status == Task.Status.CANCELED - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].taskType == 'integration_task_3' - } - - cleanup: "Restore the task definitions that were modified as part of this feature testing" - metadataService.updateTaskDef(persistedIntegrationTask2Definition) - } - - def "Test retrying a failed fork join workflow"() { - - when: "A fork join workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_WF + '_2', 1, - 'fanoutTest', [:], - null, null, null) - - then: "verify that the workflow has started and the starting nodes of the each fork are in scheduled state" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'integration_task_0_RT_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_0_RT_2' - tasks[3].status == Task.Status.IN_PROGRESS - 
tasks[3].taskType == 'JOIN' - } - - when: "The first task of the fork is polled and completed" - def polledAndAckTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_0_RT_1', 'task1.worker') - - then: "verify that the 'integration_task_0_RT_1' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask1Try1) - - and: "The workflow has been updated and has all the required tasks in the right status to move forward" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0_RT_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_0_RT_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_0_RT_3' - } - - when: "The other node of the fork is completed by completing 'integration_task_0_RT_2'" - def polledAndAckTask2Try1 = workflowTestUtil.pollAndFailTask('integration_task_0_RT_2', - 'task1.worker', 'Failed....') - - then: "verify that the 'integration_task_0_RT_1' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask2Try1) - - and: "the workflow is in the failed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 5 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0_RT_1' - tasks[2].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0_RT_2' - tasks[3].status == Task.Status.CANCELED - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].taskType == 'integration_task_0_RT_3' - } - - when: "The workflow is retried" - workflowExecutor.retry(workflowInstanceId, false) - - then: "verify that all the 
workflow is retried and new tasks are added in place of the failed tasks" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0_RT_1' - tasks[2].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0_RT_2' - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].taskType == 'integration_task_0_RT_3' - tasks[5].status == Task.Status.SCHEDULED - tasks[5].taskType == 'integration_task_0_RT_2' - tasks[6].status == Task.Status.SCHEDULED - tasks[6].taskType == 'integration_task_0_RT_3' - } - - when: "The 'integration_task_0_RT_3' is polled and completed" - def polledAndAckTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_0_RT_3', 'task1.worker') - - then: "verify that the 'integration_task_3' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask3Try1) - - - when: "The other node of the fork is completed by completing 'integration_task_0_RT_2'" - def polledAndAckTask2Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_0_RT_2', 'task1.worker') - - then: "verify that the 'integration_task_2' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask2Try2) - - when: "The last task of the workflow is then polled and completed integration_task_0_RT_4'" - def polledAndAckTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_0_RT_4', 'task1.worker') - - then: "verify that the 'integration_task_0_RT_4' was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask4Try1) - - and: "Then verify that the workflow is completed and the task list of execution is as expected" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 8 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_0_RT_1' - tasks[2].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_0_RT_2' - tasks[3].status == Task.Status.COMPLETED - tasks[3].taskType == 'JOIN' - tasks[4].status == Task.Status.CANCELED - tasks[4].taskType == 'integration_task_0_RT_3' - tasks[5].status == Task.Status.COMPLETED - tasks[5].taskType == 'integration_task_0_RT_2' - tasks[6].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_0_RT_3' - tasks[7].status == Task.Status.COMPLETED - tasks[7].taskType == 'integration_task_0_RT_4' - } - } - - def "Test nested fork join workflow success flow"() { - given: "Input for the nested fork join workflow" - Map input = new HashMap() - input["case"] = "a" - - when: "A nested workflow is started with the input" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_NESTED_WF, 1, - 'fork_join_nested_test', input, - null, null, null) - - then: "verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks.findAll { it.referenceTaskName in ['t11', 't12', 't13', 'fork1', 'fork2'] }.size() == 5 - tasks.findAll { it.referenceTaskName in ['t1', 't2', 't16'] }.size() == 0 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_11' - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_12' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_13' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - tasks[5].inputData['joinOn'] == ['t11', 'join2'] - tasks[6].taskType == 'JOIN' - tasks[6].status == 
Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t14', 't20'] - } - - when: "Poll and Complete tasks: 'integration_task_11', 'integration_task_12' and 'integration_task_13'" - def polledAndAckTask11Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_11', 'task11.worker') - def polledAndAckTask12Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_12', 'task12.worker') - def polledAndAckTask13Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_13', 'task13.worker') - - - then: "verify that tasks 'integration_task_11', 'integration_task_12' and 'integration_task_13' were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask11Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask12Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask13Try1) - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 10 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_11' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_12' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_13' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - tasks[5].inputData['joinOn'] == ['t11', 'join2'] - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t14', 't20'] - tasks[7].taskType == 'integration_task_14' - tasks[7].status == Task.Status.SCHEDULED - tasks[8].taskType == 'DECISION' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'integration_task_16' - tasks[9].status == Task.Status.SCHEDULED - } - - when: "Poll and 
Complete tasks: 'integration_task_16' and 'integration_task_14'" - def polledAndAckTask16Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_16', 'task16.worker') - def polledAndAckTask14Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_14', 'task14.worker') - - then: "verify that tasks 'integration_task_16' and 'integration_task_14'were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask16Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask14Try1) - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 11 - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - tasks[5].inputData['joinOn'] == ['t11', 'join2'] - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t14', 't20'] - tasks[7].taskType == 'integration_task_14' - tasks[7].status == Task.Status.COMPLETED - tasks[8].taskType == 'DECISION' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'integration_task_16' - tasks[9].status == Task.Status.COMPLETED - tasks[10].taskType == 'integration_task_19' - tasks[10].status == Task.Status.SCHEDULED - } - - when: "Poll and Complete tasks: 'integration_task_19'" - def polledAndAckTask19Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_19', 'task19.worker') - - then: "verify that tasks 'integration_task_19' polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask19Try1) - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 12 - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.IN_PROGRESS - tasks[5].inputData['joinOn'] == ['t11', 'join2'] - tasks[6].taskType == 'JOIN' - 
tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t14', 't20'] - tasks[10].taskType == 'integration_task_19' - tasks[10].status == Task.Status.COMPLETED - tasks[11].taskType == 'integration_task_20' - tasks[11].status == Task.Status.SCHEDULED - } - - when: "Poll and Complete tasks: 'integration_task_20'" - def polledAndAckTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task20.worker') - - then: "verify that tasks 'integration_task_20'polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask20Try1) - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 13 - tasks[5].taskType == 'JOIN' - tasks[5].status == Task.Status.COMPLETED - tasks[5].inputData['joinOn'] == ['t11', 'join2'] - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.COMPLETED - tasks[6].inputData['joinOn'] == ['t14', 't20'] - tasks[11].taskType == 'integration_task_20' - tasks[11].status == Task.Status.COMPLETED - tasks[12].taskType == 'integration_task_15' - tasks[12].status == Task.Status.SCHEDULED - } - - when: "Poll and Complete tasks: 'integration_task_15'" - def polledAndAckTask15Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_15', 'task15.worker') - - then: "verify that tasks 'integration_task_15' polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask15Try1) - - and: "verify that the workflow is in a complete state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 13 - tasks[12].taskType == 'integration_task_15' - tasks[12].status == Task.Status.COMPLETED - } - } - - def "Test nested workflow which contains a sub workflow task"() { - given: "Input for the nested fork join workflow" - Map input = new HashMap() - input["case"] 
= "a" - - when: "A nested workflow is started with the input" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_NESTED_SUB_WF, 1, - 'fork_join_nested_test', input, - null, null, null) - - then: "The workflow is in the running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 8 - tasks.findAll { it.referenceTaskName in ['t11', 't12', 't13', 'fork1', 'fork2', 'sw1'] }.size() == 6 - tasks.findAll { it.referenceTaskName in ['t1', 't2', 't16'] }.size() == 0 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_11' - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_12' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_13' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == 'SUB_WORKFLOW' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == Task.Status.IN_PROGRESS - tasks[7].inputData['joinOn'] == ['t14', 't20'] - } - - when: "Poll and Complete tasks: 'integration_task_11', 'integration_task_12' and 'integration_task_13'" - def polledAndAckTask11Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_11', 'task11.worker') - def polledAndAckTask12Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_12', 'task12.worker') - def polledAndAckTask13Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_13', 'task13.worker') - workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - - - then: "verify that tasks 'integration_task_11', 'integration_task_12' and 'integration_task_13' were polled and acknowledged" - 
workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask11Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask12Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask13Try1) - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 11 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_11' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'FORK' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_12' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_13' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 'SUB_WORKFLOW' - tasks[5].status == Task.Status.SCHEDULED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == Task.Status.IN_PROGRESS - tasks[7].inputData['joinOn'] == ['t14', 't20'] - tasks[8].taskType == 'integration_task_14' - tasks[8].status == Task.Status.SCHEDULED - tasks[9].taskType == 'DECISION' - tasks[9].status == Task.Status.COMPLETED - tasks[10].taskType == 'integration_task_16' - tasks[10].status == Task.Status.SCHEDULED - } - - when: "Poll and Complete tasks: 'integration_task_16' and 'integration_task_14'" - def polledAndAckTask16Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_16', 'task16.worker') - def polledAndAckTask14Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_14', 'task14.worker') - - and: "Get the sub workflow id associated with the SubWorkflow Task sw1 and start the system task" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId() - 
asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - def updatedWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowInstanceId = updatedWorkflow.getTaskByRefName('sw1').subWorkflowId - - then: "verify that tasks 'integration_task_16' and 'integration_task_14'were polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask16Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask14Try1) - with(updatedWorkflow) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 12 - tasks[5].taskType == 'SUB_WORKFLOW' - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == Task.Status.IN_PROGRESS - tasks[7].inputData['joinOn'] == ['t14', 't20'] - tasks[8].taskType == 'integration_task_14' - tasks[8].status == Task.Status.COMPLETED - tasks[9].taskType == 'DECISION' - tasks[9].status == Task.Status.COMPLETED - tasks[10].taskType == 'integration_task_16' - tasks[10].status == Task.Status.COMPLETED - tasks[11].taskType == 'integration_task_19' - tasks[11].status == Task.Status.SCHEDULED - } - - and: "verify that the simple Sub Workflow is in running state and the first task related to it is scheduled" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Poll and complete all the tasks associated with the sub workflow" - def polledAndAckTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.worker') - def polledAndAckTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.worker') - - then: "verify that tasks 'integration_task_1' and 'integration_task_2'were 
polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask1Try1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask2Try1) - - and: "verify that the simple Sub Workflow is in a COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - and: " verify that the sub workflow task is completed and other preceding tasks are added to the workflow task list" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 12 - tasks[5].taskType == 'SUB_WORKFLOW' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == Task.Status.IN_PROGRESS - tasks[7].inputData['joinOn'] == ['t14', 't20'] - tasks[11].taskType == 'integration_task_19' - tasks[11].status == Task.Status.SCHEDULED - } - - when: "Also the poll and complete the 'integration_task_19'" - def polledAndAckTask19Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_19', 'task19.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask19Try1) - - and: "verify that the integration_task_19 is completed and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 13 - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.IN_PROGRESS - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == 
Task.Status.IN_PROGRESS - tasks[7].inputData['joinOn'] == ['t14', 't20'] - tasks[11].taskType == 'integration_task_19' - tasks[11].status == Task.Status.COMPLETED - tasks[12].taskType == 'integration_task_20' - tasks[12].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_20'" - def polledAndAckTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task20.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask20Try1) - - and: "verify that the integration_task_20 is completed and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 14 - tasks[6].taskType == 'JOIN' - tasks[6].status == Task.Status.COMPLETED - tasks[6].inputData['joinOn'] == ['t11', 'join2', 'sw1'] - tasks[7].taskType == 'JOIN' - tasks[7].status == Task.Status.COMPLETED - tasks[7].inputData['joinOn'] == ['t14', 't20'] - tasks[12].taskType == 'integration_task_20' - tasks[12].status == Task.Status.COMPLETED - tasks[13].taskType == 'integration_task_15' - tasks[13].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 'integration_task_15'" - def polledAndAckTask15Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_15', 'task15.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckTask15Try1) - - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 14 - tasks[13].taskType == 'integration_task_15' - tasks[13].status == Task.Status.COMPLETED - } - } - - def "Test fork join with sub workflows containing optional tasks"() { - given: "A input to the workflow that has forks of sub workflows with an optional task" - Map workflowInput = new HashMap() - 
workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "A workflow that has forks of sub workflows with an optional task is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_FORK_JOIN_OPTIONAL_SW, 1, - '', workflowInput, - null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "both the sub workflows are started by issuing a system task call" - def workflowWithScheduledSubWorkflows = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId1 = workflowWithScheduledSubWorkflows.getTaskByRefName('st1').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId1) - def subWorkflowTaskId2 = workflowWithScheduledSubWorkflows.getTaskByRefName('st2').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId2) - - then: "verify that the sub workflow tasks are in a IN PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.IN_PROGRESS - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 'st2'] - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "Also verify that the sub workflows are in a RUNNING state" - def workflowWithRunningSubWorkflows = 
workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowInstanceId1 = workflowWithRunningSubWorkflows.getTaskByRefName('st1').subWorkflowId - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId1, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - def subWorkflowInstanceId2 = workflowWithRunningSubWorkflows.getTaskByRefName('st2').subWorkflowId - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId2, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - when: "The 'simple_task_in_sub_wf' belonging to both the sub workflows is polled and failed" - def polledAndAckSubWorkflowTask1 = workflowTestUtil.pollAndFailTask('simple_task_in_sub_wf', - 'task1.worker', 'Failed....') - def polledAndAckSubWorkflowTask2 = workflowTestUtil.pollAndFailTask('simple_task_in_sub_wf', - 'task1.worker', 'Failed....') - - then: "verify that both the tasks were polled and failed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckSubWorkflowTask1) - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndAckSubWorkflowTask2) - - and: "verify that both the sub workflows are in failed state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId1, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId2, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - sweep(workflowInstanceId) - - and: "verify that the workflow is in a COMPLETED state and the join task is also marked as 
COMPLETED_WITH_ERRORS" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.COMPLETED_WITH_ERRORS - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.COMPLETED_WITH_ERRORS - tasks[3].taskType == 'JOIN' - tasks[3].status == Task.Status.COMPLETED_WITH_ERRORS - } - - when: "do a rerun on the sub workflow" - def reRunSubWorkflowRequest = new RerunWorkflowRequest() - reRunSubWorkflowRequest.reRunFromWorkflowId = subWorkflowInstanceId1 - workflowExecutor.rerun(reRunSubWorkflowRequest) - - then: "verify that the sub workflows are in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId1, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - and: "parent workflow remains the same" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.COMPLETED_WITH_ERRORS - tasks[2].taskType == 'SUB_WORKFLOW' - tasks[2].status == Task.Status.COMPLETED_WITH_ERRORS - tasks[3].taskType == 'JOIN' - tasks[3].status == Task.Status.COMPLETED_WITH_ERRORS - } - } - - def "Test fork join with sub workflow task using task definition"() { - given: "A input to the workflow that has fork with sub workflow task" - Map workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "A workflow that has fork with sub workflow task is started" - def workflowInstanceId = 
workflowExecutor.startWorkflow(FORK_JOIN_SUB_WORKFLOW, 1, '', workflowInput, null, - null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "the subworkflow is started by issuing a system task call" - def parentWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = parentWorkflow.getTaskByRefName('st1').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - - then: "verify that the sub workflow task is in a IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.IN_PROGRESS - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "sub workflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowInstanceId = workflow.getTaskByRefName('st1').subWorkflowId - - then: "verify that the sub workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == 
Task.Status.SCHEDULED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - when: "the 'simple_task_in_sub_wf' belonging to the sub workflow is polled and failed" - def polledAndFailSubWorkflowTask = workflowTestUtil.pollAndFailTask('simple_task_in_sub_wf', - 'task1.worker', 'Failed....') - - then: "verify that the task was polled and failed" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndFailSubWorkflowTask) - - and: "verify that the sub workflow is in failed state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - and: "verify that the workflow is in a RUNNING state and sub workflow task is retried" - sweep(workflowInstanceId) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.IN_PROGRESS - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].status == Task.Status.SCHEDULED - } - - when: "the sub workflow is started by issuing a system task call" - parentWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - subWorkflowTaskId = parentWorkflow.getTaskByRefName('st1').taskId - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - - then: "verify that the sub workflow task is in a IN PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == 
Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.IN_PROGRESS - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].status == Task.Status.IN_PROGRESS - } - - when: "sub workflow is retrieved" - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - subWorkflowInstanceId = workflow.getTaskByRefName('st1').subWorkflowId - - then: "verify that the sub workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - when: "the 'simple_task_in_sub_wf' belonging to the sub workflow is polled and completed" - def polledAndCompletedSubWorkflowTask = workflowTestUtil.pollAndCompleteTask('simple_task_in_sub_wf', 'subworkflow.task.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndCompletedSubWorkflowTask) - - and: "verify that the sub workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'simple_task_in_sub_wf' - } - - and: "verify that the workflow is in a RUNNING state and sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 
'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.IN_PROGRESS - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].status == Task.Status.COMPLETED - } - - when: "the simple task is polled and completed" - def polledAndCompletedSimpleTask = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.worker') - - then: "verify that the task was polled and acknowledged" - workflowTestUtil.verifyPolledAndAcknowledgedTask(polledAndCompletedSimpleTask) - - and: "verify that the workflow is in a COMPLETED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'JOIN' - tasks[3].inputData['joinOn'] == ['st1', 't2'] - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRerunSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRerunSpec.groovy deleted file mode 100644 index 62c703164..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRerunSpec.groovy +++ /dev/null @@ -1,506 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class HierarchicalForkJoinSubworkflowRerunSpec extends AbstractSpecification { - - @Shared - def FORK_JOIN_HIERARCHICAL_SUB_WF = 'hierarchical_fork_join_swf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('hierarchical_fork_join_swf.json', - 'simple_workflow_1_integration_test.json' - ) - - //region Test setup: 3 workflows reach FAILED state because task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(FORK_JOIN_HIERARCHICAL_SUB_WF, 1) - - and: "input required to start the workflow execution" - String correlationId = 'rerun_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : FORK_JOIN_HIERARCHICAL_SUB_WF, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(FORK_JOIN_HIERARCHICAL_SUB_WF, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and 
is started by issuing a system task call" - List polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "poll and complete the integration_task_2 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the leaf workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - 
tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - //endregion - } - - def cleanup() { - 
metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the root workflow. - * - * Expectation: The root workflow gets a new execution with the same id and spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the NEW leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. - */ - def "Test rerun on the root-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the root workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = rootWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the root workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task in the root workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != 
midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - 
when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - assertWorkflowIsCompleted(newMidLevelWorkflowId) - - then: "the root workflow is in COMPLETED state" - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the mid-level workflow. - * - * Expectation: The mid-level workflow gets a new execution with the same id and spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test rerun on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the mid level workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = midLevelWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the mid workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == 
Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "poll and complete the integration_task_2 task in the mid level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - 
assertWorkflowIsCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the leaf workflow. - * - * Expectation: The leaf workflow gets a new execution with the same id and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test rerun on the leaf-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the leaf workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = leafWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the leaf workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - 
tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the mid level and root workflows are sweeped" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify that the root workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete both tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - 
with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - void assertWorkflowIsCompleted(String workflowId) { - assert with(workflowExecutionService.getExecutionStatus(workflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRestartSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRestartSpec.groovy deleted file mode 100644 index 0416a8c5f..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRestartSpec.groovy +++ /dev/null @@ -1,499 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class HierarchicalForkJoinSubworkflowRestartSpec extends AbstractSpecification { - - @Shared - def FORK_JOIN_HIERARCHICAL_SUB_WF = 'hierarchical_fork_join_swf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('hierarchical_fork_join_swf.json', - 'simple_workflow_1_integration_test.json' - ) - - //region Test setup: 3 workflows reach FAILED state. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(FORK_JOIN_HIERARCHICAL_SUB_WF, 1) - - and: "input required to start the workflow execution" - String correlationId = 'retry_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : FORK_JOIN_HIERARCHICAL_SUB_WF, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(FORK_JOIN_HIERARCHICAL_SUB_WF, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and 
is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the leaf workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 
'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - //endregion - } - - def cleanup() { - 
metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A restart is executed on the root workflow. - * - * Expectation: The root workflow gets a new execution with the same id and spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the NEW leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. - */ - def "Test restart on the root in a 3-level subworkflow"() { - //region Test case - when: "do a restart on the root workflow" - workflowExecutor.restart(rootWorkflowId, false) - - then: "verify that the root workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task in the root workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status 
== Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - 
sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - assertWorkflowIsCompleted(newMidLevelWorkflowId) - - then: "the root workflow is in COMPLETED state" - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A restart is executed on the mid-level workflow. - * - * Expectation: The mid-level workflow gets a new execution with the same id and spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test restart on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the mid level workflow" - workflowExecutor.restart(midLevelWorkflowId, false) - - then: "verify that the mid workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == 
Task.Status.CANCELED - } - - when: "poll and complete the integration_task_2 task in the mid level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. 
- * - * A restart is executed on the leaf workflow. - * - * Expectation: The leaf workflow gets a new execution with the same id and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test restart on the leaf in a 3-level subworkflow"() { - //region Test case - when: "do a restart on the leaf workflow" - workflowExecutor.restart(leafWorkflowId, false) - - then: "verify that the leaf workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the mid level and root workflows are sweeped" - 
sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify that the root workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete both tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 
'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - void assertWorkflowIsCompleted(String workflowId) { - assert with(workflowExecutionService.getExecutionStatus(workflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRetrySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRetrySpec.groovy deleted file mode 100644 index 8bfebd490..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/HierarchicalForkJoinSubworkflowRetrySpec.groovy +++ /dev/null @@ -1,880 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class HierarchicalForkJoinSubworkflowRetrySpec extends AbstractSpecification { - - @Shared - def FORK_JOIN_HIERARCHICAL_SUB_WF = 'hierarchical_fork_join_swf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('hierarchical_fork_join_swf.json', - 'simple_workflow_1_integration_test.json' - ) - - //region Test setup: 3 workflows reach FAILED state. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(FORK_JOIN_HIERARCHICAL_SUB_WF, 1) - - and: "input required to start the workflow execution" - String correlationId = 'retry_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : FORK_JOIN_HIERARCHICAL_SUB_WF, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(FORK_JOIN_HIERARCHICAL_SUB_WF, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and 
is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the leaf workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 
'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - //endregion - } - - def cleanup() { - 
metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the root workflow. - * - * Expectation: The root workflow spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. - */ - def "Test retry on the root in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(rootWorkflowId, false) - - then: "verify that the root workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - tasks[4].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[4].status == Task.Status.SCHEDULED - tasks[4].retriedTaskId == tasks[1].taskId - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 
TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - 
assertWorkflowIsCompleted(newMidLevelWorkflowId) - - then: "the root workflow is in COMPLETED state" - assertSubWorkflowTaskIsRetriedAndWorkflowCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the root workflow. - * - * Expectation: The leaf workflow is retried and both its parent (mid-level) and grand parent (root) workflows are also retried. - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the mid level workflow" - workflowExecutor.retry(midLevelWorkflowId, false) - - then: "verify that the mid workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - tasks[4].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[4].status == Task.Status.SCHEDULED - tasks[4].retriedTaskId == tasks[1].taskId - } - - and: "verify the SUB_WORKFLOW task in root workflow is IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - 
tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - assertSubWorkflowTaskIsRetriedAndWorkflowCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the mid-level workflow. 
- * - * Expectation: The mid-level workflow spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the leaf in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the leaf workflow" - workflowExecutor.retry(leafWorkflowId, false) - - then: "verify that the leaf workflow is in RUNNING state and failed task is retried" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - then: "verify that the mid-level workflow's SUB_WORKFLOW task is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the root workflow's SUB_WORKFLOW task is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 
'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid-level workflow's JOIN task is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify that the root workflow's JOIN task is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - 
tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the mid-level workflow. - * - * Expectation: The leaf workflow is retried and both its parent (mid-level) and grand parent (root) workflows are also retried. - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the root with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(rootWorkflowId, true) - - then: "verify that the sub workflow task in root workflow is IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the sub workflow task in mid level workflow is IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == 
Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the previously failed task in leaf workflow is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify the mid level workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify the root workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - 
tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - - and: "the root workflow is in COMPLETED state" - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the leaf workflow. - * - * Expectation: The leaf workflow resumes its FAILED task and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test retry on the mid-level with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(midLevelWorkflowId, true) - - then: "verify that the sub workflow task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the sub workflow task in mid level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the previously failed task in leaf workflow is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify the mid level 
workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify the root workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the previously failed integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - - and: 
"the root workflow is in COMPLETED state" - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the leaf workflow. - * - * Expectation: The leaf workflow resumes its FAILED task and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the leaf with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the leaf workflow" - workflowExecutor.retry(leafWorkflowId, true) - - then: "verify that the leaf workflow is in RUNNING state and failed task is retried" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - and: "verify that the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == 
TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.CANCELED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify the mid level workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - and: "verify the root workflow's JOIN is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - 
status == Workflow.WorkflowStatus.COMPLETED - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - assertWorkflowIsCompleted(midLevelWorkflowId) - - and: "the root workflow is in COMPLETED state" - assertWorkflowIsCompleted(rootWorkflowId) - //endregion - } - - void assertWorkflowIsCompleted(String workflowId) { - assert with(workflowExecutionService.getExecutionStatus(workflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.COMPLETED - } - } - - void assertSubWorkflowTaskIsRetriedAndWorkflowCompleted(String workflowId) { - assert with(workflowExecutionService.getExecutionStatus(workflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 5 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == TASK_TYPE_JOIN - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[4].status == Task.Status.COMPLETED - 
tasks[4].retriedTaskId == tasks[1].taskId - } - } - -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy deleted file mode 100644 index 4c5e916cd..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -class JsonJQTransformSpec extends AbstractSpecification { - - @Shared - def JSON_JQ_TRANSFORM_WF = 'test_json_jq_transform_wf' - - def setup() { - workflowTestUtil.registerWorkflows( - 'simple_json_jq_transform_integration_test.json', - ) - } - - /** - * Given the following input JSON - *{* "in1": {* "array": [ "a", "b" ] - *}, - * "in2": {* "array": [ "c", "d" ] - *}*}* expect the workflow task to transform to following result: - *{* out: [ "a", "b", "c", "d" ] - *}*/ - def "Test workflow with json jq transform task succeeds"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['in1'] = new HashMap() - workflowInput['in1']['array'] = ["a", "b"] - workflowInput['in2'] = new HashMap() - workflowInput['in2']['array'] = ["c", "d"] - - when: "workflow which has the json jq transform task has started" - def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1, - '', workflowInput, null, null, null) - - then: "verify that the workflow and task are completed with expected output" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'JSON_JQ_TRANSFORM' - tasks[0].outputData.containsKey("result") && 
tasks[0].outputData.containsKey("resultList") - } - } - - /** - * Given the following input JSON - *{* "in1": "a", - * "in2": "b" - *}* using the same query from the success test, jq will try to get in1['array'] - * and fail since 'in1' is a string - */ - def "Test workflow with json jq transform task fails"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['in1'] = "a" - workflowInput['in2'] = "b" - - when: "workflow which has the json jq transform task has started" - def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1, - '', workflowInput, null, null, null) - - then: "verify that the workflow and task failed with expected error" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'JSON_JQ_TRANSFORM' - tasks[0].reasonForIncompletion == 'Cannot index string with string \"array\"' - } - } - - /** - * Given the following invalid input JSON - *{* "in1": "a", - * "in2": "b" - *}* using the same query from the success test, jq will try to get in1['array'] - * and fail since 'in1' is a string. 
- * - * Re-run failed system task with the following valid input JSON will fix the workflow - *{* "in1": {* "array": [ "a", "b" ] - *}, - * "in2": {* "array": [ "c", "d" ] - *}*}* expect the workflow task to transform to following result: - *{* out: [ "a", "b", "c", "d" ] - *} - */ - def "Test rerun workflow with failed json jq transform task"() { - given: "workflow input" - def invalidInput = new HashMap() - invalidInput['in1'] = "a" - invalidInput['in2'] = "b" - - def validInput = new HashMap() - def input = new HashMap() - input['in1'] = new HashMap() - input['in1']['array'] = ["a", "b"] - input['in2'] = new HashMap() - input['in2']['array'] = ["c", "d"] - validInput['input'] = input - validInput['queryExpression'] = '.input as $_ | { out: ($_.in1.array + $_.in2.array) }' - - when: "workflow which has the json jq transform task started" - def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1, - '', invalidInput, null, null, null) - - then: "verify that the workflow and task failed with expected error" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'JSON_JQ_TRANSFORM' - tasks[0].reasonForIncompletion == 'Cannot index string with string \"array\"' - } - - when: "workflow which has the json jq transform task reran" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = workflowInstanceId - def reRunTaskId = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0].taskId - reRunWorkflowRequest.reRunFromTaskId = reRunTaskId - reRunWorkflowRequest.taskInput = validInput - - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the workflow and task are completed with expected output" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'JSON_JQ_TRANSFORM' - tasks[0].outputData.containsKey("result") && tasks[0].outputData.containsKey("resultList") - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy deleted file mode 100644 index 365000d1b..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class LambdaAndTerminateTaskSpec extends AbstractSpecification { - - @Shared - def WORKFLOW_WITH_TERMINATE_TASK = 'test_terminate_task_wf' - - @Shared - def WORKFLOW_WITH_TERMINATE_TASK_FAILED = 'test_terminate_task_failed_wf' - - @Shared - def WORKFLOW_WITH_LAMBDA_TASK = 'test_lambda_wf' - - @Shared - def PARENT_WORKFLOW_WITH_TERMINATE_TASK = 'test_terminate_task_parent_wf' - - @Shared - def WORKFLOW_WITH_DECISION_AND_TERMINATE = "ConditionalTerminateWorkflow" - - @Autowired - SubWorkflow subWorkflowTask - - def setup() { - workflowTestUtil.registerWorkflows( - 'failure_workflow_for_terminate_task_workflow.json', - 'terminate_task_completed_workflow_integration_test.json', - 'terminate_task_failed_workflow_integration.json', - 'simple_lambda_workflow_integration_test.json', - 'terminate_task_parent_workflow.json', - 'terminate_task_sub_workflow.json', - 'decision_and_terminate_integration_test.json' - ) - } - - def "Test workflow with a terminate task when the status is completed"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the 
terminate task" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_TERMINATE_TASK, 1, - '', workflowInput, null, null, null) - - then: "Ensure that the workflow has started and the first task is in scheduled state and workflow output should be terminate task's output" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - reasonForIncompletion.contains('Workflow is COMPLETED by TERMINATE task') - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'TERMINATE' - tasks[1].seq == 2 - output.size() == 1 - output as String == "[result:[testvalue:true]]" - } - } - - def "Test workflow with a terminate task when the status is failed"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the terminate task" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_TERMINATE_TASK_FAILED, 1, - '', workflowInput, null, null, null) - - then: "Verify that the workflow has failed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - reasonForIncompletion == "Early exit in terminate" - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'TERMINATE' - tasks[1].seq == 2 - output - def failedWorkflowId = output['conductor.failure_workflow'] as String - with(workflowExecutionService.getExecutionStatus(failedWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - input['workflowId'] == workflowInstanceId - tasks.size() == 1 - tasks[0].taskType == 'LAMBDA' - } - } - } - - def "Test workflow with a terminate task when the workflow has a subworkflow"() { - given: "workflow 
input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the terminate task" - def workflowInstanceId = workflowExecutor.startWorkflow(PARENT_WORKFLOW_WITH_TERMINATE_TASK, 1, - '', workflowInput, null, null, null) - - then: "verify that the workflow has started and the tasks are as expected" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'LAMBDA' - tasks[1].referenceTaskName == 'lambdaTask1' - tasks[1].seq == 2 - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'LAMBDA' - tasks[2].referenceTaskName == 'lambdaTask2' - tasks[2].seq == 3 - tasks[3].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'JOIN' - tasks[3].seq == 4 - tasks[4].status == Task.Status.SCHEDULED - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].seq == 5 - tasks[5].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'WAIT' - tasks[5].seq == 6 - } - - when: "subworkflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowTaskId = workflow.getTaskByRefName("test_terminate_subworkflow").getTaskId() - asyncSystemTaskExecutor.execute(subWorkflowTask, subWorkflowTaskId) - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.getTaskByRefName("test_terminate_subworkflow").subWorkflowId - - then: "verify that the sub workflow is RUNNING, and the task within is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_3' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Complete the WAIT task that should cause 
the TERMINATE task to execute" - def waitTask = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[5] - waitTask.status = Task.Status.COMPLETED - workflowExecutor.updateTask(new TaskResult(waitTask)) - - then: "Verify that the workflow has completed and the SUB_WORKFLOW is not still IN_PROGRESS (should be SKIPPED)" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - reasonForIncompletion.contains('Workflow is COMPLETED by TERMINATE task') - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'FORK' - tasks[0].seq == 1 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'LAMBDA' - tasks[1].referenceTaskName == 'lambdaTask1' - tasks[1].seq == 2 - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'LAMBDA' - tasks[2].referenceTaskName == 'lambdaTask2' - tasks[2].seq == 3 - tasks[3].status == Task.Status.CANCELED - tasks[3].taskType == 'JOIN' - tasks[3].seq == 4 - tasks[4].status == Task.Status.CANCELED - tasks[4].taskType == 'SUB_WORKFLOW' - tasks[4].seq == 5 - tasks[5].status == Task.Status.COMPLETED - tasks[5].taskType == 'WAIT' - tasks[5].seq == 6 - tasks[6].status == Task.Status.COMPLETED - tasks[6].taskType == 'TERMINATE' - tasks[6].seq == 7 - } - - and: "ensure that the subworkflow is terminated" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - reasonForIncompletion.contains('Parent workflow has been terminated with reason: Workflow is COMPLETED by' + - ' TERMINATE task') - tasks[0].taskType == 'integration_task_3' - tasks[0].status == Task.Status.CANCELED - } - } - - def "Test workflow with a terminate task within a decision branch"() { - given: "workflow input" - Map workflowInput = new HashMap() - workflowInput['param1'] = 'p1' - workflowInput['param2'] = 'p2' - workflowInput['case'] = 'two' - - when: "The 
workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_DECISION_AND_TERMINATE, 1, '', - workflowInput, null, null, null) - - then: "verify that the workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - tasks[0].seq == 1 - } - - when: "the task 'integration_task_1' is polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op':'task1 completed']) - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has FAILED due to terminate task" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 3 - output.size() == 1 - output as String == "[output:task1 completed]" - reasonForIncompletion.contains('Workflow is FAILED by TERMINATE task') - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['op'] == 'task1 completed' - tasks[0].seq == 1 - tasks[1].taskType == 'DECISION' - tasks[1].status == Task.Status.COMPLETED - tasks[1].seq == 2 - tasks[2].taskType == 'TERMINATE' - tasks[2].status == Task.Status.COMPLETED - tasks[2].seq == 3 - } - } - - def "Test workflow with lambda task"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['a'] = 1 - - when: "Start the workflow which has the terminate task" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_LAMBDA_TASK, 1, - '', workflowInput, null, null, null) - - then: "verify that the task is completed" - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'LAMBDA' - tasks[0].outputData as String == "[result:[testvalue:true]]" - tasks[0].seq == 1 - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/NestedForkJoinSubWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/NestedForkJoinSubWorkflowSpec.groovy deleted file mode 100644 index b1319a9c2..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/NestedForkJoinSubWorkflowSpec.groovy +++ /dev/null @@ -1,790 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW - -class NestedForkJoinSubWorkflowSpec extends AbstractSpecification { - - @Shared - def FORK_JOIN_NESTED_SUB_WF = 'nested_fork_join_swf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - String parentWorkflowId, subworkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('nested_fork_join_swf.json', - 'simple_workflow_1_integration_test.json' - ) - - //region Test setup: 3 workflows reach FAILED state. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(FORK_JOIN_NESTED_SUB_WF, 1) - - and: "input required to start the workflow execution" - String correlationId = 'retry_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : SIMPLE_WORKFLOW] - - when: "the workflow is started" - parentWorkflowId = workflowExecutor.startWorkflow(FORK_JOIN_NESTED_SUB_WF, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - 
workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds.get(0)) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def parentWorkflowInstance = workflowExecutionService.getExecutionStatus(parentWorkflowId, true) - with(parentWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - subworkflowId = parentWorkflowInstance.tasks[2].subWorkflowId - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "poll and fail the integration_task_2 task in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'task2 failed') - - then: "the sub workflow ends up in a FAILED state" - 
with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.CANCELED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.CANCELED - } - //endregion - } - - def cleanup() { - metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a nested fork join workflow where all workflows reach FAILED state because of a FAILED task - * in the sub workflow. - * - * A restart is executed on the sub workflow. - * - * Expectation: The sub workflow spawns a execution with the same id. - * When the sub workflow completes successfully, the parent workflow also completes successfully. 
- */ - def "test restart on the sub workflow in a nested fork join workflow"() { - when: - workflowExecutor.restart(subworkflowId, false) - - then: "verify that the subworkflow is RUNNING state" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "verify that the parent workflow is updated" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.CANCELED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.CANCELED - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify that the flag is reset and JOIN is updated" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == 
TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete both tasks in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the subworkflow completed" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - and: "verify that the parent workflow's sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify that the parent workflow reaches COMPLETED with all tasks completed" - assertParentWorkflowIsComplete() - } - - /** - * On a nested fork join workflow where all workflows reach FAILED state because of a FAILED task - * in the sub workflow. 
- * - * A restart is executed on the parent workflow. - * - * Expectation: The parent workflow spawns a execution with the same id, which in turn creates a new instance of the sub workflow. - * When the sub workflow completes successfully, the parent workflow also completes successfully. - */ - def "test restart on the parent workflow in a nested fork join workflow"() { - when: - workflowExecutor.restart(parentWorkflowId, false) - - then: "verify that the parent workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.SCHEDULED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds.get(0)) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that SUB_WORKFLOW task in in progress" - def parentWorkflowInstance = workflowExecutionService.getExecutionStatus(parentWorkflowId, true) - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - 
tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - and: "verify that a new instance of the sub workflow is created" - def newSubWorkflowId = parentWorkflowInstance.tasks[2].subWorkflowId - newSubWorkflowId != subworkflowId - with(workflowExecutionService.getExecutionStatus(newSubWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete both tasks in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the subworkflow completed" - with(workflowExecutionService.getExecutionStatus(newSubWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - and: "verify that the parent workflow's sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - 
tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify that the parent workflow reaches COMPLETED with all tasks completed" - assertParentWorkflowIsComplete() - } - - /** - * On a nested fork join workflow where all workflows reach FAILED state because of a FAILED task - * in the sub workflow. - * - * A retry is executed on the parent workflow. - * - * Expectation: The parent workflow spawns a execution with the same id, which in turn creates a new instance of the sub workflow. - * When the sub workflow completes successfully, the parent workflow also completes successfully. 
- */ - def "test retry on the parent workflow in a nested fork join workflow"() { - when: - workflowExecutor.retry(parentWorkflowId, false) - - then: "verify that the parent workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 8 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.FAILED - tasks[2].retried - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - tasks[7].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[7].status == Task.Status.SCHEDULED - tasks[7].retriedTaskId == tasks[2].taskId - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds.get(0)) - - then: "verify that SUB_WORKFLOW task in in progress" - def parentWorkflowInstance = workflowExecutionService.getExecutionStatus(parentWorkflowId, true) - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 8 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.FAILED - tasks[2].retried - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - 
tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - tasks[7].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[7].status == Task.Status.IN_PROGRESS - tasks[7].retriedTaskId == tasks[2].taskId - } - - and: "verify that a new instance of the sub workflow is created" - def newSubWorkflowId = parentWorkflowInstance.tasks[7].subWorkflowId - newSubWorkflowId != subworkflowId - with(workflowExecutionService.getExecutionStatus(newSubWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete both tasks in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the subworkflow completed" - with(workflowExecutionService.getExecutionStatus(newSubWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - and: "verify that the parent workflow's sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 8 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.FAILED - tasks[2].retried - tasks[3].taskType == 'integration_task_2' - 
tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - tasks[7].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[7].status == Task.Status.COMPLETED - tasks[7].retriedTaskId == tasks[2].taskId - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify that the parent workflow reaches COMPLETED with all tasks completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 8 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.FAILED - tasks[2].retried - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.COMPLETED - tasks[7].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[7].status == Task.Status.COMPLETED - tasks[7].retriedTaskId == tasks[2].taskId - } - } - - /** - * On a nested fork join workflow where all workflows reach FAILED state because of a FAILED task - * in the sub workflow. - * - * A retry with resume flag is executed on the parent workflow. - * - * Expectation: The parent workflow spawns a execution with the same id, which in turn creates a new instance of the sub workflow. - * When the sub workflow completes successfully, the parent workflow also completes successfully. 
- */ - def "test retry with resume on the parent workflow in a nested fork join workflow"() { - when: - workflowExecutor.retry(parentWorkflowId, true) - - then: "verify that the sub workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - and: "verify that the parent's SUB_WORKFLOW task is updated" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.CANCELED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.CANCELED - } - - when: "the parent is swept" - sweep(parentWorkflowId) - - then: "verify that parent's JOIN task in in progress" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == 
Task.Status.IN_PROGRESS - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the failed task in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the subworkflow completed" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - and: "verify that the parent workflow's sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify the 
parent workflow reaches COMPLETED state" - assertParentWorkflowIsComplete() - } - - /** - * On a nested fork join workflow where all workflows reach FAILED state because of a FAILED task - * in the sub workflow. - * - * A retry is executed on the parent workflow. - * - * Expectation: The parent workflow spawns a execution with the same id, which in turn creates a new instance of the sub workflow. - * When the sub workflow completes successfully, the parent workflow also completes successfully. - */ - def "test retry on the sub workflow in a nested fork join workflow"() { - when: - workflowExecutor.retry(subworkflowId, false) - - then: "verify that the sub workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - and: "verify that the parent's SUB_WORKFLOW task is updated" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.CANCELED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.CANCELED - } - - when: 
"the parent is swept" - sweep(parentWorkflowId) - - then: "verify that parent's JOIN task in in progress" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.IN_PROGRESS - !tasks[2].subworkflowChanged - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "poll and complete the failed task in the sub workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the subworkflow completed" - with(workflowExecutionService.getExecutionStatus(subworkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - and: "verify that the parent workflow's sub workflow task is completed" - with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - 
tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.IN_PROGRESS - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.IN_PROGRESS - } - - when: "the parent workflow is swept" - sweep(parentWorkflowId) - - then: "verify the parent workflow reaches COMPLETED state" - assertParentWorkflowIsComplete() - } - - private void assertParentWorkflowIsComplete() { - assert with(workflowExecutionService.getExecutionStatus(parentWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[0].taskType == TASK_TYPE_FORK - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_FORK - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'integration_task_2' - tasks[4].status == Task.Status.COMPLETED - tasks[5].taskType == TASK_TYPE_JOIN - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == TASK_TYPE_JOIN - tasks[6].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy deleted file mode 100644 index 5f1e25296..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -class SetVariableTaskSpec extends AbstractSpecification { - - @Shared - def SET_VARIABLE_WF = 'test_set_variable_wf' - - def setup() { - workflowTestUtil.registerWorkflows( - 'simple_set_variable_workflow_integration_test.json' - ) - } - - def "Test workflow with set variable task"() { - given: "workflow input" - def workflowInput = new HashMap() - workflowInput['var'] = "var_test_value" - - when: "Start the workflow which has the set variable task" - def workflowInstanceId = workflowExecutor.startWorkflow(SET_VARIABLE_WF, 1, - '', workflowInput, null, null, null) - - then: "verify that the task is completed and variables were set" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'SET_VARIABLE' - tasks[0].status == Task.Status.COMPLETED - variables as String == '[var:var_test_value]' - output as String == '[variables:[var:var_test_value]]' - } - } - - def "Test workflow with set variable task passing variables payload size threshold"() { - given: "workflow input" - def workflowInput = new HashMap() - long maxThreshold = 2 - workflowInput['var'] = String.join("", - Collections.nCopies(1 + ((int) (maxThreshold * 1024 / 8)), "01234567")) - - when: "Start the workflow which has the set variable task" - def workflowInstanceId = workflowExecutor.startWorkflow(SET_VARIABLE_WF, 1, - '', 
workflowInput, null, null, null) - def EXTRA_HASHMAP_SIZE = 17 - def expectedErrorMessage = - String.format( - "The variables payload size: %d of workflow: %s is greater than the permissible limit: %d bytes", - EXTRA_HASHMAP_SIZE + maxThreshold * 1024 + 1, workflowInstanceId, maxThreshold) - - then: "verify that the task is completed and variables were set" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].taskType == 'SET_VARIABLE' - tasks[0].status == Task.Status.FAILED_WITH_TERMINAL_ERROR - tasks[0].reasonForIncompletion == expectedErrorMessage - variables as String == '[:]' - output as String == '[variables:[:]]' - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy deleted file mode 100644 index 276514742..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy +++ /dev/null @@ -1,1032 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.apache.commons.lang3.StringUtils -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.core.exception.ApplicationException.Code.CONFLICT -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SimpleWorkflowSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Shared - def LINEAR_WORKFLOW_T1_T2 = 'integration_test_wf' - - @Shared - def INTEGRATION_TEST_WF_NON_RESTARTABLE = "integration_test_wf_non_restartable" - - - def setup() { - //Register LINEAR_WORKFLOW_T1_T2, RTOWF, WORKFLOW_WITH_OPTIONAL_TASK - workflowTestUtil.registerWorkflows('simple_workflow_1_integration_test.json', - 'simple_workflow_with_resp_time_out_integration_test.json') - } - - def "Test simple workflow completion"() { - - given: "An existing simple workflow definition" - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - and: "input required to start the workflow 
execution" - String correlationId = 'unit_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - when: "Start a workflow based on the registered simple workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Poll and complete the 'integration_task_1' " - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and complete 'integration_task_2'" - def pollAndCompleteTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the 'integration_task_2' has been polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try1, ['tp1': inputParam1, 'tp2': 'task1.done']) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 
'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - output.containsKey('o3') - } - } - - def "Test simple workflow with null inputs"() { - - when: "An existing simple workflow definition" - def workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - then: - workflowDef.getTasks().get(0).getInputParameters().containsKey('someNullKey') - - when: "Start a workflow based on the registered simple workflow with one input param null" - String correlationId = "unit_test_1" - def input = new HashMap() - input.put("param1", "p1 value") - input.put("param2", null) - - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "verify the workflow has started and the input params have propagated" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - input['param2'] == null - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - !tasks[0].inputData['someNullKey'] - } - - when: "'integration_task_1' is polled and completed with output data" - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', - ['someOtherKey': ['a': 1, 'A': null], 'someKey': null]) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the task is completed and the output data has propagated as input data to 'integration_task_2'" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData.containsKey('someKey') - !tasks[0].outputData['someKey'] - def someOtherKey = tasks[0].outputData['someOtherKey'] as Map - 
someOtherKey.containsKey('A') - !someOtherKey['A'] - } - } - - def "Test simple workflow terminal error condition"() { - setup: "Modify the task definition and the workflow output definition" - def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTask1Definition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - persistedTask1Definition.ownerEmail, 1, persistedTask1Definition.timeoutSeconds, - persistedTask1Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask1Definition) - def workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - def outputParameters = workflowDef.outputParameters - outputParameters['validationErrors'] = '${t1.output.ErrorMessage}' - metadataService.updateWorkflowDef(workflowDef) - - when: "A simple workflow which is started" - String correlationId = "unit_test_1" - def input = new HashMap() - input.put("param1", "p1 value") - input.put("param2", "p2 value") - - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - } - - when: "Rewind the running workflow that was just started" - workflowExecutor.restart(workflowInstanceId, false) - - then: "Ensure that a exception is thrown when a running workflow is being rewind" - def exceptionThrown = thrown(ApplicationException) - exceptionThrown.code == CONFLICT - - when: "'integration_task_1' is polled and failed with terminal error" - def polledIntegrationTask1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - TaskResult taskResult = new TaskResult(polledIntegrationTask1) - taskResult.reasonForIncompletion = 'NON TRANSIENT ERROR OCCURRED: An integration point required 
to complete the task is down' - taskResult.status = TaskResult.Status.FAILED_WITH_TERMINAL_ERROR - taskResult.addOutputData('TERMINAL_ERROR', 'Integration endpoint down: FOOBAR') - taskResult.addOutputData('ErrorMessage', 'There was a terminal error') - - workflowExecutionService.updateTask(taskResult) - sweep(workflowInstanceId) - - then: "The first polled task is integration_task_1 and the workflowInstanceId of the task is same as running workflowInstanceId" - polledIntegrationTask1 - polledIntegrationTask1.taskType == 'integration_task_1' - polledIntegrationTask1.workflowInstanceId == workflowInstanceId - - and: "verify that the workflow is in a failed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - reasonForIncompletion == 'NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down' - output['o1'] == 'p1 value' - output['validationErrors'] == 'There was a terminal error' - getTaskByRefName('t1').retryCount == 0 - failedReferenceTaskNames == ['t1'] as HashSet - } - - cleanup: - metadataService.updateTaskDef(modifiedTask1Definition) - outputParameters.remove('validationErrors') - metadataService.updateWorkflowDef(workflowDef) - } - - - def "Test Simple Workflow with response timeout "() { - given: 'Workflow input and correlationId' - def correlationId = 'unit_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "Start a workflow that has a response time out" - def workflowInstanceId = workflowExecutor.startWorkflow('RTOWF', 1, correlationId, workflowInput, - null, null, null) - - - then: "Workflow is in running state and the task 'task_rt' is ready to be polled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'task_rt' - tasks[0].status == 
Task.Status.SCHEDULED - } - queueDAO.getSize('task_rt') == 1 - - when: "Poll for a 'task_rt' task and then ack the task" - def polledTaskRtTry1 = workflowExecutionService.poll('task_rt', 'task1.integration.worker.testTimeout') - - then: "Verify that the 'task_rt' was polled" - polledTaskRtTry1 - polledTaskRtTry1.taskType == 'task_rt' - polledTaskRtTry1.workflowInstanceId == workflowInstanceId - polledTaskRtTry1.status == Task.Status.IN_PROGRESS - - when: "An additional poll is done wto retrieved another 'task_rt'" - def noTaskAvailable = workflowExecutionService.poll('task_rt', 'task1.integration.worker.testTimeout') - - then: "Ensure that there is no additional 'task_rt' available to poll" - !noTaskAvailable - - when: "The processing of the polled task takes more time than the response time out" - Thread.sleep(10000) - workflowExecutor.decide(workflowInstanceId) - - then: "Expect a new task to be added to the queue in place of the timed out task" - queueDAO.getSize('task_rt') == 1 - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].status == Task.Status.TIMED_OUT - tasks[1].status == Task.Status.SCHEDULED - } - - when: "The task_rt is polled again and the task is set to be called back after 2 seconds" - def polledTaskRtTry2 = workflowExecutionService.poll('task_rt', 'task1.integration.worker.testTimeout') - polledTaskRtTry2.callbackAfterSeconds = 2 - polledTaskRtTry2.status = Task.Status.IN_PROGRESS - workflowExecutionService.updateTask(new TaskResult(polledTaskRtTry2)) - - then: "verify that the polled task is not null" - polledTaskRtTry2 - - and: "verify the state of the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].status == Task.Status.SCHEDULED - } - - when: "induce the time for the call back for the task to expire and run the un ack 
process" - Thread.sleep(2010) - queueDAO.processUnacks(polledTaskRtTry2.taskDefName) - - and: "run the decide process on the workflow" - workflowExecutor.decide(workflowInstanceId) - - and: "poll for the task and then complete the task 'task_rt' " - def pollAndCompleteTaskTry3 = workflowTestUtil.pollAndCompleteTask('task_rt', 'task1.integration.worker.testTimeout', ['op': 'task1.done']) - - then: 'Verify that the task was polled ' - verifyPolledAndAcknowledgedTask(pollAndCompleteTaskTry3) - - when: "The next task of the workflow is polled and then completed" - def polledIntegrationTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker.testTimeout') - - then: "Verify that 'integration_task_2' is polled and acked" - verifyPolledAndAcknowledgedTask(polledIntegrationTask2Try1) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - } - } - - def "Test if the workflow definitions with and without schema version can be registered"() { - given: "A workflow definition with no schema version" - def workflowDef1 = new WorkflowDef() - workflowDef1.name = 'Test_schema_version1' - workflowDef1.version = 1 - workflowDef1.ownerEmail = "test@harness.com" - - and: "A new workflow task is created" - def workflowTask = new WorkflowTask() - workflowTask.name = 'integration_task_1' - workflowTask.taskReferenceName = 't1' - workflowDef1.tasks.add(workflowTask) - - and: "The workflow definition with no schema version is saved" - metadataService.updateWorkflowDef(workflowDef1) - - and: "A workflow definition with a schema version is created" - def workflowDef2 = new WorkflowDef() - workflowDef2.name = 'Test_schema_version2' - workflowDef2.version = 1 - workflowDef2.schemaVersion = 2 - workflowDef2.ownerEmail = "test@harness.com" - workflowDef2.tasks.add(workflowTask) - - and: "The workflow definition with schema 
version is persisted" - metadataService.updateWorkflowDef(workflowDef2) - - when: "The persisted workflow definitions are retrieved by their name" - def foundWorkflowDef1 = metadataService.getWorkflowDef(workflowDef1.getName(), 1) - def foundWorkflowDef2 = metadataService.getWorkflowDef(workflowDef2.getName(), 1) - - then: "Ensure that the schema version is by default 2" - foundWorkflowDef1 - foundWorkflowDef1.schemaVersion == 2 - foundWorkflowDef2 - foundWorkflowDef2.schemaVersion == 2 - } - - def "Test Simple workflow restart without using the latest definition"() { - setup: "Register a task definition with no retries" - def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTaskDefinition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - persistedTask1Definition.ownerEmail, 0, persistedTask1Definition.timeoutSeconds, - persistedTask1Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "Get the workflow definition associated with the simple workflow" - WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - then: "Ensure that there is a workflow definition" - workflowDefinition - workflowDefinition.failureWorkflow - StringUtils.isNotBlank(workflowDefinition.failureWorkflow) - - when: "Start a simple workflow with non null params" - def correlationId = 'integration_test_1' + UUID.randomUUID().toString() - def workflowInput = new HashMap() - String inputParam1 = 'p1 value' - workflowInput['param1'] = inputParam1 - workflowInput['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "A workflow instance has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - - when: "poll the task that 
is queued and fail the task" - def polledIntegrationTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failed..') - - then: "The workflow ends up in a failed state" - verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'integration_task_1' - } - - when: "Rewind the workflow which is in the failed state without the latest definition" - workflowExecutor.restart(workflowInstanceId, false) - - then: "verify that the rewound workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - - when: "Poll for the 'integration_task_1' " - def polledIntegrationTask1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task is polled and the workflow is in a running state" - verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - } - - when: - def polledIntegrationTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: - verifyPolledAndAcknowledgedTask(polledIntegrationTask2Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - } - - cleanup: - metadataService.updateTaskDef(persistedTask1Definition) - } - - def "Test Simple workflow restart with the latest definition"() { - - setup: "Register a task definition with no retries" - def 
persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTaskDefinition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - persistedTask1Definition.ownerEmail, 0, persistedTask1Definition.timeoutSeconds, - persistedTask1Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "Get the workflow definition associated with the simple workflow" - WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - - then: "Ensure that there is a workflow definition" - workflowDefinition - workflowDefinition.failureWorkflow - StringUtils.isNotBlank(workflowDefinition.failureWorkflow) - - when: "Start a simple workflow with non null params" - def correlationId = 'integration_test_1' + UUID.randomUUID().toString() - def workflowInput = new HashMap() - String inputParam1 = 'p1 value' - workflowInput['param1'] = inputParam1 - workflowInput['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "A workflow instance has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - - when: "poll the task that is queued and fail the task" - def polledIntegrationTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failed..') - - then: "the workflow ends up in a failed state" - verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'integration_task_1' - } - - when: "A new version of the workflow definition is registered" - WorkflowTask workflowTask = new WorkflowTask() - workflowTask.name = 
'integration_task_20' - workflowTask.taskReferenceName = 'task_added' - workflowTask.workflowTaskType = TaskType.SIMPLE - - workflowDefinition.tasks.add(workflowTask) - workflowDefinition.version = 2 - metadataService.updateWorkflowDef(workflowDefinition) - - and: "rewind/restart the workflow with the latest workflow definition" - workflowExecutor.restart(workflowInstanceId, true) - - then: "verify that the rewound workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - - when: "Poll and complete the 'integration_task_1' " - def polledIntegrationTask1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task is polled and the workflow is in a running state" - verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - } - - when: "Poll and complete the 'integration_task_2' " - def polledIntegrationTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledIntegrationTask2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - - when: "Poll and complete the 'integration_task_20' " - def polledIntegrationTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledIntegrationTask20Try1) - def polledIntegrationTask20 = polledIntegrationTask20Try1[0] as Task - polledIntegrationTask20.workflowInstanceId == workflowInstanceId - 
polledIntegrationTask20.referenceTaskName == 'task_added' - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - } - - cleanup: - metadataService.updateTaskDef(persistedTask1Definition) - metadataService.unregisterWorkflowDef(workflowDefinition.getName(), 2) - } - - def "Test simple workflow with task retries"() { - setup: "Change the task definition to ensure that it has retries and delay between retries" - def integrationTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTaskDefinition = new TaskDef(integrationTask2Definition.name, integrationTask2Definition.description, - integrationTask2Definition.ownerEmail, 3, integrationTask2Definition.timeoutSeconds, - integrationTask2Definition.responseTimeoutSeconds) - modifiedTaskDefinition.retryDelaySeconds = 2 - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "A new simple workflow is started" - def correlationId = 'integration_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow has started" - workflowInstanceId - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - workflow.status == Workflow.WorkflowStatus.RUNNING - - when: "Poll for the first task and complete the task" - def polledIntegrationTask1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - polledIntegrationTask1.status = Task.Status.COMPLETED - def polledIntegrationTask1Output = "task1.output -> " + polledIntegrationTask1.inputData['p1'] + "." 
+ polledIntegrationTask1.inputData['p2'] - polledIntegrationTask1.outputData['op'] = polledIntegrationTask1Output - workflowExecutionService.updateTask(new TaskResult(polledIntegrationTask1)) - - then: "verify that the 'integration_task_1' is polled and completed" - with(polledIntegrationTask1) { - inputData.containsKey('p1') - inputData.containsKey('p2') - inputData['p1'] == 'p1 value' - inputData['p2'] == 'p2 value' - } - - //Need to figure out how to use expect and where here - when: " 'integration_task_2' is polled and marked as failed for the first time" - Tuple polledAndFailedTaskTry1 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failure...0', null, 2) - - then: "verify that the task was polled and the input params of the tasks are as expected" - verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry1, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) - - when: " 'integration_task_2' is polled and marked as failed for the second time" - Tuple polledAndFailedTaskTry2 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failure...0', null, 2) - - then: "verify that the task was polled and the input params of the tasks are as expected" - verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry2, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) - - when: "'integration_task_2' is polled and marked as completed for the third time" - def polledAndCompletedTry3 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the task was polled and the input params of the tasks are as expected" - verifyPolledAndAcknowledgedTask(polledAndCompletedTry3, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == 
Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.FAILED - tasks[3].taskType == 'integration_task_2' - tasks[3].status == Task.Status.COMPLETED - tasks[1].taskId == tasks[2].retriedTaskId - tasks[2].taskId == tasks[3].retriedTaskId - failedReferenceTaskNames == ['t2'] as HashSet - } - - cleanup: - metadataService.updateTaskDef(integrationTask2Definition) - } - - def "Test simple workflow with retry at workflow level"() { - setup: "Change the task definition to ensure that it has retries and no delay between retries" - def integrationTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTaskDefinition = new TaskDef(integrationTask1Definition.name, integrationTask1Definition.description, - integrationTask1Definition.ownerEmail, 1, integrationTask1Definition.timeoutSeconds, - integrationTask1Definition.responseTimeoutSeconds) - modifiedTaskDefinition.retryDelaySeconds = 0 - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "Start a simple workflow with non null params" - def correlationId = 'retry_test' + UUID.randomUUID().toString() - def workflowInput = new HashMap() - String inputParam1 = 'p1 value' - workflowInput['param1'] = inputParam1 - workflowInput['param2'] = 'p2 value' - - and: "start a simple workflow with input params" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow has started and the next task is scheduled" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].getInputData().get("p3") == tasks[0].getTaskId() - } - with(metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1)) 
{ - failureWorkflow - StringUtils.isNotBlank(failureWorkflow) - } - - when: "The first task 'integration_task_1' is polled and failed" - Tuple polledAndFailedTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failure...0') - - then: "verify that the task was polled and acknowledged and the workflow is still in a running state" - verifyPolledAndAcknowledgedTask(polledAndFailedTask1Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].status == Task.Status.FAILED - tasks[1].status == Task.Status.SCHEDULED - tasks[1].getInputData().get("p3") == tasks[1].getTaskId() - } - - when: "The first task 'integration_task_1' is polled and failed for the second time" - Tuple polledAndFailedTask1Try2 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failure...0') - - then: "verify that the task was polled and acknowledged and the workflow is still in a running state" - verifyPolledAndAcknowledgedTask(polledAndFailedTask1Try2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].status == Task.Status.FAILED - tasks[1].status == Task.Status.FAILED - } - - when: "The workflow is retried" - workflowExecutor.retry(workflowInstanceId, false) - - then: - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].status == Task.Status.FAILED - tasks[1].status == Task.Status.FAILED - tasks[2].status == Task.Status.SCHEDULED - tasks[2].getInputData().get("p3") == tasks[2].getTaskId() - } - - when: "The 'integration_task_1' task is polled and is completed" - def polledAndCompletedTry3 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task2.integration.worker') - - then: "verify that the task was polled 
and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTry3) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[2].status == Task.Status.COMPLETED - tasks[3].status == Task.Status.SCHEDULED - } - - when: "The 'integration_task_2' task is polled and is completed" - def polledAndCompletedTaskTry1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the task was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTaskTry1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[2].status == Task.Status.COMPLETED - tasks[3].status == Task.Status.COMPLETED - failedReferenceTaskNames == ['t1'] as HashSet - } - - cleanup: - metadataService.updateTaskDef(integrationTask1Definition) - } - - def "Test Long running simple workflow"() { - given: "A new simple workflow is started" - def correlationId = 'integration_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "start a new workflow with the input" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow is in running state and the task queue has an entry for the first task of the workflow" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - } - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - when: "the first task 'integration_task_1' is polled and then sent back with a callBack seconds" - def pollTaskTry1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - 
pollTaskTry1.outputData['op'] = 'task1.in.progress' - pollTaskTry1.callbackAfterSeconds = 5 - pollTaskTry1.status = Task.Status.IN_PROGRESS - workflowExecutionService.updateTask(new TaskResult(pollTaskTry1)) - - then: "verify that the task is polled and acknowledged" - pollTaskTry1 - - and: "the input data of the data is as expected" - pollTaskTry1.inputData.containsKey('p1') - pollTaskTry1.inputData['p1'] == 'p1 value' - pollTaskTry1.inputData.containsKey('p2') - pollTaskTry1.inputData['p1'] == 'p1 value' - - and: "the task queue reflects the presence of 'integration_task_1' " - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - when: "the 'integration_task_1' task is polled again" - def pollTaskTry2 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - - then: "verify that there was no task polled" - !pollTaskTry2 - - when: "the 'integration_task_1' is polled again after a delay of 5 seconds and completed" - Thread.sleep(5000) - def task1Try3Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_1', - 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(task1Try3Tuple, [:]) - - and: "verify that the workflow is updated with the latest task" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - tasks[0].outputData['op'] == 'task1.done' - } - - when: "the 'integration_task_1' is polled and completed" - def task2Try1Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the task was polled and completed with the expected inputData for the task that was polled" - verifyPolledAndAcknowledgedTask(task2Try1Tuple, ['tp2': 'task1.done', 'tp1': 'p1 value']) - - and: "The workflow is in a completed state and reflects the 
tasks that are completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - } - } - - - def "Test simple workflow when the task's call back after seconds are reset"() { - - given: "A new simple workflow is started" - def correlationId = 'integration_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "start a new workflow with the input" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow is in running state and the task queue has an entry for the first task of the workflow" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - } - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - when: "the first task 'integration_task_1' is polled and then sent back with a callBack seconds" - def pollTaskTry1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - pollTaskTry1.outputData['op'] = 'task1.in.progress' - pollTaskTry1.callbackAfterSeconds = 3600 - pollTaskTry1.status = Task.Status.IN_PROGRESS - workflowExecutionService.updateTask(new TaskResult(pollTaskTry1)) - - then: "verify that the task is polled and acknowledged" - pollTaskTry1 - - and: "the input data of the data is as expected" - pollTaskTry1.inputData.containsKey('p1') - pollTaskTry1.inputData['p1'] == 'p1 value' - pollTaskTry1.inputData.containsKey('p2') - pollTaskTry1.inputData['p1'] == 'p1 value' - - and: "the 
task queue reflects the presence of 'integration_task_1' " - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - when: "the 'integration_task_1' task is polled again" - def pollTaskTry2 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - - then: "verify that there was no task polled" - !pollTaskTry2 - - when: "the 'integration_task_1' task is polled again" - def pollTaskTry3 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - - then: "verify that there was no task polled" - !pollTaskTry3 - - when: "The callbackSeconds of the tasks in progress for the workflow are reset" - workflowExecutor.resetCallbacksForWorkflow(workflowInstanceId) - - and: "the 'integration_task_1' is polled again after all the in progress tasks are reset" - def task1Try4Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_1', - 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(task1Try4Tuple) - - and: "verify that the workflow is updated with the latest task" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - tasks[0].outputData['op'] == 'task1.done' - } - - when: "the 'integration_task_1' is polled and completed" - def task2Try1Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_2', - 'task2.integration.worker') - - then: "verify that the task was polled and completed with the expected inputData for the task that was polled" - verifyPolledAndAcknowledgedTask(task2Try1Tuple, ['tp2': 'task1.done', 'tp1': 'p1 value']) - - and: "The workflow is in a completed state and reflects the tasks that are completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - 
tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - } - } - - def "Test non restartable simple workflow"() { - setup: "Change the task definition to ensure that it has no retries and register a non restartable workflow" - def integrationTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTaskDefinition = new TaskDef(integrationTask1Definition.name, integrationTask1Definition.description, - integrationTask1Definition.ownerEmail, 0, integrationTask1Definition.timeoutSeconds, - integrationTask1Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTaskDefinition) - - def simpleWorkflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - simpleWorkflowDefinition.name = INTEGRATION_TEST_WF_NON_RESTARTABLE - simpleWorkflowDefinition.restartable = false - metadataService.updateWorkflowDef(simpleWorkflowDefinition) - - when: "A non restartable workflow is started" - def correlationId = 'integration_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(INTEGRATION_TEST_WF_NON_RESTARTABLE, 1, - correlationId, workflowInput, - null, null, null) - - and: "the 'integration_task_1' is polled and failed" - Tuple polledAndFailedTaskTry1 = workflowTestUtil.pollAndFailTask('integration_task_1', - 'task1.integration.worker', 'failure...0') - - then: "verify that the task was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'integration_task_1' - } - - when: "The failed workflow is rewound" - 
workflowExecutor.restart(workflowInstanceId, false) - - and: "The first task 'integration_task_1' is polled and completed" - def task1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', - ['op': 'task1.done']) - - then: "Verify that the task is polled and acknowledged" - verifyPolledAndAcknowledgedTask(task1Try2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - } - - when: "The second task 'integration_task_2' is polled and completed" - def task2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "Verify that the task was polled and acknowledged" - verifyPolledAndAcknowledgedTask(task2Try1, ['tp2': 'task1.done', 'tp1': 'p1 value']) - - and: "The workflow is in a completed state and reflects the tasks that are completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[0].status == Task.Status.COMPLETED - tasks[0].taskType == 'integration_task_1' - output['o3'] == 'task1.done' - } - - when: "The successfully completed non restartable workflow is rewound" - workflowExecutor.restart(workflowInstanceId, false) - - then: "Ensure that an exception is thrown" - def exceptionThrown = thrown(ApplicationException) - exceptionThrown - - cleanup: "clean up the changes made to the task and workflow definition during start up" - metadataService.updateTaskDef(integrationTask1Definition) - simpleWorkflowDefinition.name = LINEAR_WORKFLOW_T1_T2 - simpleWorkflowDefinition.restartable = true - metadataService.updateWorkflowDef(simpleWorkflowDefinition) - } - - def "Test simple workflow when update task's result with call back after seconds"() 
{ - - given: "A new simple workflow is started" - def correlationId = 'integration_test_1' - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "start a new workflow with the input" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow is in running state and the task queue has an entry for the first task of the workflow" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - } - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - when: "the first task 'integration_task_1' is polled and then sent back with no callBack seconds" - def pollTaskTry1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - pollTaskTry1.outputData['op'] = 'task1.in.progress' - pollTaskTry1.status = Task.Status.IN_PROGRESS - workflowExecutionService.updateTask(new TaskResult(pollTaskTry1)) - - then: "verify that the task is polled and acknowledged" - pollTaskTry1 - - and: "the input data of the data is as expected" - pollTaskTry1.inputData.containsKey('p1') - pollTaskTry1.inputData['p1'] == 'p1 value' - pollTaskTry1.inputData.containsKey('p2') - pollTaskTry1.inputData['p1'] == 'p1 value' - - and: "the task gets put back into the queue of 'integration_task_1' immediately for future poll" - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - and: "The task in in SCHEDULED status with workerId reset" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - 
tasks[0].callbackAfterSeconds == 0 - } - - when: "the 'integration_task_1' task is polled again" - def pollTaskTry2 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - pollTaskTry2.outputData['op'] = 'task1.in.progress' - pollTaskTry2.status = Task.Status.IN_PROGRESS - pollTaskTry2.callbackAfterSeconds = 3600 - workflowExecutionService.updateTask(new TaskResult(pollTaskTry2)) - - then: "verify that the task is polled and acknowledged" - pollTaskTry2 - - and: "the task gets put back into the queue of 'integration_task_1' with callbackAfterSeconds delay for future poll" - workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - - and: "The task in in SCHEDULED status with workerId reset" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - tasks[0].callbackAfterSeconds == pollTaskTry2.callbackAfterSeconds - } - - when: "the 'integration_task_1' task is polled again" - def pollTaskTry3 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - - then: "verify that there was no task polled" - !pollTaskTry3 - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/StartWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/StartWorkflowSpec.groovy deleted file mode 100644 index e216d07bb..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/StartWorkflowSpec.groovy +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.beans.factory.annotation.Value -import org.springframework.core.io.ClassPathResource - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.StartWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification -import com.netflix.conductor.test.utils.MockExternalPayloadStorage - -import spock.lang.Shared -import spock.lang.Unroll - -class StartWorkflowSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - StartWorkflow startWorkflowTask - - @Autowired - MockExternalPayloadStorage mockExternalPayloadStorage - - @Shared - def WORKFLOW_THAT_STARTS_ANOTHER_WORKFLOW = 'workflow_that_starts_another_workflow' - - static String workflowInputPath = "${UUID.randomUUID()}.json" - - def setup() { - workflowTestUtil.registerWorkflows('workflow_that_starts_another_workflow.json', - 'simple_workflow_1_integration_test.json') - mockExternalPayloadStorage.upload(workflowInputPath, StartWorkflowSpec.class.getResourceAsStream("/start_workflow_input.json"), 0) - } - - @Unroll - def "start another workflow using #testCase.name"() { - setup: 'create the correlationId for the starter workflow' - def correlationId = UUID.randomUUID().toString() - - when: "starter workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_THAT_STARTS_ANOTHER_WORKFLOW, 1, - correlationId, testCase.workflowInput, 
testCase.workflowInputPath, null, null) - - then: "verify that the starter workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'START_WORKFLOW' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the START_WORKFLOW task is started" - List polledTaskIds = queueDAO.pop("START_WORKFLOW", 1, 200) - String startWorkflowTaskId = polledTaskIds.get(0) - asyncSystemTaskExecutor.execute(startWorkflowTask, startWorkflowTaskId) - - then: "verify the START_WORKFLOW task and workflow are COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'START_WORKFLOW' - tasks[0].status == Task.Status.COMPLETED - } - - when: "the started workflow is retrieved" - def startWorkflowTask = workflowExecutionService.getTask(startWorkflowTaskId) - String startedWorkflowId = startWorkflowTask.outputData['workflowId'] - - then: "verify that the started workflow is RUNNING" - with(workflowExecutionService.getExecutionStatus(startedWorkflowId, false)) { - status == Workflow.WorkflowStatus.RUNNING - it.correlationId == correlationId - // when the "starter" workflow is started with input from external payload storage, - // it sends a large input to the "started" workflow - // see start_workflow_input.json - if(testCase.workflowInputPath) { - externalInputPayloadStoragePath != null - } else { - input != null - } - } - - where: - testCase << [workflowName(), workflowDef(), workflowRequestWithExternalPayloadStorage()] - } - - def "start_workflow does not conform to StartWorkflowRequest"() { - given: "start_workflow that does not conform to StartWorkflowRequest" - def startWorkflowParam = ['param1': 'value1', 'param2': 'value2'] - def workflowInput = ['start_workflow': startWorkflowParam] - - when: "starter workflow is started" - def 
workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_THAT_STARTS_ANOTHER_WORKFLOW, 1, - null, workflowInput, null, null, null) - - then: "verify that the starter workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'START_WORKFLOW' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the START_WORKFLOW task is started" - List polledTaskIds = queueDAO.pop("START_WORKFLOW", 1, 200) - String startWorkflowTaskId = polledTaskIds.get(0) - asyncSystemTaskExecutor.execute(startWorkflowTask, startWorkflowTaskId) - - then: "verify the START_WORKFLOW task and workflow FAILED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 1 - tasks[0].taskType == 'START_WORKFLOW' - tasks[0].status == Task.Status.FAILED - tasks[0].reasonForIncompletion != null - } - } - - /** - * Builds a TestCase for a StartWorkflowRequest with a WorkflowDef that contains two tasks. - */ - static workflowDef() { - def task1 = ['name': 'integration_task_1', 'taskReferenceName': 't1', 'type': 'SIMPLE', - 'inputParameters': ['tp1': '${workflow.input.param1}', 'tp2': '${workflow.input.param2}', 'tp3': '${CPEWF_TASK_ID}']] - def task2 = ['name': 'integration_task_2', 'taskReferenceName': 't2', 'type': 'SIMPLE', - 'inputParameters': ['tp1': '${workflow.input.param1}', 'tp2': '${t1.output.op}', 'tp3': '${CPEWF_TASK_ID}']] - def workflowDef = ['name': 'dynamic_wf', 'version': 1, 'tasks': [task1, task2], 'ownerEmail': 'abc@abc.com'] - - def startWorkflow = ['name': 'dynamic_wf', 'workflowDef': workflowDef] - - new TestCase(name: 'workflow definition', workflowInput: ['startWorkflow': startWorkflow]) - } - - /** - * Builds a TestCase for a StartWorkflowRequest with a workflow name. 
- */ - static workflowName() { - def startWorkflow = ['name': 'integration_test_wf', 'input': ['param1': 'value1', 'param2': 'value2']] - - new TestCase(name: 'name and version', workflowInput: ['startWorkflow': startWorkflow]) - } - - /** - * Builds a TestCase for a StartWorkflowRequest with a workflow name and input in external payload storage. - */ - static workflowRequestWithExternalPayloadStorage() { - new TestCase(name: 'name and version with external input', workflowInputPath: workflowInputPath) - } - - static class TestCase { - String name - Map workflowInput - String workflowInputPath - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRerunSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRerunSpec.groovy deleted file mode 100644 index e2f74c044..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRerunSpec.groovy +++ /dev/null @@ -1,735 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SubWorkflowRerunSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - @Shared - def WORKFLOW_WITH_SUBWORKFLOW = 'integration_test_wf_with_sub_wf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('simple_workflow_1_integration_test.json', - 'workflow_with_sub_workflow_1_integration_test.json') - - //region Test setup: 3 workflows. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'rerun_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : WORKFLOW_WITH_SUBWORKFLOW, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def 
rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the leaf-level workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 
'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level subworkflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - //endregion - } - - def cleanup() { - metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the root workflow. - * - * Expectation: The root workflow spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. 
- */ - def "Test rerun on the root-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the root workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = rootWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "poll and complete the 'integration_task_1' task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op1': 'task1.done']) - - and: "verify that the root workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = 
workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - then: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - //endregion - } - - /** - * On a 
3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed with taskId on the root workflow. - * - * Expectation: The root workflow gets a new execution with the same id and both the mid-level workflow and leaf workflows are also reran. - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test rerun on the root-level with taskId in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the root workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = rootWorkflowId - def reRunTaskId = workflowExecutionService.getExecutionStatus(rootWorkflowId, true).tasks[0].taskId - reRunWorkflowRequest.reRunFromTaskId = reRunTaskId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "poll and complete the 'integration_task_1' task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op1': 'task1.done']) - - and: "verify that the root workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - 
with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - then: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the mid-level workflow. - * - * Expectation: The mid-level workflow gets a new execution with the same id and spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test rerun on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the mid level workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = midLevelWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the mid workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "verify the SUB_WORKFLOW task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the task in the mid level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'integration_task_1' - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - 
workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the mid-level workflow with taskId. - * - * Expectation: The mid-level workflow gets a new execution with the same id and spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test rerun on the mid-level with taskId in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the mid level workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = midLevelWorkflowId - def reRunTaskId = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true).tasks[0].taskId - reRunWorkflowRequest.reRunFromTaskId = reRunTaskId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the mid workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "verify the SUB_WORKFLOW task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the task in the mid level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'integration_task_1' - } - - when: "poll and complete the 2 tasks 
in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the leaf workflow. - * - * Expectation: The leaf workflow gets a new execution with the same id and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test rerun on the leaf-level in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the leaf workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = leafWorkflowId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the leaf workflow creates a new execution" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the root workflow's SUB_WORKFLOW is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == 
Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A rerun is executed on the leaf workflow. - * - * Expectation: The leaf workflow gets a new execution with the same id and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test rerun on the leaf-level with taskId in a 3-level subworkflow"() { - //region Test case - when: "do a rerun on the leaf workflow" - def reRunWorkflowRequest = new RerunWorkflowRequest() - reRunWorkflowRequest.reRunFromWorkflowId = leafWorkflowId - def reRunTaskId = workflowExecutionService.getExecutionStatus(leafWorkflowId, true).tasks[0].taskId - reRunWorkflowRequest.reRunFromTaskId = reRunTaskId - workflowExecutor.rerun(reRunWorkflowRequest) - - then: "verify that the leaf workflow creates a new execution" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the root workflow's SUB_WORKFLOW is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - 
with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRestartSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRestartSpec.groovy deleted file mode 100644 index b651b3718..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRestartSpec.groovy +++ /dev/null @@ -1,448 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SubWorkflowRestartSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - @Shared - def WORKFLOW_WITH_SUBWORKFLOW = 'integration_test_wf_with_sub_wf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('simple_one_task_sub_workflow_integration_test.json', - 'simple_workflow_1_integration_test.json', - 'workflow_with_sub_workflow_1_integration_test.json') - - //region Test setup: 3 workflows reach FAILED state. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'retry_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : WORKFLOW_WITH_SUBWORKFLOW, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def 
rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 
'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level subworkflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - //endregion - } - - def cleanup() { - // Ensure that changes to the task def are reverted - metadataService.updateTaskDef(persistedTask2Definition) - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A restart is executed on the root workflow. - * - * Expectation: The root workflow gets a new execution with the same id and spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the NEW leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. 
- */ - def "Test restart on the root in a 3-level subworkflow"() { - //region Test case - when: "do a restart on the root workflow" - workflowExecutor.restart(rootWorkflowId, false) - - then: "poll and complete the 'integration_task_1' task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op1': 'task1.done']) - - and: "verify that the root workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" 
- newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - then: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. 
- * - * A restart is executed on the mid-level workflow. - * - * Expectation: The mid-level workflow gets a new execution with the same id and spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test restart on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a restart on the mid level workflow" - workflowExecutor.restart(midLevelWorkflowId, false) - - then: "verify that the mid workflow created a new execution" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "verify the SUB_WORKFLOW task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the task in the mid level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 
'integration_task_1' - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A restart is executed on the leaf workflow. - * - * Expectation: The leaf workflow gets a new execution with the same id and updates both its parent (mid-level) and grandparent (root). 
- * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test restart on the leaf in a 3-level subworkflow"() { - //region Test case - when: "do a restart on the leaf workflow" - workflowExecutor.restart(leafWorkflowId, false) - - then: "verify that the leaf workflow creates a new execution" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the root workflow's SUB_WORKFLOW is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == 
Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRetrySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRetrySpec.groovy deleted file mode 100644 index 895d79da6..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowRetrySpec.groovy +++ /dev/null @@ -1,752 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SubWorkflowRetrySpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - @Shared - def WORKFLOW_WITH_SUBWORKFLOW = 'integration_test_wf_with_sub_wf' - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - String rootWorkflowId, midLevelWorkflowId, leafWorkflowId - - TaskDef persistedTask2Definition - - def setup() { - workflowTestUtil.registerWorkflows('simple_one_task_sub_workflow_integration_test.json', - 'simple_workflow_1_integration_test.json', - 'workflow_with_sub_workflow_1_integration_test.json') - - //region Test setup: 3 workflows reach FAILED state. Task 'integration_task_2' in leaf workflow is FAILED. 
- setup: "Modify task definition to 0 retries" - persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'retry_on_root_in_3level_wf' - def input = [ - 'param1' : 'p1 value', - 'param2' : 'p2 value', - 'subwf' : WORKFLOW_WITH_SUBWORKFLOW, - 'nextSubwf': SIMPLE_WORKFLOW] - - when: "the workflow is started" - rootWorkflowId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - def 
rootWorkflowInstance = workflowExecutionService.getExecutionStatus(rootWorkflowId, true) - with(rootWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - } - - and: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - midLevelWorkflowId = rootWorkflowInstance.tasks[1].subWorkflowId - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def midLevelWorkflowInstance = workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true) - - then: "verify that the mid-level workflow is RUNNING, and first task is in SCHEDULED state" - leafWorkflowId = midLevelWorkflowInstance.tasks[1].subWorkflowId - def leafWorkflowInstance = workflowExecutionService.getExecutionStatus(leafWorkflowId, true) - with(leafWorkflowInstance) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 
'failed') - - then: "the leaf workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - when: "the mid level workflow is 'decided'" - sweep(midLevelWorkflowId) - - then: "the mid level subworkflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - - when: "the root level workflow is 'decided'" - sweep(rootWorkflowId) - - then: "the root level workflow is in FAILED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the root workflow. - * - * Expectation: The root workflow spawns a NEW mid-level workflow, which in turn spawns a NEW leaf workflow. - * When the leaf workflow completes successfully, both the NEW mid-level and root workflows also complete successfully. 
- */ - def "Test retry on the root in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(rootWorkflowId, false) - - then: "poll and complete the 'integration_task_1' task" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op1': 'task1.done']) - - and: "verify that the root workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the subworkflow task should be in SCHEDULED state and is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newMidLevelWorkflowId = workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new mid level workflow is created and is in RUNNING state" - newMidLevelWorkflowId != midLevelWorkflowId - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task in the mid-level workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - and: "poll and execute the sub workflow task" - polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = 
workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the two tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "the new leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the new mid level and root workflows are 'decided'" - sweep(newMidLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newMidLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - then: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 
TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the root workflow. - * - * Expectation: The leaf workflow is retried and both its parent (mid-level) and grand parent (root) workflows are also retried. - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the root with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(rootWorkflowId, true) - - then: "verify that the sub workflow task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the sub workflow task in mid level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the previously failed task in leaf workflow is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - 
sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the mid level workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after "decide" - } - - and: "the root workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after "decide" - } - - when: "poll and complete the integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the new mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - - and: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the mid-level workflow. - * - * Expectation: The mid-level workflow spawns a NEW leaf workflow and also updates its parent (root workflow). - * When the NEW leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the mid-level in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the mid level workflow" - workflowExecutor.retry(midLevelWorkflowId, false) - - then: "verify that the mid workflow created a new SUB_WORKFLOW task" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - and: "verify the SUB_WORKFLOW task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "the SUB_WORKFLOW task in mid level workflow is started by issuing a system task call" - def polledTaskIds = queueDAO.pop(TASK_TYPE_SUB_WORKFLOW, 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def newLeafWorkflowId = 
workflowExecutionService.getTask(polledTaskIds[0]).subWorkflowId - - then: "verify that a new leaf workflow is created and is in RUNNING state" - newLeafWorkflowId != leafWorkflowId - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the 2 tasks in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the new leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(newLeafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - 
tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the mid-level workflow. - * - * Expectation: The leaf workflow is retried and both its parent (mid-level) and grand parent (root) workflows are also retried. - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the mid-level with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the root workflow" - workflowExecutor.retry(midLevelWorkflowId, true) - - then: "verify that the sub workflow task in root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the sub workflow task in mid level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the previously failed task in leaf workflow is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level 
and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the mid level workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after "decide" - } - - and: "the root workflow is in RUNNING state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - !tasks[1].subworkflowChanged // flag is reset after "decide" - } - - when: "poll and complete the previously failed integration_task_2 task" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "the mid level workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - } - - and: "the root workflow is in COMPLETED state" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - 
tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed on the leaf workflow. - * - * Expectation: The leaf workflow resumes its FAILED task and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. - */ - def "Test retry on the leaf in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the leaf workflow" - workflowExecutor.retry(leafWorkflowId, false) - - then: "verify that the leaf workflow is in RUNNING state and failed task is retried" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the root workflow' is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - 
tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } - - /** - * On a 3-level workflow where all workflows reach FAILED state because of a FAILED task - * in the leaf workflow. - * - * A retry is executed with resume flag on the leaf workflow. - * - * Expectation: The leaf workflow resumes its FAILED task and updates both its parent (mid-level) and grandparent (root). - * When the leaf workflow completes successfully, both the mid-level and root workflows also complete successfully. 
- */ - def "Test retry on the leaf with resume flag in a 3-level subworkflow"() { - //region Test case - when: "do a retry on the leaf workflow" - workflowExecutor.retry(leafWorkflowId, true) - - then: "verify that the leaf workflow is in RUNNING state and failed task is retried" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].retriedTaskId == tasks[1].taskId - } - - then: "verify that the mid-level workflow is updated" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - and: "verify that the root workflow is updated" - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the scheduled task in the leaf workflow" - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the leaf workflow reached COMPLETED state" - with(workflowExecutionService.getExecutionStatus(leafWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[1].retried - tasks[2].taskType == 'integration_task_2' - 
tasks[2].status == Task.Status.COMPLETED - tasks[2].retriedTaskId == tasks[1].taskId - } - - when: "the mid level and root workflows are 'decided'" - sweep(midLevelWorkflowId) - sweep(rootWorkflowId) - - then: "verify that the mid level and root workflows reach COMPLETED state" - with(workflowExecutionService.getExecutionStatus(midLevelWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - with(workflowExecutionService.getExecutionStatus(rootWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - (!tasks[1].subworkflowChanged) // flag is reset after decide - } - //endregion - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy deleted file mode 100644 index 095f40404..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy +++ /dev/null @@ -1,489 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SubWorkflowSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - SubWorkflow subWorkflowTask - - @Shared - def WORKFLOW_WITH_SUBWORKFLOW = 'integration_test_wf_with_sub_wf' - - @Shared - def SUB_WORKFLOW = "sub_workflow" - - @Shared - def SIMPLE_WORKFLOW = "integration_test_wf" - - def setup() { - workflowTestUtil.registerWorkflows('simple_one_task_sub_workflow_integration_test.json', - 'simple_workflow_1_integration_test.json', - 'workflow_with_sub_workflow_1_integration_test.json') - } - - def "Test retrying a subworkflow where parent workflow timed out due to workflowTimeout"() { - - setup: "Register a workflow definition with a timeout policy set to timeout workflow" - def persistedWorkflowDefinition = metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - def modifiedWorkflowDefinition = new WorkflowDef() - 
modifiedWorkflowDefinition.name = persistedWorkflowDefinition.name - modifiedWorkflowDefinition.version = persistedWorkflowDefinition.version - modifiedWorkflowDefinition.tasks = persistedWorkflowDefinition.tasks - modifiedWorkflowDefinition.inputParameters = persistedWorkflowDefinition.inputParameters - modifiedWorkflowDefinition.outputParameters = persistedWorkflowDefinition.outputParameters - modifiedWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.TIME_OUT_WF - modifiedWorkflowDefinition.timeoutSeconds = 10 - modifiedWorkflowDefinition.ownerEmail = persistedWorkflowDefinition.ownerEmail - metadataService.updateWorkflowDef([modifiedWorkflowDefinition]) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SUB_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'wf_with_subwf_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - input['subwf'] = 'sub_workflow' - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - and: "verify that the 'integration_task1' is complete and the next task 
(subworkflow) is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the subworkflow is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - String subworkflowTaskId = polledTaskIds.get(0) - asyncSystemTaskExecutor.execute(subWorkflowTask, subworkflowTaskId) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.IN_PROGRESS - } - - when: "subworkflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.tasks[1].subWorkflowId - - then: "verify that the sub workflow is RUNNING, and first task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "a delay of 10 seconds is introduced and the workflow is sweeped to run the evaluation" - Thread.sleep(10000) - sweep(workflowInstanceId) - - then: "ensure that the workflow has been TIMED OUT and subworkflow task is CANCELED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TIMED_OUT - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType 
== TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.CANCELED - } - - and: "ensure that the subworkflow is TERMINATED and task is CANCELED" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.CANCELED - } - - when: "the subworkflow is retried" - workflowExecutor.retry(subWorkflowId, false) - - then: "ensure that the subworkflow is RUNNING and task is retried" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.CANCELED - tasks[1].taskType == 'simple_task_in_sub_wf' - tasks[1].status == Task.Status.SCHEDULED - } - - and: "verify that change flag is set on the sub workflow task in parent" - workflowExecutionService.getTask(subworkflowTaskId).subworkflowChanged - - when: "Polled for simple_task_in_sub_wf task in subworkflow" - pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('simple_task_in_sub_wf', 'task1.integration.worker', ['op': 'simple_task_in_sub_wf.done']) - - then: "verify that the 'simple_task_in_sub_wf' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - and: "verify that the subworkflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.CANCELED - tasks[1].taskType == 'simple_task_in_sub_wf' - tasks[1].status == Task.Status.COMPLETED - } - - and: "subworkflow task is in a completed state" - with(workflowExecutionService.getTask(subworkflowTaskId)) { - status == Task.Status.COMPLETED - subworkflowChanged - } - - and: "the parent workflow is swept" - sweep(workflowInstanceId) - - 
and: "the parent workflow has been resumed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged - output['op'] == 'simple_task_in_sub_wf.done' - } - - cleanup: "Ensure that the changes to the workflow def are reverted" - metadataService.updateWorkflowDef([persistedWorkflowDefinition]) - } - - def "Test terminating a subworkflow terminates parent workflow"() { - given: "Existing workflow and subworkflow definitions" - metadataService.getWorkflowDef(SUB_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'wf_with_subwf_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - input['subwf'] = 'sub_workflow' - - when: "Start a workflow with subworkflow based on the registered definition" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, - null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Polled for integration_task_1 task" - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the 'integration_task1' is complete and 
the next task (subworkflow) is in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Polled for and executed subworkflow task" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.tasks[1].subWorkflowId - - then: "verify that the 'sub_workflow_task' is polled and IN_PROGRESS" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.IN_PROGRESS - } - - and: "verify that the sub workflow is RUNNING, and first task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "subworkflow is terminated" - def terminateReason = "terminating from a test case" - workflowExecutor.terminateWorkflow(subWorkflowId, terminateReason) - - then: "verify that sub workflow is in terminated state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'simple_task_in_sub_wf' - tasks[0].status == Task.Status.CANCELED - reasonForIncompletion == terminateReason - } - - and: - sweep(workflowInstanceId) - - and: "verify that parent workflow is in terminated 
state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.CANCELED - reasonForIncompletion && reasonForIncompletion.contains(terminateReason) - } - } - - def "Test retrying a workflow with subworkflow resume"() { - setup: "Modify task definition to 0 retries" - def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() - def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - persistedTask2Definition.ownerEmail, 0, persistedTask2Definition.timeoutSeconds, - persistedTask2Definition.responseTimeoutSeconds) - metadataService.updateTaskDef(modifiedTask2Definition) - - and: "an existing workflow with subworkflow and registered definitions" - metadataService.getWorkflowDef(SIMPLE_WORKFLOW, 1) - metadataService.getWorkflowDef(WORKFLOW_WITH_SUBWORKFLOW, 1) - - and: "input required to start the workflow execution" - String correlationId = 'wf_retry_with_subwf_resume_test' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - input['subwf'] = 'integration_test_wf' - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_SUBWORKFLOW, 1, - correlationId, input, null, null, null) - - then: "verify that the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = 
workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - and: "verify that the 'integration_task_1' is complete and the next task (subworkflow) is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the subworkflow is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - asyncSystemTaskExecutor.execute(subWorkflowTask, polledTaskIds[0]) - - then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TaskType.SUB_WORKFLOW.name() - tasks[1].status == Task.Status.IN_PROGRESS - } - - when: "subworkflow is retrieved" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - def subWorkflowId = workflow.tasks[1].subWorkflowId - - then: "verify that the sub workflow is RUNNING, and first task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 
'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - and: "verify that the 'integration_task_1' is complete and the next task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and fail the integration_task_2 task" - def pollAndFailTask = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failed') - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndFailTask) - - then: "the sub workflow ends up in a FAILED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - } - - and: - sweep(workflowInstanceId) - - and: "the workflow is in a FAILED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.FAILED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SUB_WORKFLOW' - tasks[1].status == Task.Status.FAILED - } - - when: "the workflow is retried by resuming subworkflow task" - workflowExecutor.retry(workflowInstanceId, true) - - then: "the subworkflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 
'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - and: "the workflow is in a RUNNING state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.IN_PROGRESS - tasks[1].subworkflowChanged - } - - when: "poll and complete the integration_task_2 task" - pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker', ['op': 'task2.done']) - - then: "verify that the 'integration_task_2' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - then: "the integration_task_2 is complete sub workflow ends up in a COMPLETED state" - with(workflowExecutionService.getExecutionStatus(subWorkflowId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.FAILED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - } - - and: - sweep(workflowInstanceId) - - then: "the workflow is in a COMPLETED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == TASK_TYPE_SUB_WORKFLOW - tasks[1].status == Task.Status.COMPLETED - !tasks[1].subworkflowChanged - } - - cleanup: "Ensure that changes to the task def are reverted" - metadataService.updateTaskDef(persistedTask2Definition) - } -} diff --git 
a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SwitchTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SwitchTaskSpec.groovy deleted file mode 100644 index 64e62aa6a..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SwitchTaskSpec.groovy +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared -import spock.lang.Unroll - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SwitchTaskSpec extends AbstractSpecification { - - @Shared - def SWITCH_WF = "SwitchWorkflow" - - @Shared - def FORK_JOIN_SWITCH_WF = "ForkConditionalTest" - - @Shared - def COND_TASK_WF = "ConditionalTaskWF" - - def setup() { - //initialization code for each feature - workflowTestUtil.registerWorkflows('simple_switch_task_integration_test.json', - 'switch_and_fork_join_integration_test.json', - 'conditional_switch_task_workflow_integration_test.json') - } - - def "Test simple switch workflow"() { - given: "Workflow an input of a workflow with switch task" - Map input = new HashMap() - input['param1'] = 'p1' - input['param2'] = 'p2' - input['case'] = 'c' - - when: "A switch workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(SWITCH_WF, 1, - 'switch_workflow', input, - null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'SWITCH' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_1' is polled and completed" - def 
polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - - and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'SWITCH' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[2].taskType == 'integration_task_2' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_20' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_20' is polled and completed" - def polledAndCompletedTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask20Try1) - - and: "verify that the 'integration_task_20' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[3].taskType == 'integration_task_20' - tasks[3].status == Task.Status.COMPLETED - } - } - - def "Test a workflow that has a switch task that leads to a fork join"() { - given: "Workflow an input of a workflow with switch task" - Map input = new HashMap() - input['param1'] = 'p1' - input['param2'] = 'p2' - input['case'] = 'c' - - when: "A switch workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(FORK_JOIN_SWITCH_WF, 1, - 'switch_forkjoin', input, - null, null, null) - - then: "verify that the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 5 - tasks[0].taskType == 'FORK' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'SWITCH' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.SCHEDULED - tasks[4].taskType == 'JOIN' - tasks[4].status == Task.Status.IN_PROGRESS - } - - when: "the tasks 'integration_task_1' and 'integration_task_10' are polled and completed" - def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - def polledAndCompletedTask10Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_10', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) - verifyPolledAndAcknowledgedTask(polledAndCompletedTask10Try1) - - and: "verify that the 'integration_task_1' and 'integration_task_10' are COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 6 - 
tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - tasks[3].taskType == 'integration_task_10' - tasks[3].status == Task.Status.COMPLETED - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_2' is polled and completed" - def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 7 - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.IN_PROGRESS - tasks[5].taskType == 'integration_task_2' - tasks[5].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_20' - tasks[6].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_20' is polled and completed" - def polledAndCompletedTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - - then: "verify that the task is completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask20Try1) - - and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 7 - tasks[4].taskType == 'JOIN' - tasks[4].inputData['joinOn'] == ['t20', 't10'] - tasks[4].status == Task.Status.COMPLETED - tasks[6].taskType == 'integration_task_20' - tasks[6].status == 
Task.Status.COMPLETED - } - } - - def "Test default case condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the default case is executed" - Map input = new HashMap() - input['param1'] = 'xxx' - input['param2'] = 'two' - - when: "A conditional workflow is started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - 'conditional_default', input, - null, null, null) - - then: "verify that the workflow is running and the default condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'SWITCH' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['evaluationResult'] == ['xxx'] - tasks[1].taskType == 'integration_task_10' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_10' is polled and completed" - def polledAndCompletedTask10Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_10', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask10Try1) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[1].taskType == 'integration_task_10' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SWITCH' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['evaluationResult'] == ['null'] - } - } - - @Unroll - def "Test case 'nested' and '#caseValue' condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the 'nested' and '#caseValue' switch tree is executed" - Map input = new HashMap() - input['param1'] = 'nested' - input['param2'] = caseValue - - when: "A conditional workflow is 
started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - workflowCorrelationId, input, - null, null, null) - - then: "verify that the workflow is running and the 'nested' and '#caseValue' condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[0].taskType == 'SWITCH' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['evaluationResult'] == ['nested'] - tasks[1].taskType == 'SWITCH' - tasks[1].status == Task.Status.COMPLETED - tasks[1].outputData['evaluationResult'] == [caseValue] - tasks[2].taskType == expectedTaskName - tasks[2].status == Task.Status.SCHEDULED - } - - when: "the task '#expectedTaskName' is polled and completed" - def polledAndCompletedTaskTry1 = workflowTestUtil.pollAndCompleteTask(expectedTaskName, 'task.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTaskTry1) - - and: - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[2].taskType == expectedTaskName - tasks[2].status == endTaskStatus - tasks[3].taskType == 'SWITCH' - tasks[3].status == Task.Status.COMPLETED - tasks[3].outputData['evaluationResult'] == ['null'] - } - - where: - caseValue | expectedTaskName | workflowCorrelationId || endTaskStatus - 'two' | 'integration_task_2' | 'conditional_nested_two' || Task.Status.COMPLETED - 'one' | 'integration_task_1' | 'conditional_nested_one' || Task.Status.COMPLETED - } - - def "Test 'three' case condition execution of a conditional workflow"() { - given: "input for a workflow to ensure that the default case is executed" - Map input = new HashMap() - input['param1'] = 'three' - input['param2'] = 'two' - input['finalCase'] = 'notify' - - when: "A conditional workflow is 
started with the workflow input" - def workflowInstanceId = workflowExecutor.startWorkflow(COND_TASK_WF, 1, - 'conditional_three', input, - null, null, null) - - then: "verify that the workflow is running and the 'three' condition case was executed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'SWITCH' - tasks[0].status == Task.Status.COMPLETED - tasks[0].outputData['evaluationResult'] == ['three'] - tasks[1].taskType == 'integration_task_3' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_3' is polled and completed" - def polledAndCompletedTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask3Try1) - - and: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 4 - tasks[1].taskType == 'integration_task_3' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SWITCH' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['evaluationResult'] == ['notify'] - tasks[3].taskType == 'integration_task_4' - tasks[3].status == Task.Status.SCHEDULED - } - - when: "the task 'integration_task_4' is polled and completed" - def polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task1.integration.worker') - - then: "verify that the tasks are completed and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 4 - tasks[1].taskType == 
'integration_task_3' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'SWITCH' - tasks[2].status == Task.Status.COMPLETED - tasks[2].outputData['evaluationResult'] == ['notify'] - tasks[3].taskType == 'integration_task_4' - tasks[3].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy deleted file mode 100644 index 5e6c0869e..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification -import com.netflix.conductor.test.utils.UserTask - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class SystemTaskSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - UserTask userTask - - @Shared - def ASYNC_COMPLETE_SYSTEM_TASK_WORKFLOW = 'async_complete_integration_test_wf' - - def setup() { - workflowTestUtil.registerWorkflows('simple_workflow_with_async_complete_system_task_integration_test.json') - } - - def "Test system task with asyncComplete set to true"() { - - given: "An existing workflow definition with async complete system task" - metadataService.getWorkflowDef(ASYNC_COMPLETE_SYSTEM_TASK_WORKFLOW, 1) - - and: "input required to start the workflow" - String correlationId = 'async_complete_test' + UUID.randomUUID() - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - when: "the workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(ASYNC_COMPLETE_SYSTEM_TASK_WORKFLOW, 1, - correlationId, input, null, null, null) - - then: "ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) 
{ - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "poll and complete the integration_task_1 task" - def pollAndCompleteTask = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask) - - and: "verify that the 'integration_task1' is complete and the next task is in SCHEDULED state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'USER_TASK' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "the system task is started by issuing a system task call" - List polledTaskIds = queueDAO.pop("USER_TASK", 1, 200) - asyncSystemTaskExecutor.execute(userTask, polledTaskIds[0]) - - then: "verify that the system task is in IN_PROGRESS state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == "USER_TASK" - tasks[1].status == Task.Status.IN_PROGRESS - } - - when: "sweeper evaluates the workflow" - sweep(workflowInstanceId) - - then: "workflow state is unchanged" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == "USER_TASK" - tasks[1].status == Task.Status.IN_PROGRESS - } - - when: "result of the user task is curated" - Task task = 
workflowExecutionService.getExecutionStatus(workflowInstanceId, true).getTaskByRefName('user_task') - def taskResult = new TaskResult(task) - taskResult.status = TaskResult.Status.COMPLETED - taskResult.outputData['op'] = 'user.task.done' - - and: "external signal is simulated with this output to complete the system task" - workflowExecutor.updateTask(taskResult) - - then: "ensure that the system task is COMPLETED and workflow is COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'USER_TASK' - tasks[1].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy deleted file mode 100644 index 3b78c392f..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification -import com.netflix.conductor.test.utils.UserTask - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class TaskLimitsWorkflowSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Autowired - UserTask userTask - - def RATE_LIMITED_SYSTEM_TASK_WORKFLOW = 'test_rate_limit_system_task_workflow' - def RATE_LIMITED_SIMPLE_TASK_WORKFLOW = 'test_rate_limit_simple_task_workflow' - def CONCURRENCY_EXECUTION_LIMITED_WORKFLOW = 'test_concurrency_limits_workflow' - - def setup() { - workflowTestUtil.registerWorkflows( - 'rate_limited_system_task_workflow_integration_test.json', - 'rate_limited_simple_task_workflow_integration_test.json', - 'concurrency_limited_task_workflow_integration_test.json' - ) - } - - def "Verify that the rate limiting for system tasks is honored"() { - when: "Start a workflow that has a rate limited system task in it" - def workflowInstanceId = workflowExecutor.startWorkflow(RATE_LIMITED_SYSTEM_TASK_WORKFLOW, 1, - '', [:], null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'USER_TASK' - 
tasks[0].status == Task.Status.SCHEDULED - } - - when: "Execute the user task" - def scheduledTask1 = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0] - asyncSystemTaskExecutor.execute(userTask, scheduledTask1.taskId) - - then: "Verify the state of the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'USER_TASK' - tasks[0].status == Task.Status.COMPLETED - } - - when: "A new instance of the workflow is started" - def workflowTwoInstanceId = workflowExecutor.startWorkflow(RATE_LIMITED_SYSTEM_TASK_WORKFLOW, 1, - '', [:], null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'USER_TASK' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Execute the user task on the second workflow" - def scheduledTask2 = workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true).tasks[0] - asyncSystemTaskExecutor.execute(userTask, scheduledTask2.taskId) - - then: "Verify the state of the workflow is still in running state" - with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'USER_TASK' - tasks[0].status == Task.Status.SCHEDULED - } - } - - def "Verify that the rate limiting for simple tasks is honored"() { - when: "Start a workflow that has a rate limited simple task in it" - def workflowInstanceId = workflowExecutor.startWorkflow(RATE_LIMITED_SIMPLE_TASK_WORKFLOW, 1, '', [:], null, - null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - 
tasks.size() == 1 - tasks[0].taskType == 'test_simple_task_with_rateLimits' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "polling and completing the task" - Tuple polledAndCompletedTask = workflowTestUtil.pollAndCompleteTask('test_simple_task_with_rateLimits', 'rate.limit.test.worker') - - then: "verify that the task was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndCompletedTask) - - and: "the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'test_simple_task_with_rateLimits' - tasks[0].status == Task.Status.COMPLETED - } - - when: "A new instance of the workflow is started" - def workflowTwoInstanceId = workflowExecutor.startWorkflow(RATE_LIMITED_SIMPLE_TASK_WORKFLOW, 1, - '', [:], null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'test_simple_task_with_rateLimits' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "polling for the task" - def polledTask = workflowExecutionService.poll('test_simple_task_with_rateLimits', 'rate.limit.test.worker') - - then: "verify that no task is returned" - !polledTask - - when: "sleep for 10 seconds to ensure rate limit duration is past" - Thread.sleep(10000L) - - and: "the task offset time is reset to ensure that a task is returned on the next poll" - queueDAO.resetOffsetTime('test_simple_task_with_rateLimits', - workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true).tasks[0].taskId) - - and: "polling and completing the task" - polledAndCompletedTask = workflowTestUtil.pollAndCompleteTask('test_simple_task_with_rateLimits', 'rate.limit.test.worker') - - then: "verify that the task was polled and acknowledged" - 
verifyPolledAndAcknowledgedTask(polledAndCompletedTask) - - and: "the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'test_simple_task_with_rateLimits' - tasks[0].status == Task.Status.COMPLETED - } - } - - def "Verify that concurrency limited tasks are honored during workflow execution"() { - when: "Start a workflow that has a concurrency execution limited task in it" - def workflowInstanceId = workflowExecutor.startWorkflow(CONCURRENCY_EXECUTION_LIMITED_WORKFLOW, 1, - '', [:], null, null, null) - - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'test_task_with_concurrency_limit' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The task is polled and acknowledged" - def polledTask1 = workflowExecutionService.poll('test_task_with_concurrency_limit', 'test_task_worker') - - then: "Verify that the task was polled and acknowledged" - polledTask1.taskType == 'test_task_with_concurrency_limit' - polledTask1.workflowInstanceId == workflowInstanceId - - when: "A additional workflow that has a concurrency execution limited task in it" - def workflowTwoInstanceId = workflowExecutor.startWorkflow(CONCURRENCY_EXECUTION_LIMITED_WORKFLOW, 1, - '', [:], null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'test_task_with_concurrency_limit' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The task is polled" - def polledTaskTry1 = workflowExecutionService.poll('test_task_with_concurrency_limit', 'test_task_worker') - - then: "Verify that there is no 
task returned" - !polledTaskTry1 - - when: "The task that was polled and acknowledged is completed" - polledTask1.status = Task.Status.COMPLETED - workflowExecutionService.updateTask(new TaskResult(polledTask1)) - - and: "The task offset time is reset to ensure that a task is returned on the next poll" - queueDAO.resetOffsetTime('test_task_with_concurrency_limit', - workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true).tasks[0].taskId) - - then: "Verify that the first workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 1 - tasks[0].taskType == 'test_task_with_concurrency_limit' - tasks[0].status == Task.Status.COMPLETED - } - - and: "The task is polled again and acknowledged" - def polledTaskTry2 = workflowExecutionService.poll('test_task_with_concurrency_limit', 'test_task_worker') - - then: "Verify that the task is returned since there are no tasks in progress" - polledTaskTry2.taskType == 'test_task_with_concurrency_limit' - polledTaskTry2.workflowInstanceId == workflowTwoInstanceId - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy deleted file mode 100644 index 4f34fb01b..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class WaitTaskSpec extends AbstractSpecification { - - @Shared - def WAIT_BASED_WORKFLOW = 'test_wait_workflow' - - def setup() { - workflowTestUtil.registerWorkflows('wait_workflow_integration_test.json') - } - - def "Verify that a wait based simple workflow is executed"() { - when: "Start a wait task based workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(WAIT_BASED_WORKFLOW, 1, - '', [:], null, null, null) - - then: "Retrieve the workflow" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == TaskType.WAIT.name() - tasks[0].status == Task.Status.IN_PROGRESS - } - - when: "The wait task is completed" - def waitTask = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0] - waitTask.status = Task.Status.COMPLETED - workflowExecutor.updateTask(new TaskResult(waitTask)) - - then: "ensure that the wait task is completed and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 
TaskType.WAIT.name() - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "The integration_task_1 is polled and completed" - def polledAndCompletedTry1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "verify that the task was polled and completed and the workflow is in a complete state" - verifyPolledAndAcknowledgedTask(polledAndCompletedTry1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy deleted file mode 100644 index 9725d52b7..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy +++ /dev/null @@ -1,985 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.tasks.TaskType -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.common.metadata.workflow.WorkflowTask -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.utils.Utils -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.test.base.AbstractSpecification - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class WorkflowAndTaskConfigurationSpec extends AbstractSpecification { - - @Autowired - QueueDAO queueDAO - - @Shared - def LINEAR_WORKFLOW_T1_T2 = 'integration_test_wf' - - @Shared - def TEMPLATED_LINEAR_WORKFLOW = 'integration_test_template_wf' - - @Shared - def WORKFLOW_WITH_OPTIONAL_TASK = 'optional_task_wf' - - @Shared - def TEST_WORKFLOW = 'integration_test_wf3' - - @Shared - def WAIT_TIME_OUT_WORKFLOW = 'test_wait_timeout' - - def setup() { - //Register LINEAR_WORKFLOW_T1_T2, TEST_WORKFLOW, RTOWF, WORKFLOW_WITH_OPTIONAL_TASK - workflowTestUtil.registerWorkflows( - 'simple_workflow_1_integration_test.json', - 'simple_workflow_1_input_template_integration_test.json', - 'simple_workflow_3_integration_test.json', - 
'simple_workflow_with_optional_task_integration_test.json', - 'simple_wait_task_workflow_integration_test.json') - } - - def "Test simple workflow which has an optional task"() { - - given: "A input parameters for a workflow with an optional task" - def correlationId = 'integration_test' + UUID.randomUUID().toString() - def workflowInput = new HashMap() - workflowInput['param1'] = 'p1 value' - workflowInput['param2'] = 'p2 value' - - when: "An optional task workflow is started" - def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_OPTIONAL_TASK, 1, - correlationId, workflowInput, - null, null, null) - - then: "verify that the workflow has started and the optional task is in a scheduled state" - workflowInstanceId - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].status == Task.Status.SCHEDULED - tasks[0].taskType == 'task_optional' - } - - when: "The first optional task is polled and failed" - Tuple polledAndFailedTaskTry1 = workflowTestUtil.pollAndFailTask('task_optional', - 'task1.integration.worker', 'NETWORK ERROR') - - then: "Verify that the task_optional was polled and acknowledged" - verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry1) - - when: "A decide is executed on the workflow" - workflowExecutor.decide(workflowInstanceId) - - then: "verify that the workflow is still running and the first optional task has failed and the retry has kicked in" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].status == Task.Status.FAILED - tasks[0].taskType == 'task_optional' - tasks[1].status == Task.Status.SCHEDULED - tasks[1].taskType == 'task_optional' - } - - when: "Poll the optional task again and do not complete it and run decide" - workflowExecutionService.poll('task_optional', 'task1.integration.worker') - Thread.sleep(5000) - 
workflowExecutor.decide(workflowInstanceId) - - then: "Ensure that the workflow is updated" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].status == Task.Status.COMPLETED_WITH_ERRORS - tasks[1].taskType == 'task_optional' - tasks[2].status == Task.Status.SCHEDULED - tasks[2].taskType == 'integration_task_2' - } - - when: "The second task 'integration_task_2' is polled and completed" - def task2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "Verify that the task was polled and acknowledged" - verifyPolledAndAcknowledgedTask(task2Try1) - - and: "Ensure that the workflow is in completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[2].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_2' - } - } - - def "test workflow with input template parsing"() { - given: "Input parameters for a workflow with input template" - def correlationId = 'integration_test' + UUID.randomUUID().toString() - def workflowInput = new HashMap() - // leave other params blank on purpose to test input templates - workflowInput['param3'] = 'external string' - - when: "Is executed and completes" - def workflowInstanceId = workflowExecutor.startWorkflow(TEMPLATED_LINEAR_WORKFLOW, 1, - correlationId, workflowInput, - null, null, null) - workflowExecutor.decide(workflowInstanceId) - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "Verify that input template is processed" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - output == [ - output: "task1.done", - param3: 
'external string', - param2: ['list', 'of', 'strings'], - param1: [nested_object: [nested_key: "nested_value"]] - ] - } - } - - def "Test simple workflow with task time out configuration"() { - - setup: "Register a task definition with retry policy on time out" - def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() - def modifiedTaskDefinition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - persistedTask1Definition.ownerEmail, 1, 1, 1) - modifiedTaskDefinition.retryDelaySeconds = 0 - modifiedTaskDefinition.timeoutPolicy = TaskDef.TimeoutPolicy.RETRY - metadataService.updateTaskDef(modifiedTaskDefinition) - - when: "A simple workflow is started that has a task with time out and retry configured" - String correlationId = 'unit_test_1' + UUID.randomUUID() - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - input['failureWfName'] = 'FanInOutTest' - - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - and: "The decider queue has one task that is ready to be polled" - queueDAO.getSize(Utils.DECIDER_QUEUE) == 1 - - when: "The the first task 'integration_task_1' is polled and acknowledged" - def task1Try1 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that a task was polled" - task1Try1 - task1Try1.workflowInstanceId == workflowInstanceId - - and: "Ensure that the decider size queue is 1 to to enable the evaluation" - queueDAO.getSize(Utils.DECIDER_QUEUE) == 1 - - when: "There is a delay of 3 seconds introduced and 
the workflow is sweeped to run the evaluation" - Thread.sleep(3000) - sweep(workflowInstanceId) - - then: "Ensure that the first task has been TIMED OUT and the next task is SCHEDULED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.TIMED_OUT - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Poll for the task again and acknowledge" - def task1Try2 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that a task was polled" - task1Try2 - task1Try2.workflowInstanceId == workflowInstanceId - - when: "There is a delay of 3 seconds introduced and the workflow is swept to run the evaluation" - Thread.sleep(3000) - sweep(workflowInstanceId) - - then: "Ensure that the first task has been TIMED OUT and the next task is SCHEDULED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TIMED_OUT - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.TIMED_OUT - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.TIMED_OUT - } - - cleanup: "Ensure that the changes of the 'integration_task_1' are reverted" - metadataService.updateTaskDef(persistedTask1Definition) - } - - def "Test workflow timeout configurations"() { - setup: "Get the workflow definition and change the workflow configuration" - def testWorkflowDefinition = metadataService.getWorkflowDef(TEST_WORKFLOW, 1) - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.TIME_OUT_WF - testWorkflowDefinition.timeoutSeconds = 5 - metadataService.updateWorkflowDef(testWorkflowDefinition) - - when: "A simple workflow is started that has a workflow timeout configured" - String correlationId = 'unit_test_3' + UUID.randomUUID() - def input 
= new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - input['failureWfName'] = 'FanInOutTest' - - def workflowInstanceId = workflowExecutor.startWorkflow(TEST_WORKFLOW, 1, - correlationId, input, - null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The the first task 'integration_task_1' is polled and acknowledged" - def task1Try1 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that a task was polled" - task1Try1 - task1Try1.workflowInstanceId == workflowInstanceId - - when: "There is a delay of 6 seconds introduced and the workflow is swept to run the evaluation" - Thread.sleep(6000) - sweep(workflowInstanceId) - - then: "Ensure that the workflow has timed out" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TIMED_OUT - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - - cleanup: "Ensure that the workflow configuration changes are reverted" - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.ALERT_ONLY - testWorkflowDefinition.timeoutSeconds = 0 - metadataService.updateWorkflowDef(testWorkflowDefinition) - } - - def "Test retrying a timed out workflow due to workflow timeout"() { - setup: "Get the workflow definition and change the workflow configuration" - def testWorkflowDefinition = metadataService.getWorkflowDef(TEST_WORKFLOW, 1) - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.TIME_OUT_WF - testWorkflowDefinition.timeoutSeconds = 5 - metadataService.updateWorkflowDef(testWorkflowDefinition) - - when: "A simple workflow is started that has a 
workflow timeout configured" - String correlationId = 'retry_timeout_wf' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(TEST_WORKFLOW, 1, - correlationId, input, null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The the first task 'integration_task_1' is polled and acknowledged" - def task1Try1 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that a task was polled" - task1Try1 - task1Try1.workflowInstanceId == workflowInstanceId - - when: "There is a delay of 6 seconds introduced and the workflow is swept to run the evaluation" - Thread.sleep(6000) - sweep(workflowInstanceId) - - then: "Ensure that the workflow has timed out" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TIMED_OUT - lastRetriedTime == 0 - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - - when: "Retrying the workflow" - workflowExecutor.retry(workflowInstanceId, false) - - then: "Ensure that the workflow is RUNNING and task is retried" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - lastRetriedTime != 0 - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - cleanup: "Ensure that the workflow configuration changes are reverted" - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.ALERT_ONLY - 
testWorkflowDefinition.timeoutSeconds = 0 - metadataService.updateWorkflowDef(testWorkflowDefinition) - } - - def "Test retrying a timed out workflow due to workflow timeout without unsuccessful tasks"() { - setup: "Get the workflow definition and change the workflow configuration" - def testWorkflowDefinition = metadataService.getWorkflowDef(TEST_WORKFLOW, 1) - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.TIME_OUT_WF - testWorkflowDefinition.timeoutSeconds = 5 - metadataService.updateWorkflowDef(testWorkflowDefinition) - - when: "A simple workflow is started that has a workflow timeout configured" - String correlationId = 'retry_timeout_wf' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(TEST_WORKFLOW, 1, - correlationId, input, null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The the first task 'integration_task_1' is polled and acknowledged" - def task1 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that a task was polled" - task1 - task1.workflowInstanceId == workflowInstanceId - - when: "There is a delay of 6 seconds introduced and the task is completed" - Thread.sleep(6000) - task1.status = Task.Status.COMPLETED - workflowExecutor.updateTask(new TaskResult(task1)) - - then: "verify that the workflow is TIMED_OUT and the task is COMPLETED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TIMED_OUT - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - } - - when: "Retrying the workflow" - 
workflowExecutor.retry(workflowInstanceId, false) - sweep(workflowInstanceId) - - then: "Ensure that the workflow is RUNNING and next task is scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - lastRetriedTime != 0 - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - cleanup: "Ensure that the workflow configuration changes are reverted" - testWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.ALERT_ONLY - testWorkflowDefinition.timeoutSeconds = 0 - metadataService.updateWorkflowDef(testWorkflowDefinition) - } - - def "Test re-running the simple workflow multiple times after completion"() { - - given: "input required to start the workflow execution" - String correlationId = 'unit_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - when: "Start a workflow based on the registered simple workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Poll and complete the 'integration_task_1' " - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and complete 'integration_task_2'" - def pollAndCompleteTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the 'integration_task_2' has been polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try1, ['tp1': inputParam1, 'tp2': 'task1.done']) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - output.containsKey('o3') - } - - when: "The completed workflow is re run after integration_task_1" - def reRunWorkflowRequest1 = new RerunWorkflowRequest() - reRunWorkflowRequest1.reRunFromWorkflowId = workflowInstanceId - def reRunTaskId = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[1].taskId - reRunWorkflowRequest1.reRunFromTaskId = reRunTaskId - def reRun1WorkflowInstanceId = workflowExecutor.rerun(reRunWorkflowRequest1) - - then: "Verify that the workflow is in running state and has started the re run after task 1" - with(workflowExecutionService.getExecutionStatus(reRun1WorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and complete 'integration_task_2'" - def pollAndCompleteReRunTask2Try1 = 
workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the 'integration_task_2' has been polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteReRunTask2Try1, ['tp1': inputParam1, 'tp2': 'task1.done']) - - and: "verify that the re run workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(reRun1WorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - output.containsKey('o3') - } - - when: "The completed workflow is re run" - def reRunWorkflowRequest2 = new RerunWorkflowRequest() - reRunWorkflowRequest2.reRunFromWorkflowId = workflowInstanceId - def reRun2WorkflowInstanceId = workflowExecutor.rerun(reRunWorkflowRequest2) - - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(reRun2WorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Poll and complete the 'integration_task_1' " - def pollAndCompleteReRun2Task1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteReRun2Task1Try1) - - and: "verify that the 'integration_task1' is complete and the next task is scheduled" - with(workflowExecutionService.getExecutionStatus(reRun2WorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and complete 'integration_task_2'" - def 
pollAndCompleteReRun2Task2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the 'integration_task_2' has been polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteReRun2Task2Try1, ['tp1': inputParam1, 'tp2': 'task1.done']) - - and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(reRun2WorkflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - output.containsKey('o3') - } - } - - def "Test task skipping in simple workflows"() { - - when: "A simple workflow is started" - String correlationId = 'unit_test_3' + UUID.randomUUID() - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - def workflowInstanceId = workflowExecutor.startWorkflow(TEST_WORKFLOW, 1, - correlationId, input, - null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The second task in the workflow is skipped" - workflowExecutor.skipTaskFromWorkflow(workflowInstanceId, 't2', null) - - then: "Ensure that the second task in the workflow is skipped and the first one is still in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_2' - tasks[0].status == Task.Status.SKIPPED - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "Poll and complete the 'integration_task_1' " - def pollAndCompleteTask1Try1 = 
workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "Ensure that the third task is scheduled and the first one is in complete state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'integration_task_1' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_3' - tasks[2].status == Task.Status.SCHEDULED - } - - when: "Poll and complete the 'integration_task_3' " - def pollAndCompleteTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3.integration.worker') - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask3Try1) - - and: "verify that the workflow is in a complete state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[2].taskType == 'integration_task_3' - tasks[2].status == Task.Status.COMPLETED - } - } - - def "Test pause and resume simple workflow"() { - - given: "input required to start the workflow execution" - String correlationId = 'unit_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - when: "Start a workflow based on the registered simple workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "verify that the workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - 
tasks[0].status == Task.Status.SCHEDULED - } - - when: "The running workflow is paused" - workflowExecutor.pauseWorkflow(workflowInstanceId) - - and: "Poll and complete the 'integration_task_1' " - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the workflow is in PAUSED state and the next task is not scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.PAUSED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - } - - when: "The next task in the workflow is polled for" - def task2Try1 = workflowExecutionService.poll('integration_task_2', 'task2.integration.worker') - - then: "verify that there was no task polled" - !task2Try1 - - when: "A decide is run explicitly" - workflowExecutor.decide(workflowInstanceId) - - and: "The next task is polled again" - def task2Try2 = workflowExecutionService.poll('integration_task_2', 'task2.integration.worker') - - then: "verify that there was no task polled" - !task2Try2 - - when: "The workflow is resumed" - workflowExecutor.resumeWorkflow(workflowInstanceId) - - then: "verify that the workflow was resumed and the next task is in a scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll and complete 'integration_task_2'" - def pollAndCompleteTask2Try3 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that 
the 'integration_task_2' has been polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try3, ['tp1': inputParam1, 'tp2': 'task1.done']) - - and: "verify that the re run workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - output.containsKey('o3') - } - } - - def "Test wait time out task based simple workflow"() { - when: "Start a workflow based on a task that has a registered wait time out" - def workflowInstanceId = workflowExecutor.startWorkflow(WAIT_TIME_OUT_WORKFLOW, 1, - '', [:], null, null, null) - - then: "verify that the workflow is running and the first task scheduled" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'WAIT' - tasks[0].status == Task.Status.IN_PROGRESS - } - - when: "A delay is introduced" - Thread.sleep(3000) - - and: "A decide is executed on the workflow" - workflowExecutor.decide(workflowInstanceId) - - then: "verify that the workflow is in running state and a replacement task has been scheduled due to time out" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'WAIT' - tasks[0].status == Task.Status.TIMED_OUT - tasks[1].taskType == 'WAIT' - tasks[1].status == Task.Status.IN_PROGRESS - } - - when: "The wait task is completed" - def waitTask = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[1] - waitTask.status = Task.Status.COMPLETED - workflowExecutor.updateTask(new TaskResult(waitTask)) - - and: "verify that the workflow is in running state and the next task is scheduled and 'waitTimeout' task is completed" - 
with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 3 - tasks[1].taskType == 'WAIT' - tasks[1].status == Task.Status.COMPLETED - tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.SCHEDULED - } - - and: "Poll and complete the 'integration_task_1' " - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "The workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 3 - tasks[2].taskType == 'integration_task_1' - tasks[2].status == Task.Status.COMPLETED - } - } - - def "Test simple workflow with callbackAfterSeconds for tasks"() { - - given: "input required to start the workflow execution" - String correlationId = 'unit_test_1' - def input = new HashMap() - String inputParam1 = 'p1 value' - input['param1'] = inputParam1 - input['param2'] = 'p2 value' - - when: "Start a workflow based on the registered simple workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, - correlationId, input, - null, null, null) - - then: "Ensure that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The first task is polled and then a callbackAfterSeconds is added to the task" - def task1Try1 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - task1Try1.status = Task.Status.IN_PROGRESS - task1Try1.callbackAfterSeconds = 2L - 
workflowExecutionService.updateTask(new TaskResult(task1Try1)) - - then: "verify that the workflow is in running state and the task is in SCHEDULED" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "the 'integration_task_1' is polled again" - def task1Try2 = workflowExecutionService.poll('integration_task_1', 'task1.worker') - - then: "Ensure that there was no task polled due to the callBackAfterSeconds" - !task1Try2 - - then: "verify that the workflow is in running state and the task is in progress" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "There is a delay introduced to go over the callbackAfterSeconds interval" - Thread.sleep(2050) - - and: "the 'integration_task_1' is polled and completed" - def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) - - and: "verify that the workflow has moved forward and 'integration_task_1 is completed'" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "The second task is polled and then a callbackAfterSeconds is added to the task" - def task2Try1 = workflowExecutionService.poll('integration_task_2', 'task2.worker') - task2Try1.status = 
Task.Status.IN_PROGRESS - task2Try1.callbackAfterSeconds = 5L - workflowExecutionService.updateTask(new TaskResult(task2Try1)) - - then: "Verify that the workflow is in running state and the task is in scheduled state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "poll for 'integration_task_2'" - def task2Try2 = workflowExecutionService.poll('integration_task_2', 'task2.worker') - - then: "Ensure that there was no task polled due to the callBackAfterSeconds, even though the task is in scheduled state" - !task2Try2 - - when: "A delay is introduced to get over the callBackAfterSeconds interval" - Thread.sleep(5100) - - and: "the 'integration_task_2' is polled and completed" - def pollAndCompleteTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') - - then: "verify that the 'integration_task_1' was polled and acknowledged" - verifyPolledAndAcknowledgedTask(pollAndCompleteTask2Try1) - - and: "verify that the workflow has moved forward and 'integration_task_1 is completed'" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - } - - def "Test workflow with no tasks"() { - setup: "Create a workflow definition with no tasks" - WorkflowDef emptyWorkflowDef = new WorkflowDef() - emptyWorkflowDef.setName("empty_workflow") - emptyWorkflowDef.setSchemaVersion(2) - - when: "a workflow is started with this definition" - def input = new HashMap() - def correlationId = 'empty_workflow' - def workflowInstanceId = workflowExecutor.startWorkflow(emptyWorkflowDef, input, null, 
correlationId, null, null) - - then: "the workflow is completed" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - tasks.size() == 0 - } - } - - def "Test task def template"() { - setup: "Register a task definition with input template" - TaskDef templatedTask = new TaskDef() - templatedTask.setName('templated_task') - def httpRequest = new HashMap<>() - httpRequest['method'] = 'GET' - httpRequest['vipStack'] = '${STACK2}' - httpRequest['uri'] = '/get/something' - def body = new HashMap<>() - body['inputPaths'] = Arrays.asList('${workflow.input.path1}', '${workflow.input.path2}') - body['requestDetails'] = '${workflow.input.requestDetails}' - body['outputPath'] = '${workflow.input.outputPath}' - httpRequest['body'] = body - templatedTask.inputTemplate['http_request'] = httpRequest - templatedTask.ownerEmail = "test@harness.com" - metadataService.registerTaskDef(Arrays.asList(templatedTask)) - - and: "set a system property for STACK2" - System.setProperty('STACK2', 'test_stack') - - and: "a workflow definition using this task is created" - WorkflowTask workflowTask = new WorkflowTask() - workflowTask.setName(templatedTask.getName()) - workflowTask.setWorkflowTaskType(TaskType.SIMPLE) - workflowTask.setTaskReferenceName("t0") - - WorkflowDef templateWorkflowDef = new WorkflowDef() - templateWorkflowDef.setName("template_workflow") - templateWorkflowDef.getTasks().add(workflowTask) - templateWorkflowDef.setSchemaVersion(2) - templateWorkflowDef.setOwnerEmail("test@harness.com") - metadataService.registerWorkflowDef(templateWorkflowDef) - - and: "the input to the workflow is curated" - def requestDetails = new HashMap<>() - requestDetails['key1'] = 'value1' - requestDetails['key2'] = 42 - - Map input = new HashMap<>() - input['path1'] = 'file://path1' - input['path2'] = 'file://path2' - input['outputPath'] = 's3://bucket/outputPath' - input['requestDetails'] = requestDetails - - when: "the 
workflow is started" - def correlationId = 'workflow_taskdef_template' - def workflowInstanceId = workflowExecutor.startWorkflow(templateWorkflowDef, input, null, correlationId, null, null) - - then: "the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].inputData.get('http_request') instanceof Map - tasks[0].inputData.get('http_request')['method'] == 'GET' - tasks[0].inputData.get('http_request')['vipStack'] == 'test_stack' - tasks[0].inputData.get('http_request')['body'] instanceof Map - tasks[0].inputData.get('http_request')['body']['requestDetails'] instanceof Map - tasks[0].inputData.get('http_request')['body']['requestDetails']['key1'] == 'value1' - tasks[0].inputData.get('http_request')['body']['requestDetails']['key2'] == 42 - tasks[0].inputData.get('http_request')['body']['outputPath'] == 's3://bucket/outputPath' - tasks[0].inputData.get('http_request')['body']['inputPaths'] instanceof List - tasks[0].inputData.get('http_request')['body']['inputPaths'][0] == 'file://path1' - tasks[0].inputData.get('http_request')['body']['inputPaths'][1] == 'file://path2' - tasks[0].inputData.get('http_request')['uri'] == '/get/something' - } - } - - def "Test task def created if not exist"() { - setup: "Register a workflow definition with task def not registered" - def taskDefName = "task_not_registered" - WorkflowTask workflowTask = new WorkflowTask() - workflowTask.setName(taskDefName) - workflowTask.setWorkflowTaskType(TaskType.SIMPLE) - workflowTask.setTaskReferenceName("t0") - - WorkflowDef testWorkflowDef = new WorkflowDef() - testWorkflowDef.setName("test_workflow") - testWorkflowDef.getTasks().add(workflowTask) - testWorkflowDef.setSchemaVersion(2) - testWorkflowDef.setOwnerEmail("test@harness.com") - metadataService.registerWorkflowDef(testWorkflowDef) - - when: "the workflow is started" - def correlationId = 
'workflow_taskdef_not_registered' - def workflowInstanceId = workflowExecutor.startWorkflow(testWorkflowDef, new HashMap(), null, correlationId, null, null) - - then: "the workflow is in running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskDefName == taskDefName - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy deleted file mode 100644 index 1a04b59b6..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy +++ /dev/null @@ -1,560 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.resiliency - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.http.HttpStatus - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.rest.controllers.TaskResource -import com.netflix.conductor.rest.controllers.WorkflowResource -import com.netflix.conductor.test.base.AbstractResiliencySpecification - -/** - * When QueueDAO is unavailable, - * Ensure All Worklow and Task resource endpoints either: - * 1. Fails and/or throws an Exception - * 2. Succeeds - * 3. 
Doesn't involve QueueDAO - */ -class QueueResiliencySpec extends AbstractResiliencySpecification { - - @Autowired - WorkflowResource workflowResource - - @Autowired - TaskResource taskResource - - def SIMPLE_TWO_TASK_WORKFLOW = 'integration_test_wf' - - def setup() { - workflowTestUtil.taskDefinitions() - workflowTestUtil.registerWorkflows( - 'simple_workflow_1_integration_test.json' - ) - } - - /// Workflow Resource endpoints - - def "Verify Start workflow fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def response = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - then: "Verify workflow starts when there are no Queue failures" - response - - when: "We try same request Queue failure" - response = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify that workflow start fails with BACKEND_ERROR" - 1 * queueDAO.push(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue push failed from Spy") } - thrown(ApplicationException) - } - - def "Verify terminate succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "We terminate it when QueueDAO is unavailable" - workflowResource.terminate(workflowInstanceId, "Terminated from a test") - - then: "Verify that terminate is successful without any exceptions" - 2 * queueDAO.remove(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue remove failed from Spy") } 
- 0 * queueDAO._ - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - } - - def "Verify Restart workflow fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - and: "We terminate it when QueueDAO is unavailable" - workflowResource.terminate(workflowInstanceId, "Terminated from a test") - - then: "Verify that workflow is in terminated state" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - - when: "We restart workflow when QueueDAO is unavailable" - workflowResource.restart(workflowInstanceId, false) - - then: "" - 1 * queueDAO.push(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue push failed from Spy") } - 1 * queueDAO.remove(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue remove failed from Spy") } - 0 * queueDAO._ - thrown(ApplicationException) - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 0 - } - } - - def "Verify rerun fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - and: "terminate it" - workflowResource.terminate(workflowInstanceId, "Terminated from a test") - - then: "Verify that workflow is in terminated state" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - - when: "Workflow is rerun when QueueDAO is unavailable" - def rerunWorkflowRequest = new RerunWorkflowRequest() - rerunWorkflowRequest.setReRunFromWorkflowId(workflowInstanceId) - workflowResource.rerun(workflowInstanceId, rerunWorkflowRequest) - - then: "" - 1 * queueDAO.push(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue push failed from Spy") } - 0 * queueDAO._ - thrown(ApplicationException) - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 0 - } - } - - def "Verify retry fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - and: "terminate it" - workflowResource.terminate(workflowInstanceId, "Terminated from a test") - - then: "Verify that workflow is in terminated state" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - - when: "workflow is restarted when QueueDAO is unavailable" - workflowResource.retry(workflowInstanceId, false) - - then: "Verify retry fails" - 1 * queueDAO.push(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue push failed from Spy") } - 0 * queueDAO._ - thrown(ApplicationException) - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.TERMINATED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.CANCELED - } - } - - def "Verify getWorkflow succeeds when QueueDAO is unavailable"() { - when: 
"Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "We get a workflow when QueueDAO is unavailable" - def workflow = workflowResource.getExecutionStatus(workflowInstanceId, true) - - then: "Verify workflow is returned" - 0 * queueDAO._ - workflow.getStatus() == Workflow.WorkflowStatus.RUNNING - workflow.getTasks().size() == 1 - workflow.getTasks()[0].status == Task.Status.SCHEDULED - } - - def "Verify getWorkflows succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "We get a workflow when QueueDAO is unavailable" - def workflows = workflowResource.getWorkflows(SIMPLE_TWO_TASK_WORKFLOW, "", true, true) - - then: "Verify queueDAO is not involved and an exception is not thrown" - 0 * queueDAO._ - notThrown(Exception) - } - - def "Verify remove workflow succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - then: "Verify workflow is started" - - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - 
tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "We get a workflow when QueueDAO is unavailable" - workflowResource.delete(workflowInstanceId, false) - - then: "Verify queueDAO is called to remove from _deciderQueue" - 1 * queueDAO._ - - when: "We try to get deleted workflow" - workflowResource.getExecutionStatus(workflowInstanceId, true) - - then: - thrown(ApplicationException) - } - - def "Verify decide succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "We decide a workflow" - workflowResource.decide(workflowInstanceId) - - then: "Verify queueDAO is not involved" - 0 * queueDAO._ - } - - def "Verify pause succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The workflow is paused when QueueDAO is unavailable" - workflowResource.pauseWorkflow(workflowInstanceId) - - then: "Verify workflow is paused without any exceptions" - 1 * queueDAO.remove(*_) >> { throw new IllegalStateException("Queue remove failed from Spy") } - 0 * queueDAO._ - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == 
Workflow.WorkflowStatus.PAUSED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - } - - def "Verify resume fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The workflow is paused" - workflowResource.pauseWorkflow(workflowInstanceId) - - then: "Verify workflow is paused" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.PAUSED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Workflow is resumed when QueueDAO is unavailable" - workflowResource.resumeWorkflow(workflowInstanceId) - - then: "exception is thrown" - 1 * queueDAO.push(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue push failed from Spy") } - thrown(ApplicationException) - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.PAUSED - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - } - - def "Verify reset callbacks fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 
'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "Task is updated with callBackAfterSeconds" - def workflow = workflowResource.getExecutionStatus(workflowInstanceId, true) - def task = workflow.getTasks().get(0) - def taskResult = new TaskResult(task) - taskResult.setCallbackAfterSeconds(120) - taskResource.updateTask(taskResult) - - and: "and then reset callbacks when QueueDAO is unavailable" - workflowResource.resetWorkflow(workflowInstanceId) - - then: "Verify an exception is thrown" - 1 * queueDAO.resetOffsetTime(*_) >> { throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, "Queue resetOffsetTime failed from Spy") } - thrown(ApplicationException) - } - - def "Verify search is not impacted by QueueDAO"() { - when: "We perform a search" - workflowResource.search(0, 1, "", "", "") - - then: "Verify it doesn't involve QueueDAO" - 0 * queueDAO._ - } - - def "Verify search workflows by tasks is not impacted by QueueDAO"() { - when: "We perform a search" - workflowResource.searchWorkflowsByTasks(0, 1, "", "", "") - - then: "Verify it doesn't involve QueueDAO" - 0 * queueDAO._ - } - - def "Verify get external storage location is not impacted by QueueDAO"() { - when: - workflowResource.getExternalStorageLocation("", "", "") - - then: "Verify it doesn't involve QueueDAO" - 0 * queueDAO._ - } - - - /// Task Resource endpoints - - def "Verify polls return with no result when QueueDAO is unavailable"() { - when: "Some task 'integration_task_1' is polled" - def responseEntity = taskResource.poll("integration_task_1", "test", "") - - then: - 1 * queueDAO.pop(*_) >> { throw new IllegalStateException("Queue pop failed from Spy") } - 0 * queueDAO._ - notThrown(Exception) - responseEntity && responseEntity.statusCode == HttpStatus.NO_CONTENT && !responseEntity.body - } - - def "Verify updateTask with COMPLETE status succeeds when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = 
workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The first task 'integration_task_1' is polled" - def responseEntity = taskResource.poll("integration_task_1", "test", null) - - then: "Verify task is returned successfully" - responseEntity && responseEntity.statusCode == HttpStatus.OK && responseEntity.body - responseEntity.body.status == Task.Status.IN_PROGRESS - responseEntity.body.taskType == 'integration_task_1' - - when: "the above task is updated, while QueueDAO is unavailable" - def taskResult = new TaskResult(responseEntity.body) - taskResult.setStatus(TaskResult.Status.COMPLETED) - def result = taskResource.updateTask(taskResult) - - then: "updateTask returns successfully without any exceptions" - 1 * queueDAO.remove(*_) >> { throw new IllegalStateException("Queue remove failed from Spy") } - result == responseEntity.body.taskId - notThrown(Exception) - } - - def "Verify updateTask with IN_PROGRESS state fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - then: "Verify workflow is started" - with(workflowResource.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 1 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.SCHEDULED - } - - when: "The first task 'integration_task_1' is polled" - def responseEntity = taskResource.poll("integration_task_1", "test", null) - - then: "Verify task is returned successfully" - responseEntity && responseEntity.statusCode == HttpStatus.OK - 
responseEntity.body.status == Task.Status.IN_PROGRESS - responseEntity.body.taskType == 'integration_task_1' - - when: "the above task is updated, while QueueDAO is unavailable" - def taskResult = new TaskResult(responseEntity.body) - taskResult.setStatus(TaskResult.Status.IN_PROGRESS) - taskResult.setCallbackAfterSeconds(120) - def result = taskResource.updateTask(taskResult) - - then: "updateTask fails with an exception" - 1 * queueDAO.postpone(*_) >> { throw new IllegalStateException("Queue postpone failed from Spy") } - thrown(Exception) - } - - def "verify getTaskQueueSizes fails when QueueDAO is unavailable"() { - when: - taskResource.size(Arrays.asList("testTaskType", "testTaskType2")) - - then: - 1 * queueDAO.getSize(*_) >> { throw new IllegalStateException("Queue getSize failed from Spy") } - thrown(Exception) - } - - def "Verify log doesn't involve QueueDAO"() { - when: - taskResource.log("testTaskId", "test log") - - then: - 0 * queueDAO._ - } - - def "Verify getTaskLogs doesn't involve QueueDAO"() { - when: - taskResource.getTaskLogs("testTaskId") - - then: - 0 * queueDAO._ - } - - def "Verify getTask doesn't involve QueueDAO"() { - when: - taskResource.getTask("testTaskId") - - then: - 0 * queueDAO._ - } - - def "Verify getAllQueueDetails fails when QueueDAO is unavailable"() { - when: - taskResource.all() - - then: - 1 * queueDAO.queuesDetail() >> { throw new IllegalStateException("Queue queuesDetail failed from Spy") } - thrown(Exception) - } - - def "Verify getPollData doesn't involve QueueDAO"() { - when: - taskResource.getPollData("integration_test_1") - - then: - 0 * queueDAO.queuesDetail() - } - - def "Verify getAllPollData fails when QueueDAO is unavailable"() { - when: - taskResource.getAllPollData() - - then: - 1 * queueDAO.queuesDetail() >> { throw new IllegalStateException("Queue queuesDetail failed from Spy") } - thrown(Exception) - } - - def "Verify task search is not impacted by QueueDAO"() { - when: "We perform a search" - 
taskResource.search(0, 1, "", "", "") - - then: "Verify it doesn't involve QueueDAO" - 0 * queueDAO._ - } - - def "Verify task get external storage location is not impacted by QueueDAO"() { - when: - taskResource.getExternalStorageLocation("", "", "") - - then: "Verify it doesn't involve QueueDAO" - 0 * queueDAO._ - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy deleted file mode 100644 index fb5114849..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.resiliency - -import org.springframework.beans.factory.annotation.Autowired - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.reconciliation.WorkflowRepairService -import com.netflix.conductor.test.base.AbstractResiliencySpecification - -import spock.lang.Shared - -import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask - -class TaskResiliencySpec extends AbstractResiliencySpecification { - - @Autowired - WorkflowRepairService workflowRepairService - - @Shared - def SIMPLE_TWO_TASK_WORKFLOW = 'integration_test_wf' - - def setup() { - workflowTestUtil.taskDefinitions() - workflowTestUtil.registerWorkflows( - 'simple_workflow_1_integration_test.json' - ) - } - - def "Verify that a workflow recovers and completes on schedule task failure from queue push failure"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowExecutor.startWorkflow(SIMPLE_TWO_TASK_WORKFLOW, 1, - '', [:], null, null, null) - - then: "Retrieve the workflow" - def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - workflow.status == Workflow.WorkflowStatus.RUNNING - workflow.tasks.size() == 1 - workflow.tasks[0].taskType == 'integration_task_1' - workflow.tasks[0].status == Task.Status.SCHEDULED - def taskId = workflow.tasks[0].taskId - - // Simulate queue push failure when creating a new task, after completing first task - when: "The first task 'integration_task_1' is polled and completed" - def task1Try1 = 
workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - - then: "Verify that the task was polled and acknowledged" - 1 * queueDAO.pop(_, 1, _) >> Collections.singletonList(taskId) - 1 * queueDAO.ack(*_) >> true - 1 * queueDAO.push(*_) >> { throw new IllegalStateException("Queue push failed from Spy") } - verifyPolledAndAcknowledgedTask(task1Try1) - - and: "Ensure that the next task is SCHEDULED even after failing to push taskId message to queue" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - } - - when: "The second task 'integration_task_2' is polled for" - def task1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "Verify that the task was not polled, and the taskId doesn't exist in the queue" - task1Try2[0] == null - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.RUNNING - tasks.size() == 2 - tasks[0].taskType == 'integration_task_1' - tasks[0].status == Task.Status.COMPLETED - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.SCHEDULED - def currentTaskId = tasks[1].getTaskId() - !queueDAO.containsMessage("integration_task_2", currentTaskId) - } - - when: "Running a repair and decide on the workflow" - workflowRepairService.verifyAndRepairWorkflow(workflowInstanceId, true) - workflowExecutor.decide(workflowInstanceId) - workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - - then: "verify that the next scheduled task can be polled and executed successfully" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { - status == Workflow.WorkflowStatus.COMPLETED - 
tasks.size() == 2 - tasks[1].taskType == 'integration_task_2' - tasks[1].status == Task.Status.COMPLETED - } - } -} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy deleted file mode 100644 index 4ba25b476..000000000 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.util - -import javax.annotation.PostConstruct - -import org.apache.commons.lang3.StringUtils -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.stereotype.Component - -import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.workflow.WorkflowDef -import com.netflix.conductor.core.WorkflowContext -import com.netflix.conductor.core.exception.ApplicationException -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.model.WorkflowModel -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService - -import com.fasterxml.jackson.databind.ObjectMapper - -/** - * This is a helper class used to initialize task definitions required by the tests when loaded up. - * The task definitions that are loaded up in {@link WorkflowTestUtil#taskDefinitions()} method as part of the post construct of the bean. - * This class is intended to be used in the Spock integration tests and provides helper methods to: - *

      - *
    • Terminate all the running Workflows
    • - *
    • Get the persisted task definition based on the taskName
    • - *
    • pollAndFailTask
    • - *
    • pollAndCompleteTask
    • - *
    • verifyPolledAndAcknowledgedTask
    • - *
    - * - * Usage: Autowire this class in any Spock based specification: - * - * {@literal @}Autowired - * WorkflowTestUtil workflowTestUtil - * - */ -@Component -class WorkflowTestUtil { - - private final MetadataService metadataService - private final ExecutionService workflowExecutionService - private final WorkflowExecutor workflowExecutor - private final QueueDAO queueDAO - private final ObjectMapper objectMapper - private static final int RETRY_COUNT = 1 - private static final String TEMP_FILE_PATH = "/input.json" - private static final String DEFAULT_EMAIL_ADDRESS = "test@harness.com" - - @Autowired - WorkflowTestUtil(MetadataService metadataService, ExecutionService workflowExecutionService, - WorkflowExecutor workflowExecutor, QueueDAO queueDAO, ObjectMapper objectMapper) { - this.metadataService = metadataService - this.workflowExecutionService = workflowExecutionService - this.workflowExecutor = workflowExecutor - this.queueDAO = queueDAO - this.objectMapper = objectMapper - } - - /** - * This function registers all the taskDefinitions required to enable spock based integration testing - */ - @PostConstruct - void taskDefinitions() { - WorkflowContext.set(new WorkflowContext("integration_app")) - - (0..20).collect { "integration_task_$it" } - .findAll { !getPersistedTaskDefinition(it).isPresent() } - .collect { new TaskDef(it, it, DEFAULT_EMAIL_ADDRESS, 1, 120, 120) } - .forEach { metadataService.registerTaskDef([it]) } - - (0..4).collect { "integration_task_0_RT_$it" } - .findAll { !getPersistedTaskDefinition(it).isPresent() } - .collect { new TaskDef(it, it, DEFAULT_EMAIL_ADDRESS, 0, 120, 120) } - .forEach { metadataService.registerTaskDef([it]) } - - metadataService.registerTaskDef([new TaskDef('short_time_out', 'short_time_out', DEFAULT_EMAIL_ADDRESS, 1, 5, 5)]) - - //This taskWithResponseTimeOut is required by the integration test which exercises the response time out scenarios - TaskDef taskWithResponseTimeOut = new TaskDef() - 
taskWithResponseTimeOut.name = "task_rt" - taskWithResponseTimeOut.timeoutSeconds = 120 - taskWithResponseTimeOut.retryCount = RETRY_COUNT - taskWithResponseTimeOut.retryDelaySeconds = 0 - taskWithResponseTimeOut.responseTimeoutSeconds = 10 - taskWithResponseTimeOut.ownerEmail = DEFAULT_EMAIL_ADDRESS - - TaskDef optionalTask = new TaskDef() - optionalTask.setName("task_optional") - optionalTask.setTimeoutSeconds(5) - optionalTask.setRetryCount(1) - optionalTask.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY) - optionalTask.setRetryDelaySeconds(0) - optionalTask.setResponseTimeoutSeconds(5) - optionalTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) - - TaskDef simpleSubWorkflowTask = new TaskDef() - simpleSubWorkflowTask.setName('simple_task_in_sub_wf') - simpleSubWorkflowTask.setRetryCount(0) - simpleSubWorkflowTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) - - TaskDef subWorkflowTask = new TaskDef() - subWorkflowTask.setName('sub_workflow_task') - subWorkflowTask.setRetryCount(1) - subWorkflowTask.setResponseTimeoutSeconds(5) - subWorkflowTask.setRetryDelaySeconds(0) - subWorkflowTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) - - TaskDef waitTimeOutTask = new TaskDef() - waitTimeOutTask.name = 'waitTimeout' - waitTimeOutTask.timeoutSeconds = 2 - waitTimeOutTask.responseTimeoutSeconds = 2 - waitTimeOutTask.retryCount = 1 - waitTimeOutTask.timeoutPolicy = TaskDef.TimeoutPolicy.RETRY - waitTimeOutTask.retryDelaySeconds = 10 - waitTimeOutTask.ownerEmail = DEFAULT_EMAIL_ADDRESS - - TaskDef userTask = new TaskDef() - userTask.setName("user_task") - userTask.setTimeoutSeconds(20) - userTask.setResponseTimeoutSeconds(20) - userTask.setRetryCount(1) - userTask.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY) - userTask.setRetryDelaySeconds(10) - userTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) - - TaskDef concurrentExecutionLimitedTask = new TaskDef() - concurrentExecutionLimitedTask.name = "test_task_with_concurrency_limit" - concurrentExecutionLimitedTask.concurrentExecLimit = 1 - 
concurrentExecutionLimitedTask.ownerEmail = DEFAULT_EMAIL_ADDRESS - - TaskDef rateLimitedTask = new TaskDef() - rateLimitedTask.name = 'test_task_with_rateLimits' - rateLimitedTask.rateLimitFrequencyInSeconds = 10 - rateLimitedTask.rateLimitPerFrequency = 1 - rateLimitedTask.ownerEmail = DEFAULT_EMAIL_ADDRESS - - TaskDef rateLimitedSimpleTask = new TaskDef() - rateLimitedSimpleTask.name = 'test_simple_task_with_rateLimits' - rateLimitedSimpleTask.rateLimitFrequencyInSeconds = 10 - rateLimitedSimpleTask.rateLimitPerFrequency = 1 - rateLimitedSimpleTask.ownerEmail = DEFAULT_EMAIL_ADDRESS - - TaskDef eventTaskX = new TaskDef() - eventTaskX.name = 'eventX' - eventTaskX.timeoutSeconds = 1 - eventTaskX.responseTimeoutSeconds = 1 - eventTaskX.ownerEmail = DEFAULT_EMAIL_ADDRESS - - metadataService.registerTaskDef( - [taskWithResponseTimeOut, optionalTask, simpleSubWorkflowTask, - subWorkflowTask, waitTimeOutTask, userTask, eventTaskX, - rateLimitedTask, rateLimitedSimpleTask, concurrentExecutionLimitedTask] - ) - } - - /** - * This is an helper method that enables each test feature to run from a clean state - * This method is intended to be used in the cleanup() or cleanupSpec() method of any spock specification. - * By invoking this method all the running workflows are terminated. 
- * @throws Exception When unable to terminate any running workflow - */ - void clearWorkflows() throws Exception { - List workflowsWithVersion = metadataService.getWorkflowDefs() - .collect { workflowDef -> workflowDef.getName() + ":" + workflowDef.getVersion() } - for (String workflowWithVersion : workflowsWithVersion) { - String workflowName = StringUtils.substringBefore(workflowWithVersion, ":") - int version = Integer.parseInt(StringUtils.substringAfter(workflowWithVersion, ":")) - List running = workflowExecutionService.getRunningWorkflows(workflowName, version) - for (String workflowId : running) { - WorkflowModel workflow = workflowExecutor.getWorkflow(workflowId, false) - if (!workflow.getStatus().isTerminal()) { - workflowExecutor.terminateWorkflow(workflowId, "cleanup") - } - } - } - - queueDAO.queuesDetail().keySet() - .forEach { queueDAO.flush(it) } - - new FileOutputStream(this.getClass().getResource(TEMP_FILE_PATH).getPath()).close() - } - - /** - * A helper method to retrieve a task definition that is persisted - * @param taskDefName The name of the task for which the task definition is requested - * @return an Optional of the TaskDefinition - */ - Optional getPersistedTaskDefinition(String taskDefName) { - try { - return Optional.of(metadataService.getTaskDef(taskDefName)) - } catch (ApplicationException applicationException) { - if (applicationException.code == ApplicationException.Code.NOT_FOUND) { - return Optional.empty() - } else { - throw applicationException - } - } - } - - /** - * A helper methods that registers workflows based on the paths of the json file representing a workflow definition - * @param workflowJsonPaths a comma separated var ags of the paths of the workflow definitions - */ - void registerWorkflows(String... 
workflowJsonPaths) { - workflowJsonPaths.collect { readFile(it) } - .forEach { metadataService.updateWorkflowDef(it) } - } - - WorkflowDef readFile(String path) { - InputStream inputStream = getClass().getClassLoader().getResourceAsStream(path) - return objectMapper.readValue(inputStream, WorkflowDef.class) - } - - /** - * A helper method intended to be used in the when: block of the spock test feature - * This method is intended to be used to poll and update the task status as failed - * It also provides a delay to return if needed after the task has been updated to failed - * @param taskName name of the task that needs to be polled and failed - * @param workerId name of the worker id using which a task is polled - * @param failureReason the reason to fail the task that will added to the task update - * @param outputParams An optional output parameters if available will be added to the task before updating to failed - * @param waitAtEndSeconds an optional delay before the method returns, if the value is 0 skips the delay - * @return A Tuple of taskResult and acknowledgement of the poll - */ - Tuple pollAndFailTask(String taskName, String workerId, String failureReason, Map outputParams = null, int waitAtEndSeconds = 0) { - def polledIntegrationTask = workflowExecutionService.poll(taskName, workerId) - def taskResult = new TaskResult(polledIntegrationTask) - taskResult.status = TaskResult.Status.FAILED - taskResult.reasonForIncompletion = failureReason - if (outputParams) { - outputParams.forEach { k, v -> - taskResult.outputData[k] = v - } - } - workflowExecutionService.updateTask(taskResult) - return waitAtEndSecondsAndReturn(waitAtEndSeconds, polledIntegrationTask) - } - - /** - * A helper method to introduce delay and convert the polledIntegrationTask and ackPolledIntegrationTask - * into a tuple. 
This method is intended to be used by pollAndFailTask and pollAndCompleteTask - * @param waitAtEndSeconds The total seconds of delay before the method returns - * @param ackedTaskResult the task result created after ack - * @return A Tuple of polledTask and acknowledgement of the poll - */ - static Tuple waitAtEndSecondsAndReturn(int waitAtEndSeconds, Task polledIntegrationTask) { - if (waitAtEndSeconds > 0) { - Thread.sleep(waitAtEndSeconds * 1000) - } - return new Tuple(polledIntegrationTask) - } - - /** - * A helper method intended to be used in the when: block of the spock test feature - * This method is intended to be used to poll and update the task status as completed - * It also provides a delay to return if needed after the task has been updated to completed - * @param taskName name of the task that needs to be polled and completed - * @param workerId name of the worker id using which a task is polled - * @param outputParams An optional output parameters if available will be added to the task before updating to completed - * @param waitAtEndSeconds waitAtEndSeconds an optional delay before the method returns, if the value is 0 skips the delay - * @return A Tuple of polledTask and acknowledgement of the poll - */ - Tuple pollAndCompleteTask(String taskName, String workerId, Map outputParams = null, int waitAtEndSeconds = 0) { - def polledIntegrationTask = workflowExecutionService.poll(taskName, workerId) - if (polledIntegrationTask == null) { - return new Tuple(null, null) - } - def taskResult = new TaskResult(polledIntegrationTask) - taskResult.status = TaskResult.Status.COMPLETED - if (outputParams) { - outputParams.forEach { k, v -> - taskResult.outputData[k] = v - } - } - workflowExecutionService.updateTask(taskResult) - return waitAtEndSecondsAndReturn(waitAtEndSeconds, polledIntegrationTask) - } - - Tuple pollAndCompleteLargePayloadTask(String taskName, String workerId, String outputPayloadPath) { - def polledIntegrationTask = 
workflowExecutionService.poll(taskName, workerId) - def taskResult = new TaskResult(polledIntegrationTask) - taskResult.status = TaskResult.Status.COMPLETED - taskResult.outputData = null - taskResult.externalOutputPayloadStoragePath = outputPayloadPath - workflowExecutionService.updateTask(taskResult) - return new Tuple(polledIntegrationTask) - } - - /** - * A helper method intended to be used in the then: block of the spock test feature, ideally intended to be called after either: - * pollAndCompleteTask function or pollAndFailTask function - * @param completedTaskAndAck A Tuple of polledTask and acknowledgement of the poll - * @param expectedTaskInputParams a map of input params that are verified against the polledTask that is part of the completedTaskAndAck tuple - */ - static void verifyPolledAndAcknowledgedTask(Tuple completedTaskAndAck, Map expectedTaskInputParams = null) { - assert completedTaskAndAck[0]: "The task polled cannot be null" - def polledIntegrationTask = completedTaskAndAck[0] as Task - assert polledIntegrationTask - if (expectedTaskInputParams) { - expectedTaskInputParams.forEach { - k, v -> - assert polledIntegrationTask.inputData.containsKey(k) - assert polledIntegrationTask.inputData[k] == v - } - } - } - - static void verifyPolledAndAcknowledgedLargePayloadTask(Tuple completedTaskAndAck) { - assert completedTaskAndAck[0]: "The task polled cannot be null" - def polledIntegrationTask = completedTaskAndAck[0] as Task - assert polledIntegrationTask - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/ConductorTestApp.java b/test-harness/src/test/java/com/netflix/conductor/ConductorTestApp.java deleted file mode 100644 index 51cbc9ac5..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/ConductorTestApp.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor; - -import java.io.IOException; - -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; - -/** Copy of com.netflix.conductor.Conductor for use by @SpringBootTest in AbstractSpecification. */ - -// Prevents from the datasource beans to be loaded, AS they are needed only for specific databases. -// In case that SQL database is selected this class will be imported back in the appropriate -// database persistence module. -@SpringBootApplication(exclude = DataSourceAutoConfiguration.class) -public class ConductorTestApp { - - public static void main(String[] args) throws IOException { - SpringApplication.run(ConductorTestApp.class, args); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java deleted file mode 100644 index 3feeafe25..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration; - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.io.Reader; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Optional; - -import org.apache.http.HttpHost; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.test.context.TestPropertySource; -import org.testcontainers.elasticsearch.ElasticsearchContainer; -import org.testcontainers.utility.DockerImageName; - -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; - -@TestPropertySource( - properties = {"conductor.indexing.enabled=true", "conductor.elasticsearch.version=6"}) -public abstract class AbstractEndToEndTest { - - private static final Logger log = LoggerFactory.getLogger(AbstractEndToEndTest.class); - - private static final String TASK_DEFINITION_PREFIX = 
"task_"; - private static final String DEFAULT_DESCRIPTION = "description"; - // Represents null value deserialized from the redis in memory db - private static final String DEFAULT_NULL_VALUE = "null"; - protected static final String DEFAULT_EMAIL_ADDRESS = "test@harness.com"; - - private static final ElasticsearchContainer container = - new ElasticsearchContainer( - DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss") - .withTag("6.8.12")); // this should match the client version - - private static RestClient restClient; - - // Initialization happens in a static block so the container is initialized - // only once for all the sub-class tests in a CI environment - // container is stopped when JVM exits - // https://www.testcontainers.org/test_framework_integration/manual_lifecycle_control/#singleton-containers - static { - container.start(); - String httpHostAddress = container.getHttpHostAddress(); - System.setProperty("conductor.elasticsearch.url", "http://" + httpHostAddress); - log.info("Initialized Elasticsearch {}", container.getContainerId()); - } - - @BeforeClass - public static void initializeEs() { - String httpHostAddress = container.getHttpHostAddress(); - String host = httpHostAddress.split(":")[0]; - int port = Integer.parseInt(httpHostAddress.split(":")[1]); - - RestClientBuilder restClientBuilder = RestClient.builder(new HttpHost(host, port, "http")); - restClient = restClientBuilder.build(); - } - - @AfterClass - public static void cleanupEs() throws Exception { - // deletes all indices - Response beforeResponse = restClient.performRequest(new Request("GET", "/_cat/indices")); - Reader streamReader = new InputStreamReader(beforeResponse.getEntity().getContent()); - BufferedReader bufferedReader = new BufferedReader(streamReader); - - String line; - while ((line = bufferedReader.readLine()) != null) { - String[] fields = line.split("\\s"); - String endpoint = String.format("/%s", fields[2]); - - restClient.performRequest(new 
Request("DELETE", endpoint)); - } - - if (restClient != null) { - restClient.close(); - } - } - - @Test - public void testEphemeralWorkflowsWithStoredTasks() { - String workflowExecutionName = "testEphemeralWorkflow"; - - createAndRegisterTaskDefinitions("storedTaskDef", 5); - WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName); - WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); - WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); - workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2)); - - String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); - assertNotNull(workflowId); - - Workflow workflow = getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - } - - @Test - public void testEphemeralWorkflowsWithEphemeralTasks() { - String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; - - WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName); - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); - TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); - workflowTask2.setTaskDefinition(taskDefinition2); - workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2)); - - String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); - - assertNotNull(workflowId); - - Workflow workflow = getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - List ephemeralTasks = ephemeralWorkflow.getTasks(); - 
assertEquals(2, ephemeralTasks.size()); - for (WorkflowTask ephemeralTask : ephemeralTasks) { - assertNotNull(ephemeralTask.getTaskDefinition()); - } - } - - @Test - public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() { - createAndRegisterTaskDefinitions("storedTask", 1); - - WorkflowDef workflowDefinition = - createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); - - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - - WorkflowTask workflowTask2 = createWorkflowTask("storedTask0"); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; - - String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); - assertNotNull(workflowId); - - Workflow workflow = getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - TaskDef storedTaskDefinition = getTaskDefinition("storedTask0"); - List tasks = ephemeralWorkflow.getTasks(); - assertEquals(2, tasks.size()); - assertEquals(workflowTask1, tasks.get(0)); - TaskDef currentStoredTaskDefinition = tasks.get(1).getTaskDefinition(); - assertNotNull(currentStoredTaskDefinition); - assertEquals(storedTaskDefinition, currentStoredTaskDefinition); - } - - @Test - public void testEventHandler() { - String eventName = "conductor:test_workflow:complete_task_with_event"; - EventHandler eventHandler = new EventHandler(); - eventHandler.setName("test_complete_task_event"); - EventHandler.Action completeTaskAction = new EventHandler.Action(); - completeTaskAction.setAction(EventHandler.Action.Type.complete_task); - completeTaskAction.setComplete_task(new EventHandler.TaskDetails()); - 
completeTaskAction.getComplete_task().setTaskRefName("test_task"); - completeTaskAction.getComplete_task().setWorkflowId("test_id"); - completeTaskAction.getComplete_task().setOutput(new HashMap<>()); - eventHandler.getActions().add(completeTaskAction); - eventHandler.setEvent(eventName); - eventHandler.setActive(true); - registerEventHandler(eventHandler); - - Iterator it = getEventHandlers(eventName, true); - EventHandler result = it.next(); - assertFalse(it.hasNext()); - assertEquals(eventHandler.getName(), result.getName()); - } - - protected WorkflowTask createWorkflowTask(String name) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName(name); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName(name); - workflowTask.setDescription(getDefaultDescription(name)); - workflowTask.setDynamicTaskNameParam(DEFAULT_NULL_VALUE); - workflowTask.setCaseValueParam(DEFAULT_NULL_VALUE); - workflowTask.setCaseExpression(DEFAULT_NULL_VALUE); - workflowTask.setDynamicForkTasksParam(DEFAULT_NULL_VALUE); - workflowTask.setDynamicForkTasksInputParamName(DEFAULT_NULL_VALUE); - workflowTask.setSink(DEFAULT_NULL_VALUE); - workflowTask.setEvaluatorType(DEFAULT_NULL_VALUE); - workflowTask.setExpression(DEFAULT_NULL_VALUE); - return workflowTask; - } - - protected TaskDef createTaskDefinition(String name) { - TaskDef taskDefinition = new TaskDef(); - taskDefinition.setName(name); - return taskDefinition; - } - - protected WorkflowDef createWorkflowDefinition(String workflowName) { - WorkflowDef workflowDefinition = new WorkflowDef(); - workflowDefinition.setName(workflowName); - workflowDefinition.setDescription(getDefaultDescription(workflowName)); - workflowDefinition.setFailureWorkflow(DEFAULT_NULL_VALUE); - workflowDefinition.setOwnerEmail(DEFAULT_EMAIL_ADDRESS); - return workflowDefinition; - } - - protected List createAndRegisterTaskDefinitions( - String prefixTaskDefinition, int numberOfTaskDefinitions) { - String prefix 
= Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); - List definitions = new LinkedList<>(); - for (int i = 0; i < numberOfTaskDefinitions; i++) { - TaskDef def = - new TaskDef( - prefix + i, - "task " + i + DEFAULT_DESCRIPTION, - DEFAULT_EMAIL_ADDRESS, - 3, - 60, - 60); - def.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY); - definitions.add(def); - } - this.registerTaskDefinitions(definitions); - return definitions; - } - - private String getDefaultDescription(String nameResource) { - return nameResource + " " + DEFAULT_DESCRIPTION; - } - - protected abstract String startWorkflow( - String workflowExecutionName, WorkflowDef workflowDefinition); - - protected abstract Workflow getWorkflow(String workflowId, boolean includeTasks); - - protected abstract TaskDef getTaskDefinition(String taskName); - - protected abstract void registerTaskDefinitions(List taskDefinitionList); - - protected abstract void registerWorkflowDefinition(WorkflowDef workflowDefinition); - - protected abstract void registerEventHandler(EventHandler eventHandler); - - protected abstract Iterator getEventHandlers(String event, boolean activeOnly); -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java deleted file mode 100644 index 3d4a23e1d..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration.grpc; - -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.client.grpc.EventClient; -import com.netflix.conductor.client.grpc.MetadataClient; -import com.netflix.conductor.client.grpc.TaskClient; -import com.netflix.conductor.client.grpc.WorkflowClient; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.test.integration.AbstractEndToEndTest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import 
static org.junit.Assert.assertTrue; - -@RunWith(SpringRunner.class) -@SpringBootTest( - properties = {"conductor.grpc-server.enabled=true", "conductor.grpc-server.port=8092"}) -@TestPropertySource(locations = "classpath:application-integrationtest.properties") -public abstract class AbstractGrpcEndToEndTest extends AbstractEndToEndTest { - - protected static TaskClient taskClient; - protected static WorkflowClient workflowClient; - protected static MetadataClient metadataClient; - protected static EventClient eventClient; - - @Override - protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { - StartWorkflowRequest workflowRequest = - new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - return workflowClient.startWorkflow(workflowRequest); - } - - @Override - protected Workflow getWorkflow(String workflowId, boolean includeTasks) { - return workflowClient.getWorkflow(workflowId, includeTasks); - } - - @Override - protected TaskDef getTaskDefinition(String taskName) { - return metadataClient.getTaskDef(taskName); - } - - @Override - protected void registerTaskDefinitions(List taskDefinitionList) { - metadataClient.registerTaskDefs(taskDefinitionList); - } - - @Override - protected void registerWorkflowDefinition(WorkflowDef workflowDefinition) { - metadataClient.registerWorkflowDef(workflowDefinition); - } - - @Override - protected void registerEventHandler(EventHandler eventHandler) { - eventClient.registerEventHandler(eventHandler); - } - - @Override - protected Iterator getEventHandlers(String event, boolean activeOnly) { - return eventClient.getEventHandlers(event, activeOnly); - } - - @Test - public void testAll() throws Exception { - assertNotNull(taskClient); - List defs = new LinkedList<>(); - for (int i = 0; i < 5; i++) { - TaskDef def = new TaskDef("t" + i, "task " + i, DEFAULT_EMAIL_ADDRESS, 3, 60, 60); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - defs.add(def); - 
} - metadataClient.registerTaskDefs(defs); - - for (int i = 0; i < 5; i++) { - final String taskName = "t" + i; - TaskDef def = metadataClient.getTaskDef(taskName); - assertNotNull(def); - assertEquals(taskName, def.getName()); - } - - WorkflowDef def = createWorkflowDefinition("test"); - WorkflowTask t0 = createWorkflowTask("t0"); - WorkflowTask t1 = createWorkflowTask("t1"); - - def.getTasks().add(t0); - def.getTasks().add(t1); - - metadataClient.registerWorkflowDef(def); - WorkflowDef found = metadataClient.getWorkflowDef(def.getName(), null); - assertNotNull(found); - assertEquals(def, found); - - String correlationId = "test_corr_id"; - StartWorkflowRequest startWf = new StartWorkflowRequest(); - startWf.setName(def.getName()); - startWf.setCorrelationId(correlationId); - - String workflowId = workflowClient.startWorkflow(startWf); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, false); - assertEquals(0, workflow.getTasks().size()); - assertEquals(workflowId, workflow.getWorkflowId()); - - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); - assertEquals(workflowId, workflow.getWorkflowId()); - - List runningIds = - workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); - assertNotNull(runningIds); - assertEquals(1, runningIds.size()); - assertEquals(workflowId, runningIds.get(0)); - - List polled = - taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100); - assertNotNull(polled); - assertEquals(0, polled.size()); - - polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertEquals(1, polled.size()); - assertEquals(t0.getName(), polled.get(0).getTaskDefName()); - Task task = polled.get(0); - - 
task.getOutputData().put("key1", "value1"); - task.setStatus(Status.COMPLETED); - taskClient.updateTask(new TaskResult(task)); - - polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertTrue(polled.toString(), polled.isEmpty()); - - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); - assertEquals(t1.getTaskReferenceName(), workflow.getTasks().get(1).getReferenceTaskName()); - assertEquals(Status.COMPLETED, workflow.getTasks().get(0).getStatus()); - assertEquals(Status.SCHEDULED, workflow.getTasks().get(1).getStatus()); - - Task taskById = taskClient.getTaskDetails(task.getTaskId()); - assertNotNull(taskById); - assertEquals(task.getTaskId(), taskById.getTaskId()); - - Thread.sleep(1000); - SearchResult searchResult = - workflowClient.search("workflowType='" + def.getName() + "'"); - assertNotNull(searchResult); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(workflow.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - - SearchResult searchResultV2 = - workflowClient.searchV2("workflowType='" + def.getName() + "'"); - assertNotNull(searchResultV2); - assertEquals(1, searchResultV2.getTotalHits()); - assertEquals(workflow.getWorkflowId(), searchResultV2.getResults().get(0).getWorkflowId()); - - SearchResult searchResultAdvanced = - workflowClient.search(0, 1, null, null, "workflowType='" + def.getName() + "'"); - assertNotNull(searchResultAdvanced); - assertEquals(1, searchResultAdvanced.getTotalHits()); - assertEquals( - workflow.getWorkflowId(), searchResultAdvanced.getResults().get(0).getWorkflowId()); - - SearchResult searchResultV2Advanced = - workflowClient.searchV2(0, 1, null, null, "workflowType='" + def.getName() + "'"); - 
assertNotNull(searchResultV2Advanced); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals( - workflow.getWorkflowId(), - searchResultV2Advanced.getResults().get(0).getWorkflowId()); - - SearchResult taskSearchResult = - taskClient.search("taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResult); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals(t0.getName(), taskSearchResult.getResults().get(0).getTaskDefName()); - - SearchResult taskSearchResultAdvanced = - taskClient.search(0, 1, null, null, "taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultAdvanced); - assertEquals(1, taskSearchResultAdvanced.getTotalHits()); - assertEquals(t0.getName(), taskSearchResultAdvanced.getResults().get(0).getTaskDefName()); - - SearchResult taskSearchResultV2 = - taskClient.searchV2("taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultV2); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals( - t0.getTaskReferenceName(), - taskSearchResultV2.getResults().get(0).getReferenceTaskName()); - - SearchResult taskSearchResultV2Advanced = - taskClient.searchV2(0, 1, null, null, "taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultV2Advanced); - assertEquals(1, taskSearchResultV2Advanced.getTotalHits()); - assertEquals( - t0.getTaskReferenceName(), - taskSearchResultV2Advanced.getResults().get(0).getReferenceTaskName()); - - workflowClient.terminateWorkflow(workflowId, "terminate reason"); - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); - - workflowClient.restart(workflowId, false); - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - } -} diff --git 
a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java deleted file mode 100644 index 0353cd048..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration.grpc; - -import org.junit.Before; - -import com.netflix.conductor.client.grpc.EventClient; -import com.netflix.conductor.client.grpc.MetadataClient; -import com.netflix.conductor.client.grpc.TaskClient; -import com.netflix.conductor.client.grpc.WorkflowClient; - -public class GrpcEndToEndTest extends AbstractGrpcEndToEndTest { - - @Before - public void init() { - taskClient = new TaskClient("localhost", 8092); - workflowClient = new WorkflowClient("localhost", 8092); - metadataClient = new MetadataClient("localhost", 8092); - eventClient = new EventClient("localhost", 8092); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java deleted file mode 100644 index abb2371f6..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java +++ /dev/null @@ -1,523 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration.http; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.stream.Collectors; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; -import org.springframework.boot.web.server.LocalServerPort; -import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringRunner; - -import com.netflix.conductor.client.exception.ConductorClientException; -import com.netflix.conductor.client.http.EventClient; -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import 
com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.validation.ValidationError; -import com.netflix.conductor.test.integration.AbstractEndToEndTest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -@RunWith(SpringRunner.class) -@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT) -@TestPropertySource(locations = "classpath:application-integrationtest.properties") -public abstract class AbstractHttpEndToEndTest extends AbstractEndToEndTest { - - @LocalServerPort protected int port; - - protected static String apiRoot; - - protected static TaskClient taskClient; - protected static WorkflowClient workflowClient; - protected static MetadataClient metadataClient; - protected static EventClient eventClient; - - @Override - protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { - StartWorkflowRequest workflowRequest = - new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - return workflowClient.startWorkflow(workflowRequest); - } - - @Override - protected Workflow getWorkflow(String workflowId, boolean includeTasks) { - return workflowClient.getWorkflow(workflowId, includeTasks); - } - - @Override - protected TaskDef getTaskDefinition(String taskName) { - return metadataClient.getTaskDef(taskName); - } - - @Override - protected void registerTaskDefinitions(List taskDefinitionList) { - metadataClient.registerTaskDefs(taskDefinitionList); - } - - @Override - protected void registerWorkflowDefinition(WorkflowDef workflowDefinition) { - metadataClient.registerWorkflowDef(workflowDefinition); - } - - @Override - protected void registerEventHandler(EventHandler eventHandler) { - eventClient.registerEventHandler(eventHandler); - } - - @Override - 
protected Iterator getEventHandlers(String event, boolean activeOnly) { - return eventClient.getEventHandlers(event, activeOnly).iterator(); - } - - @Test - public void testAll() throws Exception { - createAndRegisterTaskDefinitions("t", 5); - - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - def.setOwnerEmail(DEFAULT_EMAIL_ADDRESS); - WorkflowTask t0 = new WorkflowTask(); - t0.setName("t0"); - t0.setWorkflowTaskType(TaskType.SIMPLE); - t0.setTaskReferenceName("t0"); - - WorkflowTask t1 = new WorkflowTask(); - t1.setName("t1"); - t1.setWorkflowTaskType(TaskType.SIMPLE); - t1.setTaskReferenceName("t1"); - - def.getTasks().add(t0); - def.getTasks().add(t1); - - metadataClient.registerWorkflowDef(def); - WorkflowDef workflowDefinitionFromSystem = - metadataClient.getWorkflowDef(def.getName(), null); - assertNotNull(workflowDefinitionFromSystem); - assertEquals(def, workflowDefinitionFromSystem); - - String correlationId = "test_corr_id"; - StartWorkflowRequest startWorkflowRequest = - new StartWorkflowRequest() - .withName(def.getName()) - .withCorrelationId(correlationId) - .withPriority(50) - .withInput(new HashMap<>()); - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, false); - assertEquals(0, workflow.getTasks().size()); - assertEquals(workflowId, workflow.getWorkflowId()); - - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); - assertEquals(workflowId, workflow.getWorkflowId()); - - int queueSize = taskClient.getQueueSizeForTask(workflow.getTasks().get(0).getTaskType()); - assertEquals(1, queueSize); - - List runningIds = - workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); - 
assertNotNull(runningIds); - assertEquals(1, runningIds.size()); - assertEquals(workflowId, runningIds.get(0)); - - List polled = - taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100); - assertNotNull(polled); - assertEquals(0, polled.size()); - - polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertEquals(1, polled.size()); - assertEquals(t0.getName(), polled.get(0).getTaskDefName()); - Task task = polled.get(0); - - task.getOutputData().put("key1", "value1"); - task.setStatus(Status.COMPLETED); - taskClient.updateTask(new TaskResult(task)); - - polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertTrue(polled.toString(), polled.isEmpty()); - - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); - assertEquals(t1.getTaskReferenceName(), workflow.getTasks().get(1).getReferenceTaskName()); - assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus()); - assertEquals(Task.Status.SCHEDULED, workflow.getTasks().get(1).getStatus()); - - Task taskById = taskClient.getTaskDetails(task.getTaskId()); - assertNotNull(taskById); - assertEquals(task.getTaskId(), taskById.getTaskId()); - - queueSize = taskClient.getQueueSizeForTask(workflow.getTasks().get(1).getTaskType()); - assertEquals(1, queueSize); - - Thread.sleep(1000); - SearchResult searchResult = - workflowClient.search("workflowType='" + def.getName() + "'"); - assertNotNull(searchResult); - assertEquals(1, searchResult.getTotalHits()); - assertEquals(workflow.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); - - SearchResult searchResultV2 = - workflowClient.searchV2("workflowType='" + def.getName() + "'"); - 
assertNotNull(searchResultV2); - assertEquals(1, searchResultV2.getTotalHits()); - assertEquals(workflow.getWorkflowId(), searchResultV2.getResults().get(0).getWorkflowId()); - - SearchResult searchResultAdvanced = - workflowClient.search(0, 1, null, null, "workflowType='" + def.getName() + "'"); - assertNotNull(searchResultAdvanced); - assertEquals(1, searchResultAdvanced.getTotalHits()); - assertEquals( - workflow.getWorkflowId(), searchResultAdvanced.getResults().get(0).getWorkflowId()); - - SearchResult searchResultV2Advanced = - workflowClient.searchV2(0, 1, null, null, "workflowType='" + def.getName() + "'"); - assertNotNull(searchResultV2Advanced); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals( - workflow.getWorkflowId(), - searchResultV2Advanced.getResults().get(0).getWorkflowId()); - - SearchResult taskSearchResult = - taskClient.search("taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResult); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals(t0.getName(), taskSearchResult.getResults().get(0).getTaskDefName()); - - SearchResult taskSearchResultAdvanced = - taskClient.search(0, 1, null, null, "taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultAdvanced); - assertEquals(1, taskSearchResultAdvanced.getTotalHits()); - assertEquals(t0.getName(), taskSearchResultAdvanced.getResults().get(0).getTaskDefName()); - - SearchResult taskSearchResultV2 = - taskClient.searchV2("taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultV2); - assertEquals(1, searchResultV2Advanced.getTotalHits()); - assertEquals( - t0.getTaskReferenceName(), - taskSearchResultV2.getResults().get(0).getReferenceTaskName()); - - SearchResult taskSearchResultV2Advanced = - taskClient.searchV2(0, 1, null, null, "taskType='" + t0.getName() + "'"); - assertNotNull(taskSearchResultV2Advanced); - assertEquals(1, taskSearchResultV2Advanced.getTotalHits()); - assertEquals( - t0.getTaskReferenceName(), - 
taskSearchResultV2Advanced.getResults().get(0).getReferenceTaskName()); - - workflowClient.terminateWorkflow(workflowId, "terminate reason"); - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); - - workflowClient.restart(workflowId, false); - workflow = workflowClient.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - workflowClient.skipTaskFromWorkflow(workflowId, "t1"); - } - - @Test(expected = ConductorClientException.class) - public void testMetadataWorkflowDefinition() { - String workflowDefName = "testWorkflowDefMetadata"; - WorkflowDef def = new WorkflowDef(); - def.setName(workflowDefName); - def.setVersion(1); - WorkflowTask t0 = new WorkflowTask(); - t0.setName("t0"); - t0.setWorkflowTaskType(TaskType.SIMPLE); - t0.setTaskReferenceName("t0"); - WorkflowTask t1 = new WorkflowTask(); - t1.setName("t1"); - t1.setWorkflowTaskType(TaskType.SIMPLE); - t1.setTaskReferenceName("t1"); - def.getTasks().add(t0); - def.getTasks().add(t1); - - metadataClient.registerWorkflowDef(def); - metadataClient.unregisterWorkflowDef(workflowDefName, 1); - - try { - metadataClient.getWorkflowDef(workflowDefName, 1); - } catch (ConductorClientException e) { - int statusCode = e.getStatus(); - String errorMessage = e.getMessage(); - boolean retryable = e.isRetryable(); - assertEquals(404, statusCode); - assertEquals( - "No such workflow found by name: testWorkflowDefMetadata, version: 1", - errorMessage); - assertFalse(retryable); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testInvalidResource() { - MetadataClient metadataClient = new MetadataClient(); - metadataClient.setRootURI(String.format("%sinvalid", apiRoot)); - WorkflowDef def = new WorkflowDef(); - def.setName("testWorkflowDel"); - def.setVersion(1); - try { - 
metadataClient.registerWorkflowDef(def); - } catch (ConductorClientException e) { - int statusCode = e.getStatus(); - boolean retryable = e.isRetryable(); - assertEquals(404, statusCode); - assertFalse(retryable); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testUpdateWorkflow() { - TaskDef taskDef = new TaskDef(); - taskDef.setName("taskUpdate"); - ArrayList tasks = new ArrayList<>(); - tasks.add(taskDef); - metadataClient.registerTaskDefs(tasks); - - WorkflowDef def = new WorkflowDef(); - def.setName("testWorkflowDel"); - def.setVersion(1); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("taskUpdate"); - workflowTask.setTaskReferenceName("taskUpdate"); - List workflowTaskList = new ArrayList<>(); - workflowTaskList.add(workflowTask); - def.setTasks(workflowTaskList); - List workflowList = new ArrayList<>(); - workflowList.add(def); - metadataClient.registerWorkflowDef(def); - - def.setVersion(2); - metadataClient.updateWorkflowDefs(workflowList); - WorkflowDef def1 = metadataClient.getWorkflowDef(def.getName(), 2); - assertNotNull(def1); - try { - metadataClient.getTaskDef("test"); - } catch (ConductorClientException e) { - int statuCode = e.getStatus(); - assertEquals(404, statuCode); - assertEquals("No such taskType found by name: test", e.getMessage()); - assertFalse(e.isRetryable()); - throw e; - } - } - - @Test(expected = IllegalArgumentException.class) - public void testStartWorkflow() { - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - try { - workflowClient.startWorkflow(startWorkflowRequest); - } catch (IllegalArgumentException e) { - assertEquals("Workflow name cannot be null or empty", e.getMessage()); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testUpdateTask() { - TaskResult taskResult = new TaskResult(); - try { - taskClient.updateTask(taskResult); - } catch (ConductorClientException e) { - assertEquals(400, 
e.getStatus()); - assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertEquals(2, errors.size()); - assertTrue(errorMessages.contains("Workflow Id cannot be null or empty")); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testGetWorfklowNotFound() { - try { - workflowClient.getWorkflow("w123", true); - } catch (ConductorClientException e) { - assertEquals(404, e.getStatus()); - assertEquals("No such workflow found by id: w123", e.getMessage()); - assertFalse(e.isRetryable()); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testEmptyCreateWorkflowDef() { - try { - WorkflowDef workflowDef = new WorkflowDef(); - metadataClient.registerWorkflowDef(workflowDef); - } catch (ConductorClientException e) { - assertEquals(400, e.getStatus()); - assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testUpdateWorkflowDef() { - try { - WorkflowDef workflowDef = new WorkflowDef(); - List workflowDefList = new ArrayList<>(); - workflowDefList.add(workflowDef); - metadataClient.updateWorkflowDefs(workflowDefList); - } catch (ConductorClientException e) { - assertEquals(400, e.getStatus()); - assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List 
errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertEquals(3, errors.size()); - assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); - assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(errorMessages.contains("ownerEmail cannot be empty")); - throw e; - } - } - - @Test - public void testTaskByTaskId() { - try { - taskClient.getTaskDetails("test999"); - } catch (ConductorClientException e) { - assertEquals(404, e.getStatus()); - assertEquals("No such task found by taskId: test999", e.getMessage()); - } - } - - @Test - public void testListworkflowsByCorrelationId() { - workflowClient.getWorkflows("test", "test12", false, false); - } - - @Test(expected = ConductorClientException.class) - public void testCreateInvalidWorkflowDef() { - try { - WorkflowDef workflowDef = new WorkflowDef(); - List workflowDefList = new ArrayList<>(); - workflowDefList.add(workflowDef); - metadataClient.registerWorkflowDef(workflowDef); - } catch (ConductorClientException e) { - assertEquals(3, e.getValidationErrors().size()); - assertEquals(400, e.getStatus()); - assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); - assertTrue(errorMessages.contains("ownerEmail cannot be empty")); - throw e; - } - } - - @Test(expected = ConductorClientException.class) - public void testUpdateTaskDefNameNull() { - TaskDef taskDef = new TaskDef(); - try { - metadataClient.updateTaskDef(taskDef); - } catch (ConductorClientException e) { - assertEquals(2, e.getValidationErrors().size()); - assertEquals(400, e.getStatus()); - 
assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertTrue(errorMessages.contains("TaskDef name cannot be null or empty")); - assertTrue(errorMessages.contains("ownerEmail cannot be empty")); - throw e; - } - } - - @Test(expected = IllegalArgumentException.class) - public void testGetTaskDefNotExisting() { - metadataClient.getTaskDef(""); - } - - @Test(expected = ConductorClientException.class) - public void testUpdateWorkflowDefNameNull() { - WorkflowDef workflowDef = new WorkflowDef(); - List list = new ArrayList<>(); - list.add(workflowDef); - try { - metadataClient.updateWorkflowDefs(list); - } catch (ConductorClientException e) { - assertEquals(3, e.getValidationErrors().size()); - assertEquals(400, e.getStatus()); - assertEquals("Validation failed, check below errors for detail.", e.getMessage()); - assertFalse(e.isRetryable()); - List errors = e.getValidationErrors(); - List errorMessages = - errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); - assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); - assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); - assertTrue(errorMessages.contains("ownerEmail cannot be empty")); - throw e; - } - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java deleted file mode 100644 index 0dded2cdd..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.integration.http; - -import org.junit.Before; - -import com.netflix.conductor.client.http.EventClient; -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; - -public class HttpEndToEndTest extends AbstractHttpEndToEndTest { - - @Before - public void init() { - apiRoot = String.format("http://localhost:%d/api/", port); - - taskClient = new TaskClient(); - taskClient.setRootURI(apiRoot); - - workflowClient = new WorkflowClient(); - workflowClient.setRootURI(apiRoot); - - metadataClient = new MetadataClient(); - metadataClient.setRootURI(apiRoot); - - eventClient = new EventClient(); - eventClient.setRootURI(apiRoot); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java b/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java deleted file mode 100644 index 2351f2e51..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright 2021 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.utils; - -import java.io.*; -import java.nio.file.Files; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.ExternalStorageLocation; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SIMPLE; -import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; - -/** A {@link ExternalPayloadStorage} implementation that stores payload in file. 
*/ -@ConditionalOnProperty(name = "conductor.external-payload-storage.type", havingValue = "mock") -@Component -public class MockExternalPayloadStorage implements ExternalPayloadStorage { - - private static final Logger LOGGER = LoggerFactory.getLogger(MockExternalPayloadStorage.class); - - private final ObjectMapper objectMapper; - private final File payloadDir; - - @Autowired - public MockExternalPayloadStorage(ObjectMapper objectMapper) throws IOException { - this.objectMapper = objectMapper; - this.payloadDir = Files.createTempDirectory("payloads").toFile(); - LOGGER.info( - "{} initialized in directory: {}", - this.getClass().getSimpleName(), - payloadDir.getAbsolutePath()); - } - - @Override - public ExternalStorageLocation getLocation( - Operation operation, PayloadType payloadType, String path) { - ExternalStorageLocation location = new ExternalStorageLocation(); - location.setPath(UUID.randomUUID() + ".json"); - return location; - } - - @Override - public void upload(String path, InputStream payload, long payloadSize) { - File file = new File(payloadDir, path); - String filePath = file.getAbsolutePath(); - try { - if (!file.exists() && file.createNewFile()) { - LOGGER.debug("Created file: {}", filePath); - } - IOUtils.copy(payload, new FileOutputStream(file)); - LOGGER.debug("Written to {}", filePath); - } catch (IOException e) { - // just handle this exception here and return empty map so that test will fail in case - // this exception is thrown - LOGGER.error("Error writing to {}", filePath); - } finally { - try { - if (payload != null) { - payload.close(); - } - } catch (IOException e) { - LOGGER.warn("Unable to close input stream when writing to file"); - } - } - } - - @Override - public InputStream download(String path) { - try { - LOGGER.debug("Reading from {}", path); - return new FileInputStream(new File(payloadDir, path)); - } catch (IOException e) { - LOGGER.error("Error reading {}", path, e); - return null; - } - } - - public void upload(String 
path, Map payload) { - try { - InputStream bais = new ByteArrayInputStream(objectMapper.writeValueAsBytes(payload)); - upload(path, bais, 0); - } catch (IOException e) { - LOGGER.error("Error serializing map to json", e); - } - } - - public InputStream readOutputDotJson() { - return MockExternalPayloadStorage.class.getResourceAsStream("/output.json"); - } - - @SuppressWarnings("unchecked") - public Map curateDynamicForkLargePayload() { - Map dynamicForkLargePayload = new HashMap<>(); - try { - InputStream inputStream = readOutputDotJson(); - Map largePayload = objectMapper.readValue(inputStream, Map.class); - - WorkflowTask simpleWorkflowTask = new WorkflowTask(); - simpleWorkflowTask.setName("integration_task_10"); - simpleWorkflowTask.setTaskReferenceName("t10"); - simpleWorkflowTask.setType(TASK_TYPE_SIMPLE); - simpleWorkflowTask.setInputParameters( - Collections.singletonMap("p1", "${workflow.input.imageType}")); - - WorkflowDef subWorkflowDef = new WorkflowDef(); - subWorkflowDef.setName("one_task_workflow"); - subWorkflowDef.setVersion(1); - subWorkflowDef.setTasks(Collections.singletonList(simpleWorkflowTask)); - - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("one_task_workflow"); - subWorkflowParams.setVersion(1); - subWorkflowParams.setWorkflowDef(subWorkflowDef); - - WorkflowTask subWorkflowTask = new WorkflowTask(); - subWorkflowTask.setName("large_payload_subworkflow"); - subWorkflowTask.setType(TASK_TYPE_SUB_WORKFLOW); - subWorkflowTask.setTaskReferenceName("large_payload_subworkflow"); - subWorkflowTask.setInputParameters(largePayload); - subWorkflowTask.setSubWorkflowParam(subWorkflowParams); - - dynamicForkLargePayload.put("dynamicTasks", List.of(subWorkflowTask)); - dynamicForkLargePayload.put( - "dynamicTasksInput", Map.of("large_payload_subworkflow", largePayload)); - } catch (IOException e) { - // just handle this exception here and return empty map so that test will fail in case - // this exception 
is thrown - } - return dynamicForkLargePayload; - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java b/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java deleted file mode 100644 index d0af23206..000000000 --- a/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2022 Netflix, Inc. - *

    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.test.utils; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import com.netflix.conductor.model.TaskModel; -import com.netflix.conductor.model.WorkflowModel; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; - -@Component(UserTask.NAME) -public class UserTask extends WorkflowSystemTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(UserTask.class); - - public static final String NAME = "USER_TASK"; - - private final ObjectMapper objectMapper; - - private static final TypeReference>>> - mapStringListObjects = new TypeReference<>() {}; - - @Autowired - public UserTask(ObjectMapper objectMapper) { - super(NAME); - this.objectMapper = objectMapper; - LOGGER.info("Initialized system task - {}", getClass().getCanonicalName()); - } - - @Override - public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - if (task.getWorkflowTask().isAsyncComplete()) { - task.setStatus(TaskModel.Status.IN_PROGRESS); - } else { - Map>> map = - objectMapper.convertValue(task.getInputData(), 
mapStringListObjects); - Map output = new HashMap<>(); - Map> defaultLargeInput = new HashMap<>(); - defaultLargeInput.put("TEST_SAMPLE", Collections.singletonList("testDefault")); - output.put( - "size", - map.getOrDefault("largeInput", defaultLargeInput).get("TEST_SAMPLE").size()); - task.setOutputData(output); - task.setStatus(TaskModel.Status.COMPLETED); - } - } - - @Override - public boolean isAsync() { - return true; - } -} diff --git a/test-harness/src/test/resources/application-integrationtest.properties b/test-harness/src/test/resources/application-integrationtest.properties deleted file mode 100644 index 3c93ecadb..000000000 --- a/test-harness/src/test/resources/application-integrationtest.properties +++ /dev/null @@ -1,55 +0,0 @@ -# -# /* -# * Copyright 2021 Netflix, Inc. -# *

    -# * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with -# * the License. You may obtain a copy of the License at -# *

    -# * http://www.apache.org/licenses/LICENSE-2.0 -# *

    -# * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -# * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -# * specific language governing permissions and limitations under the License. -# */ -# - -conductor.db.type=memory -conductor.workflow-execution-lock.type=local_only -conductor.external-payload-storage.type=mock -conductor.indexing.enabled=false - -conductor.app.stack=test -conductor.app.appId=conductor - -conductor.app.workflow-offset-timeout=30s - -conductor.system-task-workers.enabled=false -conductor.app.system-task-worker-callback-duration=0 - -conductor.app.event-message-indexing-enabled=true -conductor.app.event-execution-indexing-enabled=true - -conductor.workflow-reconciler.enabled=true -conductor.workflow-repair-service.enabled=false - -conductor.app.workflow-execution-lock-enabled=false - -conductor.app.workflow-input-payload-size-threshold=10KB -conductor.app.max-workflow-input-payload-size-threshold=10240KB -conductor.app.workflow-output-payload-size-threshold=10KB -conductor.app.max-workflow-output-payload-size-threshold=10240KB -conductor.app.task-input-payload-size-threshold=10KB -conductor.app.max-task-input-payload-size-threshold=10240KB -conductor.app.task-output-payload-size-threshold=10KB -conductor.app.max-task-output-payload-size-threshold=10240KB -conductor.app.max-workflow-variables-payload-size-threshold=2KB - -conductor.redis.availability-zone=us-east-1c -conductor.redis.data-center-region=us-east-1 -conductor.redis.workflow-namespace-prefix=integration-test -conductor.redis.queue-namespace-prefix=integtest - -conductor.elasticsearch.index-prefix=conductor -conductor.elasticsearch.cluster-health-color=yellow - -management.metrics.export.datadog.enabled=false diff --git a/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json 
b/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json deleted file mode 100644 index b63724743..000000000 --- a/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "test_concurrency_limits_workflow", - "version": 1, - "tasks": [ - { - "name": "test_task_with_concurrency_limit", - "taskReferenceName": "test_task_with_concurrency_limit", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/conditional_switch_task_workflow_integration_test.json b/test-harness/src/test/resources/conditional_switch_task_workflow_integration_test.json deleted file mode 100644 index be6fc83cd..000000000 --- a/test-harness/src/test/resources/conditional_switch_task_workflow_integration_test.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "name": "ConditionalTaskWF", - "description": "ConditionalTaskWF", - "version": 1, - "tasks": [ - { - "name": "conditional", - "taskReferenceName": "conditional", - "inputParameters": { - "case": "${workflow.input.param1}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "nested": [ - { - "name": "nestedCondition", - "taskReferenceName": "nestedCondition", - "inputParameters": { - "case": "${workflow.input.param2}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "one": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - 
"inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "two": [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "three": [ - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp3": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_10", - "taskReferenceName": "t10", - "inputParameters": { - "tp10": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "finalcondition", - "taskReferenceName": "finalCase", - "inputParameters": { - "finalCase": "${workflow.input.finalCase}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "finalCase", - 
"decisionCases": { - "notify": [ - { - "name": "integration_task_4", - "taskReferenceName": "integration_task_4", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json b/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json deleted file mode 100644 index 275f928f8..000000000 --- a/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "name": "ConditionalSystemWorkflow", - "description": "ConditionalSystemWorkflow", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "decision", - "taskReferenceName": "decision", - "inputParameters": { - "case": "${t1.output.case}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "one": [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp21": 
"${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "two": [ - { - "name": "user_task", - "taskReferenceName": "user_task", - "inputParameters": { - "largeInput": "${t1.output.op}" - }, - "type": "USER_TASK", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp31": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o2": "${t1.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/conditional_task_workflow_integration_test.json b/test-harness/src/test/resources/conditional_task_workflow_integration_test.json deleted file mode 100644 index bc9c59de0..000000000 --- a/test-harness/src/test/resources/conditional_task_workflow_integration_test.json +++ /dev/null @@ -1,170 +0,0 @@ -{ - "name": "ConditionalTaskWF", - "description": "ConditionalTaskWF", - "version": 1, - "tasks": [ - { - "name": "conditional", - "taskReferenceName": "conditional", - 
"inputParameters": { - "case": "${workflow.input.param1}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "nested": [ - { - "name": "nestedCondition", - "taskReferenceName": "nestedCondition", - "inputParameters": { - "case": "${workflow.input.param2}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "one": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "two": [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "three": [ - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp3": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_10", - "taskReferenceName": "t10", - "inputParameters": { - "tp10": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], 
- "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "finalcondition", - "taskReferenceName": "finalCase", - "inputParameters": { - "finalCase": "${workflow.input.finalCase}" - }, - "type": "DECISION", - "caseValueParam": "finalCase", - "decisionCases": { - "notify": [ - { - "name": "integration_task_4", - "taskReferenceName": "integration_task_4", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/decision_and_fork_join_integration_test.json b/test-harness/src/test/resources/decision_and_fork_join_integration_test.json deleted file mode 100644 index d2fb055d2..000000000 --- a/test-harness/src/test/resources/decision_and_fork_join_integration_test.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "name": "ForkConditionalTest", - "description": "ForkConditionalTest", - "version": 1, - "tasks": [ - { - "name": "forkTask", - "taskReferenceName": "forkTask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "decisionTask", - "taskReferenceName": "decisionTask", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": 
"DECISION", - "caseValueParam": "case", - "decisionCases": { - "c": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_5", - "taskReferenceName": "t5", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_10", - "taskReferenceName": "t10", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - 
"decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "joinTask", - "taskReferenceName": "joinTask", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t20", - "t10" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/decision_and_terminate_integration_test.json b/test-harness/src/test/resources/decision_and_terminate_integration_test.json deleted file mode 100644 index c7f0d5d49..000000000 --- a/test-harness/src/test/resources/decision_and_terminate_integration_test.json +++ /dev/null @@ -1,113 +0,0 @@ -{ - "name": "ConditionalTerminateWorkflow", - "description": "ConditionalTerminateWorkflow", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "decision", - "taskReferenceName": "decision", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "one": [ - { - 
"name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp21": "${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "two": [ - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "FAILED", - "workflowOutput": "${t1.output.op}" - }, - "type": "TERMINATE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp31": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o2": "${t3.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/do_while_as_subtask_integration_test.json b/test-harness/src/test/resources/do_while_as_subtask_integration_test.json deleted file mode 100644 index 2fc471efc..000000000 --- a/test-harness/src/test/resources/do_while_as_subtask_integration_test.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "name": "Do_While_SubTask", - "description": "Do_While_SubTask", 
- "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fork", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "loopTask", - "taskReferenceName": "loopTask", - "inputParameters": { - "value": "${workflow.input.loop}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask['iteration'] < $.value) { true; } else { false;} ", - "loopOver": [ - { - "name": "integration_task_0", - "taskReferenceName": "integration_task_0", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_1", - "taskReferenceName": "integration_task_1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "integration_task_2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "loopTask", - "integration_task_2" - 
], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/do_while_integration_test.json b/test-harness/src/test/resources/do_while_integration_test.json deleted file mode 100644 index e6723a3ed..000000000 --- a/test-harness/src/test/resources/do_while_integration_test.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "name": "Do_While_Workflow", - "description": "Do_While_Workflow", - "version": 1, - "tasks": [ - { - "name": "loopTask", - "taskReferenceName": "loopTask", - "inputParameters": { - "value": "${workflow.input.loop}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask['iteration'] < $.value) { true; } else { false;} ", - "loopOver": [ - { - "name": "integration_task_0", - "taskReferenceName": "integration_task_0", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "fork", - "taskReferenceName": "fork", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_1", - "taskReferenceName": "integration_task_1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - 
"name": "integration_task_2", - "taskReferenceName": "integration_task_2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "integration_task_1", - "integration_task_2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/do_while_iteration_fix_test.json b/test-harness/src/test/resources/do_while_iteration_fix_test.json deleted file mode 100644 index b9dd19a29..000000000 --- a/test-harness/src/test/resources/do_while_iteration_fix_test.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "Do_While_Workflow_Iteration_Fix", - "description": "Do_While_Workflow_Iteration_Fix", - "version": 1, - "tasks": [ - { - "name": "loopTask", - "taskReferenceName": "loopTask", - "inputParameters": { - "value": "${workflow.input.loop}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask['iteration'] < $.value) { true; } else { false;} ", - "loopOver": [ - { - "name": "form_uri", - "taskReferenceName": 
"form_uri", - "inputParameters": { - "index" : "${loopTask['iteration']}", - "scriptExpression": "return $.index - 1;" - }, - "type": "LAMBDA" - } - ] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/do_while_multiple_integration_test.json b/test-harness/src/test/resources/do_while_multiple_integration_test.json deleted file mode 100644 index 1da5bc2db..000000000 --- a/test-harness/src/test/resources/do_while_multiple_integration_test.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "name": "Do_While_Multiple", - "description": "Do_While_Multiple", - "version": 1, - "tasks": [ - { - "name": "loopTask", - "taskReferenceName": "loopTask", - "inputParameters": { - "value": "${workflow.input.loop}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask['iteration'] < $.value ) { true;} else {false;} ", - "loopOver": [ - { - "name": "integration_task_0", - "taskReferenceName": "integration_task_0", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "fork", - "taskReferenceName": "fork", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_1", - "taskReferenceName": "integration_task_1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - 
"defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "integration_task_2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "integration_task_1", - "integration_task_2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - { - "name": "loopTask2", - "taskReferenceName": "loopTask2", - "inputParameters": { - "value": "${workflow.input.loop2}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask2['iteration'] < $.value) { true; } else { false; }", - "loopOver": [ - { - "name": "integration_task_3", - "taskReferenceName": "integration_task_3", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git 
a/test-harness/src/test/resources/do_while_sub_workflow_integration_test.json b/test-harness/src/test/resources/do_while_sub_workflow_integration_test.json deleted file mode 100644 index 80cd20e07..000000000 --- a/test-harness/src/test/resources/do_while_sub_workflow_integration_test.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "name": "Do_While_Sub_Workflow", - "description": "Do_While_Sub_Workflow", - "version": 1, - "tasks": [ - { - "name": "loopTask", - "taskReferenceName": "loopTask", - "inputParameters": { - "value": "${workflow.input.loop}" - }, - "type": "DO_WHILE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopCondition": "if ($.loopTask['iteration'] < $.value) { true; } else { false;} ", - "loopOver": [ - { - "name": "integration_task_0", - "taskReferenceName": "integration_task_0", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "fork", - "taskReferenceName": "fork", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_1", - "taskReferenceName": "integration_task_1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "integration_task_2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - 
"loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "integration_task_1", - "integration_task_2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "sub_workflow_task", - "taskReferenceName": "st1", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_workflow" - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/dynamic_fork_join_integration_test.json b/test-harness/src/test/resources/dynamic_fork_join_integration_test.json deleted file mode 100644 index b442d7b1a..000000000 --- a/test-harness/src/test/resources/dynamic_fork_join_integration_test.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "name": "DynamicFanInOutTest", - "description": "DynamicFanInOutTest", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "dt1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "taskDefinition": { - "createdBy": "integration_app", - "name": "integration_task_1", - "description": 
"integration_task_1", - "retryCount": 1, - "timeoutSeconds": 120, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "fork", - "taskReferenceName": "dynamicfanouttask", - "inputParameters": { - "dynamicTasks": "${dt1.output.dynamicTasks}", - "dynamicTasksInput": "${dt1.output.dynamicTasksInput}" - }, - "type": "FORK_JOIN_DYNAMIC", - "decisionCases": {}, - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "dynamicTasksInput", - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "dynamicfanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_4", - "taskReferenceName": "task4", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "taskDefinition": { - "createdBy": "integration_app", - "name": "integration_task_4", - "description": "integration_task_4", - "retryCount": 1, - "timeoutSeconds": 120, - "inputKeys": [], - "outputKeys": [], - "timeoutPolicy": "TIME_OUT_WF", - "retryLogic": "FIXED", - "retryDelaySeconds": 60, - "responseTimeoutSeconds": 3600, - "inputTemplate": {}, - "rateLimitPerFrequency": 0, - "rateLimitFrequencyInSeconds": 1 - }, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - 
"inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/event_workflow_integration_test.json b/test-harness/src/test/resources/event_workflow_integration_test.json deleted file mode 100644 index d7aa466a8..000000000 --- a/test-harness/src/test/resources/event_workflow_integration_test.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "test_event_workflow", - "version": 1, - "tasks": [ - { - "name": "eventX", - "taskReferenceName": "wait0", - "inputParameters": {}, - "type": "EVENT", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": "conductor", - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/exclusive_join_integration_test.json b/test-harness/src/test/resources/exclusive_join_integration_test.json deleted file mode 100644 index 17b6671df..000000000 --- a/test-harness/src/test/resources/exclusive_join_integration_test.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "name": "ExclusiveJoinTestWorkflow", - "description": "Exclusive Join Test Workflow", - "version": 1, - "tasks": [ - { - 
"name": "integration_task_1", - "taskReferenceName": "task1", - "inputParameters": { - "payload": "${workflow.input.payload}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false - }, - { - "name": "decide_task", - "taskReferenceName": "decision1", - "inputParameters": { - "decision_1": "${workflow.input.decision_1}" - }, - "type": "DECISION", - "caseValueParam": "decision_1", - "decisionCases": { - "true": [ - { - "name": "integration_task_2", - "taskReferenceName": "task2", - "inputParameters": { - "payload": "${task1.output.payload}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false - }, - { - "name": "decide_task", - "taskReferenceName": "decision2", - "inputParameters": { - "decision_2": "${workflow.input.decision_2}" - }, - "type": "DECISION", - "caseValueParam": "decision_2", - "decisionCases": { - "true": [ - { - "name": "integration_task_3", - "taskReferenceName": "task3", - "inputParameters": { - "payload": "${task2.output.payload}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false - } - ] - } - } - ], - "false": [ - { - "name": "integration_task_4", - "taskReferenceName": "task4", - "inputParameters": { - "payload": "${task1.output.payload}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false - }, - { - "name": "decide_task", - "taskReferenceName": "decision3", - "inputParameters": { - "decision_3": "${workflow.input.decision_3}" - }, - "type": "DECISION", - "caseValueParam": "decision_3", - "decisionCases": { - "true": [ - { - "name": "integration_task_5", - "taskReferenceName": "task5", - "inputParameters": { - "payload": "${task4.output.payload}" - }, - "type": "SIMPLE", - "startDelay": 0, - "optional": false - } - ] - } - } - ] - } - }, - { - "name": "exclusive_join", - "taskReferenceName": "exclusiveJoin", - "type": "EXCLUSIVE_JOIN", - "joinOn": [ - "task3", - "task5" - ], - "defaultExclusiveJoinTask": [ - "task2", - "task4", - "task1" - ] - } - ], - "schemaVersion": 2, - "ownerEmail": 
"test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json b/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json deleted file mode 100644 index c0ad47d05..000000000 --- a/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "failure_workflow", - "version": 1, - "tasks": [ - { - "name": "lambda", - "taskReferenceName": "lambda0", - "inputParameters": { - "input": "${workflow.input}", - "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" - }, - "type": "LAMBDA", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_integration_test.json b/test-harness/src/test/resources/fork_join_integration_test.json deleted file mode 100644 index 7e9933840..000000000 --- a/test-harness/src/test/resources/fork_join_integration_test.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "name": "FanInOutTest", - "description": "FanInOutTest", - "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fanouttask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "workflow.input.param1", - "p2": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": 
[], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "p1": "workflow.input.param1", - "p2": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "workflow.input.param1" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t3", - "t2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_4", - "taskReferenceName": "t4", - "inputParameters": { - "tp1": "workflow.input.param1" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git 
a/test-harness/src/test/resources/fork_join_sub_workflow.json b/test-harness/src/test/resources/fork_join_sub_workflow.json deleted file mode 100644 index 140175163..000000000 --- a/test-harness/src/test/resources/fork_join_sub_workflow.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "integration_test_fork_join_sw", - "description": "integration_test_fork_join_sw", - "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fanouttask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "sub_workflow_task", - "taskReferenceName": "st1", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_workflow" - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "st1", - "t2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": 
"ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json b/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json deleted file mode 100644 index ffdaf97f1..000000000 --- a/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "name": "FanInOutTest_2", - "description": "FanInOutTest_2", - "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fanouttask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_0_RT_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "workflow.input.param1", - "p2": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_0_RT_3", - "taskReferenceName": "t3", - "inputParameters": { - "p1": "workflow.input.param1", - "p2": "workflow.input.param2" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_0_RT_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "workflow.input.param1" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - 
{ - "name": "join", - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t3", - "t2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_0_RT_4", - "taskReferenceName": "t4", - "inputParameters": { - "tp1": "workflow.input.param1" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json b/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json deleted file mode 100644 index 35ea60d7d..000000000 --- a/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "integration_test_fork_join_optional_sw", - "description": "integration_test_fork_join_optional_sw", - "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fanouttask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "st1", - "taskReferenceName": "st1", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_workflow" - }, - "joinOn": [], - "optional": true, - "defaultExclusiveJoinTask": [], - 
"asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "st2", - "taskReferenceName": "st2", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_workflow" - }, - "joinOn": [], - "optional": true, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join", - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "st1", - "st2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/hierarchical_fork_join_swf.json b/test-harness/src/test/resources/hierarchical_fork_join_swf.json deleted file mode 100644 index dbcc75e8e..000000000 --- a/test-harness/src/test/resources/hierarchical_fork_join_swf.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "name": "hierarchical_fork_join_swf", - "description": "hierarchical_fork_join_swf", - "version": 1, - "tasks": [ - { - "name": "fork", - "taskReferenceName": "fanouttask", - "inputParameters": { - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "sub_workflow_task", - "taskReferenceName": "st1", - "inputParameters": { - "param1": 
"${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "${workflow.input.subwf}", - "version": 1 - }, - "retryCount": 0 - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "retryCount": 0 - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "joinOn": [ - "st1", - "t2" - ] - } - ], - "inputParameters": [ - "param1", - "param2", - "subwf" - ], - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/input.json b/test-harness/src/test/resources/input.json deleted file mode 100644 index e69de29bb..000000000 diff --git a/test-harness/src/test/resources/nested_fork_join_integration_test.json b/test-harness/src/test/resources/nested_fork_join_integration_test.json deleted file mode 100644 index 17f607ae6..000000000 --- a/test-harness/src/test/resources/nested_fork_join_integration_test.json +++ /dev/null @@ -1,348 +0,0 @@ -{ - "name": "FanInOutNestedTest", - "description": "FanInOutNestedTest", - "version": 1, - "tasks": [ - { - "name": "fork1", - "taskReferenceName": "fork1", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_11", - "taskReferenceName": "t11", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": 
false, - "loopOver": [] - } - ], - [ - { - "name": "fork2", - "taskReferenceName": "fork2", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_12", - "taskReferenceName": "t12", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_14", - "taskReferenceName": "t14", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_13", - "taskReferenceName": "t13", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "Decision", - "taskReferenceName": "d1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "a": [ - { - "name": "integration_task_16", - "taskReferenceName": "t16", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - 
"defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_19", - "taskReferenceName": "t19", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "b": [ - { - "name": "integration_task_17", - "taskReferenceName": "t17", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20b", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_18", - "taskReferenceName": "t18", - "inputParameters": { 
- "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20def", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join2", - "taskReferenceName": "join2", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t14", - "t20" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join1", - "taskReferenceName": "join1", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t11", - "join2" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_15", - "taskReferenceName": "t15", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": 
"${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/nested_fork_join_swf.json b/test-harness/src/test/resources/nested_fork_join_swf.json deleted file mode 100644 index f06e0dac6..000000000 --- a/test-harness/src/test/resources/nested_fork_join_swf.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "name": "nested_fork_join_swf", - "description": "nested_fork_join_swf", - "version": 1, - "tasks": [ - { - "name": "outer_fork", - "taskReferenceName": "outer_fork", - "inputParameters": { - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "inner_fork", - "taskReferenceName": "inner_fork", - "inputParameters": { - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "sub_workflow_task", - "taskReferenceName": "st1", - "inputParameters": { - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "${workflow.input.subwf}", - "version": 1 - }, - "retryCount": 0 - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - 
"type": "SIMPLE", - "retryCount": 0 - } - ] - ] - }, - { - "name": "inner_join", - "taskReferenceName": "inner_join", - "type": "JOIN", - "joinOn": [ - "st1", - "t2" - ] - } - ], - [ - { - "name": "integration_task_2", - "taskReferenceName": "t3", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "retryCount": 0 - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "outer_join", - "inputParameters": {}, - "type": "JOIN", - "joinOn": [ - "inner_join", - "t3" - ] - } - ], - "inputParameters": [ - "param1", - "param2", - "subwf" - ], - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json b/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json deleted file mode 100644 index 6c9cbbc7b..000000000 --- a/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json +++ /dev/null @@ -1,369 +0,0 @@ -{ - "name": "FanInOutNestedSubWorkflowTest", - "description": "FanInOutNestedSubWorkflowTest", - "version": 1, - "tasks": [ - { - "name": "fork1", - "taskReferenceName": "fork1", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_11", - "taskReferenceName": "t11", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "fork2", - "taskReferenceName": "fork2", - "inputParameters": {}, - "type": "FORK_JOIN", - 
"decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "integration_task_12", - "taskReferenceName": "t12", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_14", - "taskReferenceName": "t14", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_13", - "taskReferenceName": "t13", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "Decision", - "taskReferenceName": "d1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "a": [ - { - "name": "integration_task_16", - "taskReferenceName": "t16", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - 
"asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_19", - "taskReferenceName": "t19", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "b": [ - { - "name": "integration_task_17", - "taskReferenceName": "t17", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20b", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_18", - "taskReferenceName": "t18", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": 
"SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20def", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join2", - "taskReferenceName": "join2", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t14", - "t20" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "sw1", - "taskReferenceName": "sw1", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "integration_test_wf" - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "join1", - "taskReferenceName": "join1", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t11", - "join2", - 
"sw1" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_15", - "taskReferenceName": "t15", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "case": "${workflow.input.case}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/output.json b/test-harness/src/test/resources/output.json deleted file mode 100644 index c0921ccaa..000000000 --- a/test-harness/src/test/resources/output.json +++ /dev/null @@ -1,424 +0,0 @@ -{ - "imageType": "TEST_SAMPLE", - "case": "two", - "op": { - "TEST_SAMPLE": [ - { - "sourceId": "1413900_10830", - "url": "file/location/a0bdc4d0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_50241", - "url": "file/location/cd4e00a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-55ee8663-85c2-42d3-aca2-4076707e6d4e", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-14056154-1544-4350-81db-b3751fe44777", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-0b0ae5ea-d5c5-410c-adc9-bf16d2909c2e", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-08869779-614d-417c-bfea-36a3f8f199da", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-e117db45-1c48-45d0-b751-89386eb2d81d", - "url": 
"file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0221421-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/4a009209-002f-4b58-8b96-cb2198f8ba3c" - }, - { - "sourceId": "f0252161-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/55b56298-5e7a-4949-b919-88c5c9557e8e" - }, - { - "sourceId": "f038d070-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/3c4804f4-e826-436f-90c9-52b8d9266d52" - }, - { - "sourceId": "f04e0621-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/689283a1-1816-48ef-83da-7f9ac874bf45" - }, - { - "sourceId": "f04ddf10-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/586666ae-7321-445a-80b6-323c8c241ecd" - }, - { - "sourceId": "f05950c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/31795cc4-2590-4b20-a617-deaa18301f99" - }, - { - "sourceId": "1413900_46819", - "url": "file/location/c74497a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_11177", - "url": "file/location/a231c730-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48713", - "url": "file/location/ca638ae0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48525", - "url": "file/location/ca0c9140-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_73303", - "url": "file/location/d5943a40-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_55202", - "url": "file/location/d1a4d7a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-61413adf-3c10-4484-b25d-e238df898f45", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-addca397-f050-4339-ae86-9ba8c4e1b0d5", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e4de9810-0f69-4593-8926-01ed82cbebcb", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e16e2074-7af6-4700-ab05-ca41ba9c9ab4", - "url": 
"file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-341c86f8-57a5-40e1-8842-3eb41dd9f528", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-88c2ea9b-cef7-4120-8043-b92713d8fade", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3f6a731f-3c92-4677-9923-f80b8a6be632", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-1508b871-64de-47ce-8b07-76c5cb3f3e1e", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": "generated-1406dce8-7b9c-4956-a7e8-78721c476ce9", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": "f0206671-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35ebee36-3072-44c5-abb5-702a5a3b1a91" - }, - { - "sourceId": "f01f5501-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d3a9133d-c681-4910-a769-8195526ae634" - }, - { - "sourceId": "f022b060-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8fc1413d-170e-4644-a554-5e0c596b225c" - }, - { - "sourceId": "f02fa8b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35bed0a2-7def-457b-bded-4f4d7d94f76e" - }, - { - "sourceId": "f031f2a0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a5a2ea1f-8d13-429c-a44d-3057d21f608a" - }, - { - "sourceId": "f0424650-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/1c599ffc-4f10-4c0b-8d9a-ae41c7256113" - }, - { - "sourceId": "f04ec970-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8404a421-e1a6-41cf-af63-a35ccb474457" - }, - { - "sourceId": "1413900_47197", - "url": "file/location/c81b6fa0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-2a63c0c8-62ea-44a4-a33b-f0b3047e8b00", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-b27face7-3589-4209-944a-5153b20c5996", - 
"url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-144675b3-9321-48d2-8b5b-e19a40d30ef2", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-8cbe821e-b1fb-48ce-beb5-735319af4db6", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-ecc4ea47-9bad-4b91-97c7-35f4ea6fb479", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-c1eb9ed0-8560-4e09-a748-f926edb7cdc2", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-6bed81fd-c777-4c61-8da1-0bb7f7cf0082", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-852e5510-dd5d-4900-a614-854148fcc716", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-f4dedcb7-37c9-4ba9-ab37-64ec9be7c882", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0259691-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/721bc0de-e75f-4386-8b2e-ca84eb653596" - }, - { - "sourceId": "f02b3be1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d2043b17-8ce5-42ee-a5e4-81c68f0c4838" - }, - { - "sourceId": "f02b62f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/63931561-3b5b-4ffe-af47-da2c9de94684" - }, - { - "sourceId": "f0315660-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d99ed629-2885-4e4a-8a1b-22e487b875fa" - }, - { - "sourceId": "f0306c00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6f8e673a-7003-44aa-96b9-e2ed8a4654ff" - }, - { - "sourceId": "f033c760-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/627c00f9-14b3-4057-b6e2-0f962ad0308e" - }, - { - "sourceId": "f03526f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fafabaf9-fe58-4a9a-b555-026521aeb2fe" - }, - { - "sourceId": 
"f03acc41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6c9fed2c-558a-4db3-8360-659b5e8c46e4" - }, - { - "sourceId": "f0463df1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e9fb83d2-5f14-4442-92b5-67e613f2e35f" - }, - { - "sourceId": "f04fb3d0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e7a0f82f-be8d-4ada-a4b1-13e8165e08be" - }, - { - "sourceId": "f05272f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9aba488a-22b3-4932-85a7-52c461203541" - }, - { - "sourceId": "f0581841-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/457415f6-6d0c-4304-8533-0d5b43fac564" - }, - { - "sourceId": "generated-8fefb48c-6fde-4fd6-8f33-a1f3f3b62105", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-30c61aa5-f5bd-4077-8c32-336b87acbe96", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-d5da37db-d486-46d4-8f7d-1e0710a77eb5", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-77af26fe-9e22-48af-99e3-f63f10fbe6de", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-2e807016-3d11-4b60-bec7-c380a608b67d", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-615d02e9-62c2-43ab-9df7-753b6b8e2c22", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3e1600fd-a626-4ee6-972b-5f0187e96c38", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-1dcb208c-6a58-4334-a60c-6fb54c8a2af5", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f024ac30-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0af2107b-4231-4d23-bef3-4e417ac6c5d3" - }, - { - "sourceId": "f0282ea1-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/0f592681-fd23-4194-ae43-42f61c664485" - }, - { - "sourceId": "f02c4d50-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/ec46b9a3-99af-410a-af7d-726f8854909f" - }, - { - "sourceId": "f02b8a00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aed7e5da-b524-4d41-b264-28ce615ec826" - }, - { - "sourceId": "f02b14d1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/b88c9055-ab0d-4d27-a405-265ba2a15f0c" - }, - { - "sourceId": "f03044f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb8c4df9-d59e-4ac3-880e-4ea94cd880a4" - }, - { - "sourceId": "f034ffe1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/59f3fbe8-b300-4861-9b2f-dac7b15aea7d" - }, - { - "sourceId": "f03c2bd0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/19a06d54-41ed-419d-9947-f10cd5f0d85c" - }, - { - "sourceId": "f03fae41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a9a48a62-7d62-4f67-b281-cc6fdc1e722c" - }, - { - "sourceId": "f0455390-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0aeffc0a-a5ad-46ff-abab-1b3bc6a5840a" - }, - { - "sourceId": "f04b1ff1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9a08aaed-c125-48f7-9d1d-fd11266c2b12" - }, - { - "sourceId": "f04cf4b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/17a6e0f9-aa64-411f-9af7-837c84f7443f" - }, - { - "sourceId": "f0511360-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb633c73-cb33-4806-bc08-049024644856" - }, - { - "sourceId": "f0538460-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a7012248-6769-42da-a6c8-d4b831f6efce" - }, - { - "sourceId": "f058db91-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/bcf71522-6168-48c4-86c9-995bca60ae51" - }, - { - "sourceId": "generated-adf005c4-95c1-4904-9968-09cc19a26bfe", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-c4d367a4-4cdc-412e-af79-09b227f2e3ba", - "url": 
"file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-48dba018-f884-49db-b87e-67274e244c8f", - "url": "file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "generated-26700b83-4892-420e-8b46-1ee21eba75fb", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-632f3198-c0dc-4348-974f-51684d4e443e", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-86e2dd1d-1aa4-4dbe-b37b-b488f5dd1c70", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f04134e0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/ff8f59bf-7757-4d51-a7e4-619f3e8ffaf2" - }, - { - "sourceId": "f04f65b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d66467d1-3ac6-4041-8d15-e722ee07231f" - }, - { - "sourceId": "1413900_15255", - "url": "file/location/a9e20260-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-e953493b-cbe3-4319-885e-00c82089c76c", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-65c54676-3adb-4ef0-b65e-8e2a49533cbf", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "f02ac6b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/21568877-07a5-411f-9715-5e92806c4448" - }, - { - "sourceId": "f02fcfc1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/f3b1f1a2-48d3-475d-a607-2e5a1fe532e7" - }, - { - "sourceId": "f03526f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/84a40c66-d925-4a4a-ba62-8491d26e29e9" - }, - { - "sourceId": "f03e75c1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e84c00e8-a148-46cf-9a0b-431c4c2aeb08" - }, - { - "sourceId": "f0429471-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/178de9fa-7cc8-457a-8fb6-5c080e6163ea" - }, - { - "sourceId": "f047eba0-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/18d153aa-e13b-4264-ae03-f3da75eb425b" - }, - { - "sourceId": "f04fdae0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/7c843e53-8d87-47cf-bca5-1a02e7f5e33f" - }, - { - "sourceId": "f0553210-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/26bacd65-9082-4d83-9506-90e5f1ccd16a" - }, - { - "sourceId": "1413900_84904", - "url": "file/location/d8f7b090-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-84adc784-8d7d-4088-ba51-16fde57fbc21", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-9e49c58b-0b33-4daf-a39a-8fc91e302328", - "url": "file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "f02dd3f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8937b328-8f0d-4762-8d1f-7d7bc80c3d2e" - }, - { - "sourceId": "f03240c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aab6e386-4d59-4b40-b257-9aed12a45446" - } - ] - } -} \ No newline at end of file diff --git a/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json b/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json deleted file mode 100644 index 5a04ca0c1..000000000 --- a/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "test_rate_limit_simple_task_workflow", - "version": 1, - "tasks": [ - { - "name": "test_simple_task_with_rateLimits", - "taskReferenceName": "test_simple_task_with_rateLimits", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - 
"timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json b/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json deleted file mode 100644 index 29690b653..000000000 --- a/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "test_rate_limit_system_task_workflow", - "version": 1, - "tasks": [ - { - "name": "test_task_with_rateLimits", - "taskReferenceName": "test_task_with_rateLimits", - "inputParameters": {}, - "type": "USER_TASK", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_decision_task_integration_test.json b/test-harness/src/test/resources/simple_decision_task_integration_test.json deleted file mode 100644 index 3e69ada86..000000000 --- a/test-harness/src/test/resources/simple_decision_task_integration_test.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "name": "DecisionWorkflow", - "description": "DecisionWorkflow", - "version": 1, - "tasks": [ - { - "name": "decisionTask", - "taskReferenceName": "decisionTask", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "c": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - 
"defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_5", - "taskReferenceName": "t5", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json b/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json deleted file mode 100644 index 
dc394775d..000000000 --- a/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "test_json_jq_transform_wf", - "version": 1, - "tasks": [ - { - "name": "jq", - "taskReferenceName": "jq_1", - "inputParameters": { - "input": "${workflow.input}", - "queryExpression": ".input as $_ | { out: ($_.in1.array + $_.in2.array) }" - }, - "type": "JSON_JQ_TRANSFORM", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json b/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json deleted file mode 100644 index 1496e56b7..000000000 --- a/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "test_lambda_wf", - "version": 1, - "tasks": [ - { - "name": "lambda", - "taskReferenceName": "lambda0", - "inputParameters": { - "input": "${workflow.input}", - "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }" - }, - "type": "LAMBDA", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff 
--git a/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json b/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json deleted file mode 100644 index 1cfcb8deb..000000000 --- a/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "sub_workflow", - "description": "sub_workflow", - "version": 1, - "tasks": [ - { - "name": "simple_task_in_sub_wf", - "taskReferenceName": "t1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json b/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json deleted file mode 100644 index 69e66ac1d..000000000 --- a/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "test_set_variable_wf", - "version": 1, - "tasks": [ - { - "name": "set_variable", - "taskReferenceName": "set_variable_1", - "inputParameters": { - "var": "${workflow.input.var}" - }, - "type": "SET_VARIABLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": { - "variables": "${workflow.variables}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": 
"ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_switch_task_integration_test.json b/test-harness/src/test/resources/simple_switch_task_integration_test.json deleted file mode 100644 index 38ad29ed6..000000000 --- a/test-harness/src/test/resources/simple_switch_task_integration_test.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "name": "SwitchWorkflow", - "description": "SwitchWorkflow", - "version": 1, - "tasks": [ - { - "name": "switchTask", - "taskReferenceName": "switchTask", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "c": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_5", - "taskReferenceName": "t5", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - 
"optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json b/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json deleted file mode 100644 index f6968a9a6..000000000 --- a/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "test_wait_timeout", - "version": 1, - "tasks": [ - { - "name": "waitTimeout", - "taskReferenceName": "wait0", - "inputParameters": {}, - "type": "WAIT", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at 
end of file diff --git a/test-harness/src/test/resources/simple_workflow_1_input_template_integration_test.json b/test-harness/src/test/resources/simple_workflow_1_input_template_integration_test.json deleted file mode 100644 index 28a7ab19c..000000000 --- a/test-harness/src/test/resources/simple_workflow_1_input_template_integration_test.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "integration_test_template_wf", - "description": "Test a simple workflow with an input template", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "p3": "${CPEWF_TASK_ID}", - "someNullKey": null - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2", - "param3", - "param4" - ], - "inputTemplate": { - "param1": { - "nested_object": { - "nested_key": "nested_value" - } - }, - "param2": ["list", "of", "strings"], - "param3": "string" - }, - "outputParameters": { - "output": "${t1.output.op}", - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "param3": "${workflow.input.param3}" - }, - "failureWorkflow": "$workflow.input.failureWfName", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/simple_workflow_1_integration_test.json b/test-harness/src/test/resources/simple_workflow_1_integration_test.json deleted file mode 100644 index 202cb1305..000000000 --- a/test-harness/src/test/resources/simple_workflow_1_integration_test.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "integration_test_wf", - "description": 
"integration_test_wf", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "p3": "${CPEWF_TASK_ID}", - "someNullKey": null - }, - "type": "SIMPLE" - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}", - "tp3": "${CPEWF_TASK_ID}" - }, - "type": "SIMPLE" - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o1": "${workflow.input.param1}", - "o2": "${t2.output.uuid}", - "o3": "${t1.output.op}" - }, - "failureWorkflow": "$workflow.input.failureWfName", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/simple_workflow_3_integration_test.json b/test-harness/src/test/resources/simple_workflow_3_integration_test.json deleted file mode 100644 index 4d5e687c5..000000000 --- a/test-harness/src/test/resources/simple_workflow_3_integration_test.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "name": "integration_test_wf3", - "description": "integration_test_wf3", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "someNullKey": null - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - 
"startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json deleted file mode 100644 index d085bd33b..000000000 --- a/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "name": "async_complete_integration_test_wf", - "description": "async_complete_integration_test_wf", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "p3": "${CPEWF_TASK_ID}", - "someNullKey": null - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "user_task", - "taskReferenceName": "user_task", - "inputParameters": { - "input": "${t1.output.op}" - }, - "type": "USER_TASK", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - 
"startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": true, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o1": "${workflow.input.param1}", - "o2": "${user_task.output.uuid}", - "o3": "${t1.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json deleted file mode 100644 index de280d601..000000000 --- a/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "optional_task_wf", - "description": "optional_task_wf", - "version": 1, - "tasks": [ - { - "name": "task_optional", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": true, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o1": "${workflow.input.param1}", - "o2": "${t2.output.uuid}", - "o3": "${t1.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - 
"workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json deleted file mode 100644 index 812d9b5b8..000000000 --- a/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "name": "RTOWF", - "description": "RTOWF", - "version": 1, - "tasks": [ - { - "name": "task_rt", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o1": "${workflow.input.param1}", - "o2": "${t2.output.uuid}", - "o3": "${t1.output.op}" - }, - "failureWorkflow": "$workflow.input.failureWfName", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json 
b/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json deleted file mode 100644 index de3d6dd67..000000000 --- a/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "name": "WorkflowWithInlineSubWorkflow", - "description": "WorkflowWithInlineSubWorkflow", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "swt", - "taskReferenceName": "swt", - "inputParameters": { - "op": "${t1.output.op}", - "imageType": "${t1.output.imageType}" - }, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "one_task_workflow", - "version": 1, - "workflowDefinition": { - "name": "one_task_workflow", - "version": 1, - "tasks": [ - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "p1": "${workflow.input.imageType}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "imageType", - "op" - ], - "outputParameters": { - "op": "${t3.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 - } - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - 
"taskReferenceName": "t2", - "inputParameters": { - "op": "${t1.output.op}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o3": "${t1.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/start_workflow_input.json b/test-harness/src/test/resources/start_workflow_input.json deleted file mode 100644 index 0abd4d3b3..000000000 --- a/test-harness/src/test/resources/start_workflow_input.json +++ /dev/null @@ -1,427 +0,0 @@ -{ - "startWorkflow": { - "name": "integration_test_wf", - "input": { - "op": { - "TEST_SAMPLE": [ - { - "sourceId": "1413900_10830", - "url": "file/location/a0bdc4d0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_50241", - "url": "file/location/cd4e00a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-55ee8663-85c2-42d3-aca2-4076707e6d4e", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-14056154-1544-4350-81db-b3751fe44777", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-0b0ae5ea-d5c5-410c-adc9-bf16d2909c2e", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-08869779-614d-417c-bfea-36a3f8f199da", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-e117db45-1c48-45d0-b751-89386eb2d81d", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0221421-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/4a009209-002f-4b58-8b96-cb2198f8ba3c" - }, - { - "sourceId": "f0252161-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/55b56298-5e7a-4949-b919-88c5c9557e8e" - }, - { - "sourceId": "f038d070-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/3c4804f4-e826-436f-90c9-52b8d9266d52" - }, - { - "sourceId": "f04e0621-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/689283a1-1816-48ef-83da-7f9ac874bf45" - }, - { - "sourceId": "f04ddf10-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/586666ae-7321-445a-80b6-323c8c241ecd" - }, - { - "sourceId": "f05950c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/31795cc4-2590-4b20-a617-deaa18301f99" - }, - { - "sourceId": "1413900_46819", - "url": "file/location/c74497a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_11177", - "url": "file/location/a231c730-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48713", - "url": "file/location/ca638ae0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_48525", - "url": "file/location/ca0c9140-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_73303", - "url": "file/location/d5943a40-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "1413900_55202", - "url": "file/location/d1a4d7a0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-61413adf-3c10-4484-b25d-e238df898f45", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-addca397-f050-4339-ae86-9ba8c4e1b0d5", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e4de9810-0f69-4593-8926-01ed82cbebcb", - "url": "file/sample/location/838a0ddb-a315-453a-8b8a-fa795f9d7691" - }, - { - "sourceId": "generated-e16e2074-7af6-4700-ab05-ca41ba9c9ab4", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-341c86f8-57a5-40e1-8842-3eb41dd9f528", - "url": 
"file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-88c2ea9b-cef7-4120-8043-b92713d8fade", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3f6a731f-3c92-4677-9923-f80b8a6be632", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-1508b871-64de-47ce-8b07-76c5cb3f3e1e", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": "generated-1406dce8-7b9c-4956-a7e8-78721c476ce9", - "url": "file/sample/location/a2e4195f-3900-45b4-9335-45f85fca6467" - }, - { - "sourceId": "f0206671-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35ebee36-3072-44c5-abb5-702a5a3b1a91" - }, - { - "sourceId": "f01f5501-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d3a9133d-c681-4910-a769-8195526ae634" - }, - { - "sourceId": "f022b060-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8fc1413d-170e-4644-a554-5e0c596b225c" - }, - { - "sourceId": "f02fa8b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/35bed0a2-7def-457b-bded-4f4d7d94f76e" - }, - { - "sourceId": "f031f2a0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a5a2ea1f-8d13-429c-a44d-3057d21f608a" - }, - { - "sourceId": "f0424650-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/1c599ffc-4f10-4c0b-8d9a-ae41c7256113" - }, - { - "sourceId": "f04ec970-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8404a421-e1a6-41cf-af63-a35ccb474457" - }, - { - "sourceId": "1413900_47197", - "url": "file/location/c81b6fa0-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-2a63c0c8-62ea-44a4-a33b-f0b3047e8b00", - "url": "file/sample/location/e008d018-63d7-44b2-b07e-c7435430ac71" - }, - { - "sourceId": "generated-b27face7-3589-4209-944a-5153b20c5996", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-144675b3-9321-48d2-8b5b-e19a40d30ef2", - 
"url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-8cbe821e-b1fb-48ce-beb5-735319af4db6", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-ecc4ea47-9bad-4b91-97c7-35f4ea6fb479", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-c1eb9ed0-8560-4e09-a748-f926edb7cdc2", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-6bed81fd-c777-4c61-8da1-0bb7f7cf0082", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-852e5510-dd5d-4900-a614-854148fcc716", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-f4dedcb7-37c9-4ba9-ab37-64ec9be7c882", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f0259691-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/721bc0de-e75f-4386-8b2e-ca84eb653596" - }, - { - "sourceId": "f02b3be1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d2043b17-8ce5-42ee-a5e4-81c68f0c4838" - }, - { - "sourceId": "f02b62f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/63931561-3b5b-4ffe-af47-da2c9de94684" - }, - { - "sourceId": "f0315660-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d99ed629-2885-4e4a-8a1b-22e487b875fa" - }, - { - "sourceId": "f0306c00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6f8e673a-7003-44aa-96b9-e2ed8a4654ff" - }, - { - "sourceId": "f033c760-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/627c00f9-14b3-4057-b6e2-0f962ad0308e" - }, - { - "sourceId": "f03526f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fafabaf9-fe58-4a9a-b555-026521aeb2fe" - }, - { - "sourceId": "f03acc41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/6c9fed2c-558a-4db3-8360-659b5e8c46e4" - }, - { - "sourceId": 
"f0463df1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e9fb83d2-5f14-4442-92b5-67e613f2e35f" - }, - { - "sourceId": "f04fb3d0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e7a0f82f-be8d-4ada-a4b1-13e8165e08be" - }, - { - "sourceId": "f05272f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9aba488a-22b3-4932-85a7-52c461203541" - }, - { - "sourceId": "f0581841-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/457415f6-6d0c-4304-8533-0d5b43fac564" - }, - { - "sourceId": "generated-8fefb48c-6fde-4fd6-8f33-a1f3f3b62105", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-30c61aa5-f5bd-4077-8c32-336b87acbe96", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-d5da37db-d486-46d4-8f7d-1e0710a77eb5", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-77af26fe-9e22-48af-99e3-f63f10fbe6de", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-2e807016-3d11-4b60-bec7-c380a608b67d", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-615d02e9-62c2-43ab-9df7-753b6b8e2c22", - "url": "file/sample/location/519f6c80-96ef-440f-9d37-ccf36c7d1e5d" - }, - { - "sourceId": "generated-3e1600fd-a626-4ee6-972b-5f0187e96c38", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-1dcb208c-6a58-4334-a60c-6fb54c8a2af5", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f024ac30-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0af2107b-4231-4d23-bef3-4e417ac6c5d3" - }, - { - "sourceId": "f0282ea1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0f592681-fd23-4194-ae43-42f61c664485" - }, - { - "sourceId": "f02c4d50-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/ec46b9a3-99af-410a-af7d-726f8854909f" - }, - { - "sourceId": "f02b8a00-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aed7e5da-b524-4d41-b264-28ce615ec826" - }, - { - "sourceId": "f02b14d1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/b88c9055-ab0d-4d27-a405-265ba2a15f0c" - }, - { - "sourceId": "f03044f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb8c4df9-d59e-4ac3-880e-4ea94cd880a4" - }, - { - "sourceId": "f034ffe1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/59f3fbe8-b300-4861-9b2f-dac7b15aea7d" - }, - { - "sourceId": "f03c2bd0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/19a06d54-41ed-419d-9947-f10cd5f0d85c" - }, - { - "sourceId": "f03fae41-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a9a48a62-7d62-4f67-b281-cc6fdc1e722c" - }, - { - "sourceId": "f0455390-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/0aeffc0a-a5ad-46ff-abab-1b3bc6a5840a" - }, - { - "sourceId": "f04b1ff1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/9a08aaed-c125-48f7-9d1d-fd11266c2b12" - }, - { - "sourceId": "f04cf4b1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/17a6e0f9-aa64-411f-9af7-837c84f7443f" - }, - { - "sourceId": "f0511360-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/fb633c73-cb33-4806-bc08-049024644856" - }, - { - "sourceId": "f0538460-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/a7012248-6769-42da-a6c8-d4b831f6efce" - }, - { - "sourceId": "f058db91-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/bcf71522-6168-48c4-86c9-995bca60ae51" - }, - { - "sourceId": "generated-adf005c4-95c1-4904-9968-09cc19a26bfe", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-c4d367a4-4cdc-412e-af79-09b227f2e3ba", - "url": "file/sample/location/3d927190-1c4d-4af2-91cf-2968d3ccfe70" - }, - { - "sourceId": "generated-48dba018-f884-49db-b87e-67274e244c8f", - "url": 
"file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "generated-26700b83-4892-420e-8b46-1ee21eba75fb", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "generated-632f3198-c0dc-4348-974f-51684d4e443e", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "generated-86e2dd1d-1aa4-4dbe-b37b-b488f5dd1c70", - "url": "file/sample/location/e87da4d1-72da-47a3-801d-43e01c050c89" - }, - { - "sourceId": "f04134e0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/ff8f59bf-7757-4d51-a7e4-619f3e8ffaf2" - }, - { - "sourceId": "f04f65b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/d66467d1-3ac6-4041-8d15-e722ee07231f" - }, - { - "sourceId": "1413900_15255", - "url": "file/location/a9e20260-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-e953493b-cbe3-4319-885e-00c82089c76c", - "url": "file/sample/location/ec16facd-86e3-4c3f-8dfb-7a2ad3a4e18c" - }, - { - "sourceId": "generated-65c54676-3adb-4ef0-b65e-8e2a49533cbf", - "url": "file/sample/location/07ec28a1-189e-4f2a-9dd5-f3ca68ce977d" - }, - { - "sourceId": "f02ac6b0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/21568877-07a5-411f-9715-5e92806c4448" - }, - { - "sourceId": "f02fcfc1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/f3b1f1a2-48d3-475d-a607-2e5a1fe532e7" - }, - { - "sourceId": "f03526f0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/84a40c66-d925-4a4a-ba62-8491d26e29e9" - }, - { - "sourceId": "f03e75c1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/e84c00e8-a148-46cf-9a0b-431c4c2aeb08" - }, - { - "sourceId": "f0429471-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/178de9fa-7cc8-457a-8fb6-5c080e6163ea" - }, - { - "sourceId": "f047eba0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/18d153aa-e13b-4264-ae03-f3da75eb425b" - }, - { - "sourceId": "f04fdae0-86e8-11e8-af77-0a2ba4eae3ec", - "url": 
"file/test/location/7c843e53-8d87-47cf-bca5-1a02e7f5e33f" - }, - { - "sourceId": "f0553210-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/26bacd65-9082-4d83-9506-90e5f1ccd16a" - }, - { - "sourceId": "1413900_84904", - "url": "file/location/d8f7b090-5315-11e8-bf88-0efd527701fc" - }, - { - "sourceId": "generated-84adc784-8d7d-4088-ba51-16fde57fbc21", - "url": "file/sample/location/3881aea9-a731-4e22-9ead-2d6eccc51140" - }, - { - "sourceId": "generated-9e49c58b-0b33-4daf-a39a-8fc91e302328", - "url": "file/sample/location/4bce4154-fb4b-4f0a-887d-a0cd12d4d214" - }, - { - "sourceId": "f02dd3f1-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/8937b328-8f0d-4762-8d1f-7d7bc80c3d2e" - }, - { - "sourceId": "f03240c0-86e8-11e8-af77-0a2ba4eae3ec", - "url": "file/test/location/aab6e386-4d59-4b40-b257-9aed12a45446" - } - ] - } - } - } -} diff --git a/test-harness/src/test/resources/switch_and_fork_join_integration_test.json b/test-harness/src/test/resources/switch_and_fork_join_integration_test.json deleted file mode 100644 index e152a87ff..000000000 --- a/test-harness/src/test/resources/switch_and_fork_join_integration_test.json +++ /dev/null @@ -1,166 +0,0 @@ -{ - "name": "ForkConditionalTest", - "description": "ForkConditionalTest", - "version": 1, - "tasks": [ - { - "name": "forkTask", - "taskReferenceName": "forkTask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [ - { - "name": "switchTask", - "taskReferenceName": "switchTask", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "c": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": 
false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [ - { - "name": "integration_task_5", - "taskReferenceName": "t5", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_20", - "taskReferenceName": "t20", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - [ - { - "name": "integration_task_10", - "taskReferenceName": "t10", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": 
"joinTask", - "taskReferenceName": "joinTask", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [ - "t20", - "t10" - ], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/switch_and_terminate_integration_test.json b/test-harness/src/test/resources/switch_and_terminate_integration_test.json deleted file mode 100644 index fdaf12e64..000000000 --- a/test-harness/src/test/resources/switch_and_terminate_integration_test.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "name": "ConditionalTerminateWorkflow", - "description": "ConditionalTerminateWorkflow", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "switch", - "taskReferenceName": "switch", - "inputParameters": { - "case": "${workflow.input.case}" - }, - "type": "SWITCH", - "evaluatorType": "value-param", - "expression": "case", - "decisionCases": { - "one": [ - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp21": "${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "two": [ - { - "name": 
"terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "FAILED", - "workflowOutput": "${t1.output.op}" - }, - "type": "TERMINATE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp31": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "outputParameters": { - "o2": "${t3.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json b/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json deleted file mode 100644 index 9a59b9141..000000000 --- a/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "test_terminate_task_wf", - "version": 1, - "tasks": [ - { - "name": "lambda", - "taskReferenceName": "lambda0", - "inputParameters": { - "input": "${workflow.input}", - "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" - }, - "type": "LAMBDA", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - 
"defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "COMPLETED", - "workflowOutput": "${lambda0.output}" - }, - "type": "TERMINATE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": { - "o1": "${lambda0.output}", - "o2": "${t2.output}" - }, - "failureWorkflow": "failure_workflow", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json b/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json deleted file mode 100644 index fc5813f36..000000000 --- a/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "test_terminate_task_failed_wf", - "version": 1, - "tasks": [ - { - "name": "lambda", - "taskReferenceName": "lambda0", - "inputParameters": { - "input": "${workflow.input}", - "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" - }, - "type": "LAMBDA", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - 
}, - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "FAILED", - "terminationReason": "Early exit in terminate", - "workflowOutput": "${lambda0.output}" - }, - "type": "TERMINATE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "failureWorkflow": "failure_workflow", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_parent_workflow.json b/test-harness/src/test/resources/terminate_task_parent_workflow.json deleted file mode 100644 index f38790e66..000000000 --- a/test-harness/src/test/resources/terminate_task_parent_workflow.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "test_terminate_task_parent_wf", - "version": 1, - "tasks": [ - { - "name": "test_forkjoin", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "test_lambda_task1", - "taskReferenceName": "lambdaTask1", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - }, - { - "name": "test_terminate_subworkflow", - "taskReferenceName": 
"test_terminate_subworkflow", - "inputParameters": { - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "test_terminate_task_sub_wf" - } - } - ], - [ - { - "name": "test_lambda_task2", - "taskReferenceName": "lambdaTask2", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - }, - { - "name": "test_wait_task", - "taskReferenceName": "basicJavaA", - "type": "WAIT" - }, - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "COMPLETED", - "workflowOutput": "some output" - }, - "type": "TERMINATE", - "startDelay": 0, - "optional": false - }, - { - "name": "test_second_wait_task", - "taskReferenceName": "basicJavaB", - "type": "WAIT" - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "thejoin", - "type": "JOIN", - "joinOn": [ - "basicJavaA", - "basicJavaB" - ] - } - ], - "schemaVersion": 2, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_sub_workflow.json b/test-harness/src/test/resources/terminate_task_sub_workflow.json deleted file mode 100644 index b4196a170..000000000 --- a/test-harness/src/test/resources/terminate_task_sub_workflow.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "test_terminate_task_sub_wf", - "version": 1, - "tasks": [ - { - "name": "integration_task_3", - "taskReferenceName": "t3", - "type": "SIMPLE" - } - ], - "schemaVersion": 2, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/test_task_failed_parent_workflow.json b/test-harness/src/test/resources/test_task_failed_parent_workflow.json deleted file mode 100644 index c1c32369e..000000000 --- 
a/test-harness/src/test/resources/test_task_failed_parent_workflow.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "test_task_failed_parent_wf", - "version": 1, - "tasks": [ - { - "name": "test_lambda_task1", - "taskReferenceName": "lambdaTask1", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - }, - { - "name": "test_task_failed_sub_wf", - "taskReferenceName": "test_task_failed_sub_wf", - "inputParameters": { - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "test_task_failed_sub_wf" - } - }, - { - "name": "test_lambda_task2", - "taskReferenceName": "lambdaTask2", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - } - ], - "schemaVersion": 2, - "ownerEmail": "test@harness.com", - "failureWorkflow": "failure_workflow" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/test_task_failed_sub_workflow.json b/test-harness/src/test/resources/test_task_failed_sub_workflow.json deleted file mode 100644 index 5f1e76e9e..000000000 --- a/test-harness/src/test/resources/test_task_failed_sub_workflow.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "name": "test_task_failed_sub_wf", - "version": 1, - "tasks": [ - { - "name": "lambda", - "taskReferenceName": "lambda0", - "inputParameters": { - "input": "${workflow.input}", - "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" - }, - "type": "LAMBDA", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - 
"defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "FAILED", - "workflowOutput": "${lambda0.output}" - }, - "type": "TERMINATE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/wait_workflow_integration_test.json b/test-harness/src/test/resources/wait_workflow_integration_test.json deleted file mode 100644 index 8cc567e88..000000000 --- a/test-harness/src/test/resources/wait_workflow_integration_test.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "test_wait_workflow", - "version": 1, - "tasks": [ - { - "name": "wait", - "taskReferenceName": "wait0", - "inputParameters": {}, - "type": "WAIT", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": {}, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": 
false, - "loopOver": [] - } - ], - "inputParameters": [], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} \ No newline at end of file diff --git a/test-harness/src/test/resources/workflow_that_starts_another_workflow.json b/test-harness/src/test/resources/workflow_that_starts_another_workflow.json deleted file mode 100644 index ba3cf9c99..000000000 --- a/test-harness/src/test/resources/workflow_that_starts_another_workflow.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "workflow_that_starts_another_workflow", - "description": "A workflow that uses START_WORKFLOW task to start another workflow", - "version": 1, - "tasks": [ - { - "name": "start_workflow", - "taskReferenceName": "st", - "inputParameters": { - "startWorkflow": "${workflow.input.startWorkflow}" - }, - "type": "START_WORKFLOW" - } - ], - "inputParameters": ["start_workflow"], - "outputParameters": {}, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json b/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json deleted file mode 100644 index 890d6115c..000000000 --- a/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "integration_test_wf_with_sub_wf", - "description": "integration_test_wf_with_sub_wf", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "p1": "${workflow.input.param1}", - "p2": "${workflow.input.param2}", - "someNullKey": null - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - 
"joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, - { - "name": "sub_workflow_task", - "taskReferenceName": "t2", - "inputParameters": { - "param1": "${workflow.input.param1}", - "param2": "${workflow.input.param2}", - "subwf": "${workflow.input.nextSubwf}" - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "${workflow.input.subwf}", - "version": 1 - }, - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [], - "retryCount": 0 - } - ], - "inputParameters": [ - "param1", - "param2" - ], - "failureWorkflow": "$workflow.input.failureWfName", - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "ownerEmail": "test@harness.com" -} diff --git a/test-harness/src/test/resources/workflow_with_synchronous_system_task.json b/test-harness/src/test/resources/workflow_with_synchronous_system_task.json deleted file mode 100644 index c56fda745..000000000 --- a/test-harness/src/test/resources/workflow_with_synchronous_system_task.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "workflow_with_synchronous_system_task", - "description": "A workflow with a simple task followed a synchronous task", - "version": 1, - "tasks": [ - { - "name": "integration_task_1", - "taskReferenceName": "t1", - "type": "SIMPLE" - }, - { - "name": "jsonjq", - "taskReferenceName": "jsonjq", - "inputParameters": { - "queryExpression": ".tp2.TEST_SAMPLE | length", - "tp1": "${workflow.input.param1}", - "tp2": "${t1.output.op}" - }, - "type": "JSON_JQ_TRANSFORM" - } - ], - "inputParameters": [], - "outputParameters": { - "data": "${jsonjq.output.resources}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "ownerEmail": "example@email.com", - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0, - "variables": {}, - 
"inputTemplate": {} -} diff --git a/ui/.env b/ui/.env deleted file mode 100644 index 9a69c2488..000000000 --- a/ui/.env +++ /dev/null @@ -1 +0,0 @@ -PORT=5000 \ No newline at end of file diff --git a/ui/.eslintrc b/ui/.eslintrc deleted file mode 100644 index 9f8f2ab75..000000000 --- a/ui/.eslintrc +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "react-app", - "rules": { - "import/no-anonymous-default-export": 0 - } -} diff --git a/ui/.gitignore b/ui/.gitignore deleted file mode 100644 index 448863b57..000000000 --- a/ui/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. - -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# production -/build - -# misc -.DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local - -npm-debug.log* -yarn-debug.log* -yarn-error.log* - diff --git a/ui/.prettierignore b/ui/.prettierignore deleted file mode 100644 index c795b054e..000000000 --- a/ui/.prettierignore +++ /dev/null @@ -1 +0,0 @@ -build \ No newline at end of file diff --git a/ui/.prettierrc.json b/ui/.prettierrc.json deleted file mode 100644 index 0967ef424..000000000 --- a/ui/.prettierrc.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/ui/README.md b/ui/README.md deleted file mode 100644 index 204b7d17d..000000000 --- a/ui/README.md +++ /dev/null @@ -1,44 +0,0 @@ -## Conductor UI - -The UI is a standard `create-react-app` React Single Page Application (SPA). To get started, with Node 14 and `yarn` installed, first run `yarn install` from within the `/ui` directory to retrieve package dependencies. - -For more information regarding CRA configuration and usage, see the official [doc site](https://create-react-app.dev/). - -> ### For upgrading users -> -> The UI is designed to operate directly with the Conductor Server API. A Node `express` backend is no longer required. 
- -### Development Server - -To run the UI on the bundled development server, run `yarn run start`. Navigate your browser to `http://localhost:5000`. - -#### Reverse Proxy configuration - -The default setup expects that the Conductor Server API will be available at `localhost:8080/api`. You may select an alternate port and hostname, or rewrite the API path by editing `setupProxy.js`. Note that `setupProxy.js` is used ONLY by the development server. - -### Hosting for Production - -There is no need to "build" the project unless you require compiled assets to host on a production web server. In this case, the project can be built with the command `yarn build`. The assets will be produced to `/build`. - -Your hosting environment should make the Conductor Server API available on the same domain. This avoids complexities regarding cross-origin data fetching. The default path prefix is `/api`. If a different prefix is desired, `plugins/fetch.js` can be modified to customize the API fetch behavior. - -See `docker/serverAndUI` for an `nginx` based example. - -### Customization Hooks - -For ease of maintanence, a number of touch points for customization have been removed to `/plugins`. - -- `AppBarModules.jsx` -- `AppLogo.jsx` -- `env.js` -- `fetch.js` - -### Authentication - -We recommend that authentication & authorization be de-coupled from the UI and handled at the web server/access gateway. 
- -#### Examples (WIP) - -- Basic Auth (username/password) with `nginx` -- Commercial IAM Vendor -- Node `express` server with `passport.js` diff --git a/ui/package.json b/ui/package.json deleted file mode 100644 index bd6902b00..000000000 --- a/ui/package.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "client", - "version": "3.7.2", - "dependencies": { - "@material-ui/core": "^4.12.3", - "@material-ui/icons": "^4.11.2", - "@material-ui/lab": "^4.0.0-alpha.60", - "@material-ui/styles": "^4.11.4", - "@monaco-editor/react": "^4.3.1", - "clsx": "^1.1.1", - "cronstrue": "^1.72.0", - "d3": "^6.2.0", - "dagre-d3": "^0.6.4", - "date-fns": "^2.16.1", - "formik": "^2.2.9", - "http-proxy-middleware": "^2.0.1", - "immutability-helper": "^3.1.1", - "json-bigint-string": "^1.0.0", - "lodash": "^4.17.20", - "moment": "^2.29.2", - "node-forge": "^1.3.0", - "parse-svg-path": "^0.1.2", - "prop-types": "^15.7.2", - "react": "^16.8.0", - "react-cron-generator": "^1.3.5", - "react-data-table-component": "^6.11.8", - "react-dom": "^16.8.0", - "react-helmet": "^6.1.0", - "react-is": "^17.0.2", - "react-query": "^3.19.4", - "react-resize-detector": "^5.2.0", - "react-router": "^5.2.0", - "react-router-dom": "^5.2.0", - "react-router-use-location-state": "^2.5.0", - "react-scripts": "^5.0.1", - "react-vis-timeline-2": "^2.1.6", - "rison": "^0.1.1", - "styled-components": "^5.3.0", - "url-parse": "^1.5.1", - "use-local-storage-state": "^10.0.0", - "xss": "^1.0.8", - "yup": "^0.32.11" - }, - "scripts": { - "start": "react-scripts start", - "build": "react-scripts build", - "test": "react-scripts test", - "eject": "react-scripts eject", - "prettier": "prettier --write ." 
- }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - }, - "resolutions": { - "validator": "^13.7.0", - "nth-check": "^2.0.1", - "async": "^3.2.2", - "ejs": "^3.1.7" - }, - "devDependencies": { - "@testing-library/dom": "^8.2.0", - "@testing-library/jest-dom": "^5.16.4", - "@testing-library/react": "^13.2.0", - "@testing-library/user-event": "^14.2.0", - "@wdio/cli": "7.19.7", - "@wdio/junit-reporter": "7.19.7", - "@wdio/local-runner": "7.19.7", - "@wdio/mocha-framework": "7.19.7", - "@wdio/selenium-standalone-service": "7.19.5", - "@wdio/spec-reporter": "7.19.7", - "@wdio/sync": "7.19.7", - "js-yaml": "4.1.0", - "prettier": "^2.2.1", - "sass": "^1.49.9", - "typescript": "^4.6.3", - "webdriver": "^7.19.7", - "webdriverio": "^7.19.7" - }, - "engines": { - "node": ">=14.17.0" - }, - "license": "Apache-2.0" -} diff --git a/ui/public/index.html b/ui/public/index.html deleted file mode 100644 index 2fa6155de..000000000 --- a/ui/public/index.html +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - Conductor UI - - - -

    - - - diff --git a/ui/public/logo.png b/ui/public/logo.png deleted file mode 100644 index 5c05ecbd9..000000000 Binary files a/ui/public/logo.png and /dev/null differ diff --git a/ui/public/robots.txt b/ui/public/robots.txt deleted file mode 100644 index e9e57dc4d..000000000 --- a/ui/public/robots.txt +++ /dev/null @@ -1,3 +0,0 @@ -# https://www.robotstxt.org/robotstxt.html -User-agent: * -Disallow: diff --git a/ui/src/App.jsx b/ui/src/App.jsx deleted file mode 100644 index f59643b5a..000000000 --- a/ui/src/App.jsx +++ /dev/null @@ -1,134 +0,0 @@ -import React from "react"; - -import { Route, Switch } from "react-router-dom"; -import { makeStyles } from "@material-ui/styles"; -import { Button, AppBar, Toolbar } from "@material-ui/core"; -import AppLogo from "./plugins/AppLogo"; -import NavLink from "./components/NavLink"; - -import WorkflowSearch from "./pages/executions/WorkflowSearch"; -import TaskSearch from "./pages/executions/TaskSearch"; - -import Execution from "./pages/execution/Execution"; -import WorkflowDefinitions from "./pages/definitions/Workflow"; -import WorkflowDefinition from "./pages/definition/WorkflowDefinition"; -import TaskDefinitions from "./pages/definitions/Task"; -import TaskDefinition from "./pages/definition/TaskDefinition"; -import EventHandlerDefinitions from "./pages/definitions/EventHandler"; -import EventHandlerDefinition from "./pages/definition/EventHandler"; -import TaskQueue from "./pages/misc/TaskQueue"; -import KitchenSink from "./pages/kitchensink/KitchenSink"; -import DiagramTest from "./pages/kitchensink/DiagramTest"; -import Examples from "./pages/kitchensink/Examples"; -import Gantt from "./pages/kitchensink/Gantt"; - -import CustomRoutes from "./plugins/CustomRoutes"; -import AppBarModules from "./plugins/AppBarModules"; -import CustomAppBarButtons from "./plugins/CustomAppBarButtons"; -import Workbench from "./pages/workbench/Workbench"; - -const useStyles = makeStyles((theme) => ({ - root: { - backgroundColor: 
"#efefef", // TODO: Use theme var - display: "flex", - }, - body: { - width: "100vw", - height: "100vh", - paddingTop: theme.overrides.MuiAppBar.root.height, - }, - toolbarRight: { - marginLeft: "auto", - display: "flex", - flexDirection: "row", - }, - toolbarRegular: { - minHeight: 80, - }, -})); - -export default function App() { - const classes = useStyles(); - - return ( - // Provide context for backward compatibility with class components -
    - - - - - - - - - -
    - -
    -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    -
    - ); -} diff --git a/ui/src/App.test.js b/ui/src/App.test.js deleted file mode 100644 index 352d7b8ff..000000000 --- a/ui/src/App.test.js +++ /dev/null @@ -1,9 +0,0 @@ -import React from "react"; -import { render } from "@testing-library/react"; -import App from "./App"; - -test("renders learn react link", () => { - const { getByText } = render(); - const linkElement = getByText(/learn react/i); - expect(linkElement).toBeInTheDocument(); -}); diff --git a/ui/src/components/Banner.jsx b/ui/src/components/Banner.jsx deleted file mode 100644 index e909c94d3..000000000 --- a/ui/src/components/Banner.jsx +++ /dev/null @@ -1,28 +0,0 @@ -import React from "react"; -import { Paper } from "@material-ui/core"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - root: { - padding: 15, - backgroundColor: "rgba(73, 105, 228, 0.1)", - color: "rgba(0, 0, 0, 0.9)", - borderLeft: "solid rgba(73, 105, 228, 0.1) 4px", - }, -}); - -export default function Banner({ children, ...rest }) { - const classes = useStyles(); - - return ( - - {children} - - ); -} diff --git a/ui/src/components/Button.jsx b/ui/src/components/Button.jsx deleted file mode 100644 index 3ad5b12b2..000000000 --- a/ui/src/components/Button.jsx +++ /dev/null @@ -1,10 +0,0 @@ -import { Button as MuiButton } from "@material-ui/core"; - -export default function Button({ variant = "primary", ...props }) { - if (variant === "secondary") { - return ; - } else { - // primary or invalid - return ; - } -} diff --git a/ui/src/components/ButtonGroup.jsx b/ui/src/components/ButtonGroup.jsx deleted file mode 100644 index 2dce99035..000000000 --- a/ui/src/components/ButtonGroup.jsx +++ /dev/null @@ -1,22 +0,0 @@ -import React from "react"; -import { - FormControl, - InputLabel, - ButtonGroup, - Button, -} from "@material-ui/core"; - -export default function ({ options, label, style, classes, ...props }) { - return ( - - {label && {label}} - - {options.map((option, idx) => ( - - ))} - - - ); -} 
diff --git a/ui/src/components/ConfirmChoiceDialog.jsx b/ui/src/components/ConfirmChoiceDialog.jsx deleted file mode 100644 index 0bc8a6f1f..000000000 --- a/ui/src/components/ConfirmChoiceDialog.jsx +++ /dev/null @@ -1,39 +0,0 @@ -import React from "react"; -import { - Dialog, - DialogActions, - DialogContent, - DialogTitle, -} from "@material-ui/core"; -import Text from "./Text"; -import Button from "./Button"; - -export default function ({ - header = "Confirmation", - message = "Please confirm", - handleConfirmationValue, - open, -}) { - return ( - handleConfirmationValue(false)} - > - {header} - - {message} - - - - - - - ); -} diff --git a/ui/src/components/CustomButtons.jsx b/ui/src/components/CustomButtons.jsx deleted file mode 100644 index 1e6237637..000000000 --- a/ui/src/components/CustomButtons.jsx +++ /dev/null @@ -1,109 +0,0 @@ -import Button from "@material-ui/core/Button"; -import { styled } from "@material-ui/core"; - -export const fontFamilyList = [ - "-apple-system", - "BlinkMacSystemFont", - '"Segoe UI"', - "Roboto", - '"Helvetica Neue"', - "Arial", - "sans-serif", - '"Apple Color Emoji"', - '"Segoe UI Emoji"', - '"Segoe UI Symbol"', -].join(","); - -const hoverCss = { - backgroundColor: "#857aff", - borderColor: "#857aff", - boxShadow: "none", - "&> .MuiButton-label": { - color: "white", - }, -}; - -const buttonBaseStyle = { - boxShadow: "none", - textTransform: "none", - fontSize: 16, - padding: "6px 12px", - border: "1px solid", - lineHeight: 1.3, - color: "#ffffff", - backgroundColor: "#6558F5", - borderColor: "#6558F5", - fontFamily: fontFamilyList, - "&:hover": hoverCss, - "&:active": hoverCss, - "&:focus": { - boxShadow: "0 0 0 0.2rem rgba(0,123,255,.5)", - }, - "&> .MuiButton-label": { - color: "#ffffff", - }, -}; - -export const BootstrapButton = styled(Button)(buttonBaseStyle); - -const outlineHoverCss = { - ...hoverCss, - "&> .MuiButton-label": { - color: "ghostwhite", - }, -}; - -const actionHoverCss = { - ...hoverCss, - 
backgroundColor: "#30499f", - borderColor: "#30499f", -}; - -export const BootstrapOutlineButton = styled(Button)({ - ...buttonBaseStyle, - color: "#ffffff", - backgroundColor: "ghostwhite", - borderColor: "#6558F5", - "&> .MuiButton-label": { - color: "#6558F5", - }, - "&:hover": outlineHoverCss, - "&:active": outlineHoverCss, -}); - -export const BootstrapOutlineActionButton = styled(Button)({ - ...buttonBaseStyle, - color: "#ffffff", - backgroundColor: "ghostwhite", - borderColor: "#30499f", - "&> .MuiButton-label": { - color: "#30499f", - }, - "&:hover": actionHoverCss, - "&:active": actionHoverCss, -}); - -export const BootstrapTextButton = styled(Button)({ - ...buttonBaseStyle, - color: "#ffffff", - backgroundColor: "ghostwhite", - borderColor: "transparent", - "&> .MuiButton-label": { - color: "#6558F5", - }, - "&:hover": outlineHoverCss, - "&:active": outlineHoverCss, -}); - -export const BootstrapActionButton = styled(Button)({ - ...buttonBaseStyle, - fontSize: 14, - lineHeight: 1.5, - backgroundColor: "#4969e4", - borderColor: "#4969e4", - "&> .MuiButton-label": { - color: "#ffffff", - }, - "&:hover": actionHoverCss, - "&:active": actionHoverCss, -}); diff --git a/ui/src/components/DataTable.jsx b/ui/src/components/DataTable.jsx deleted file mode 100644 index c720a50cb..000000000 --- a/ui/src/components/DataTable.jsx +++ /dev/null @@ -1,348 +0,0 @@ -import React, { useMemo, useState } from "react"; -import RawDataTable from "react-data-table-component"; -import { - Checkbox, - MenuItem, - ListItemText, - IconButton, - Menu, - Tooltip, - Popover, -} from "@material-ui/core"; -import ViewColumnIcon from "@material-ui/icons/ViewColumn"; -import SearchIcon from "@material-ui/icons/Search"; -import { Heading, Select, Input } from "./"; -import { timestampRenderer } from "../utils/helpers"; -import { useLocalStorage } from "../utils/localstorage"; - -import _ from "lodash"; -export const DEFAULT_ROWS_PER_PAGE = 15; - -export default function DataTable(props) { 
- const { - localStorageKey, - columns, - data, - options, - defaultShowColumns, - paginationPerPage = 15, - showFilter = true, - showColumnSelector = true, - paginationServer = false, - title, - onFilterChange, - initialFilterObj, - ...rest - } = props; - - const DEFAULT_FILTER_OBJ = { - columnName: columns.find((col) => col.searchable !== false).name, - substring: "", - }; - - // If no defaultColumns passed - use all columns - const defaultColumns = useMemo( - () => - props.defaultShowColumns || props.columns.map((col) => getColumnId(col)), - [props.defaultShowColumns, props.columns] - ); - - const [tableState, setTableState] = useLocalStorage( - localStorageKey, - defaultColumns - ); - - const [filterObj, setFilterObj] = useState( - initialFilterObj || DEFAULT_FILTER_OBJ - ); - - const handleFilterChange = (val) => { - setFilterObj(val); - if (onFilterChange) { - if (!_.isEmpty(val.substring)) { - onFilterChange(val); - } else { - onFilterChange(undefined); - } - } - }; - - // Append bodyRenderer for date fields; - const dataTableColumns = useMemo(() => { - let viewColumns = []; - if (tableState) { - for (let col of columns) { - if (tableState.includes(getColumnId(col))) { - viewColumns.push(col); - } - } - } else { - viewColumns = columns; - } - - return viewColumns.map((column) => { - let { - id, - name, - label, - type, - renderer, - wrap = true, - sortable = true, - ...rest - } = column; - - const internalOptions = {}; - if (type === "date") { - internalOptions.format = (row) => timestampRenderer(_.get(row, name)); - } else if (type === "json") { - internalOptions.format = (row) => JSON.stringify(_.get(row, name)); - } - - if (renderer) { - internalOptions.format = (row) => renderer(_.get(row, name), row); - } - - return { - id: getColumnId(column), - selector: name, - name: getColumnLabel(column), - sortable: sortable, - wrap: wrap, - type, - ...internalOptions, - ...rest, - }; - }); - }, [tableState, columns]); - - const filteredItems = useMemo(() => { - 
const column = dataTableColumns.find( - (col) => col.id === filterObj.columnName - ); - - if (!filterObj.substring || !filterObj.columnName) { - return data; - } else { - try { - const regexp = new RegExp(filterObj.substring, "i"); - - return data.filter((row) => { - let target; - if ( - column.type === "json" || - column.type === "date" || - column.searchable === "calculated" - ) { - target = column.format(row); - - if (!_.isString(target)) { - target = JSON.stringify(target); - } - } else { - target = _.get(row, column.selector); - } - - return _.isString(target) && regexp.test(target); - }); - } catch (e) { - // Bad or incomplete Regexp - console.log(e); - return []; - } - } - }, [data, dataTableColumns, filterObj]); - - return ( - {title}} - columns={dataTableColumns} - data={filteredItems} - pagination - paginationServer={paginationServer} - paginationPerPage={paginationPerPage} - paginationRowsPerPageOptions={[15, 30, 100]} - actions={ - <> - {!paginationServer && showFilter && ( - - )} - {showColumnSelector && ( - - )} - - } - {...rest} - /> - ); -} - -function Filter({ columns, filterObj, setFilterObj }) { - const [anchorEl, setAnchorEl] = React.useState(null); - - const handleClick = (event) => { - setAnchorEl(event.currentTarget); - }; - - const handleClose = () => { - setAnchorEl(null); - }; - - const handleValueChange = (v) => { - setFilterObj({ - columnName: filterObj.columnName, - substring: v, - }); - }; - - const handleColumnChange = (c) => { - setFilterObj({ - columnName: c, - substring: "", - }); - }; - - return ( - <> - - - - - - - - - - - ); -} - -function getColumnLabelById(columnId, columns) { - const col = columns.find((c) => c.id === columnId || c.name === columnId); - return col.label || col.name; -} - -function getColumnLabel(col) { - return col.label || col.name; -} - -function getColumnId(col) { - return col.id || col.name; -} - -function ColumnsSelector({ columns, selected, setSelected, defaultColumns }) { - const [anchorEl, 
setAnchorEl] = React.useState(null); - - const handleClick = (event) => { - setAnchorEl(event.currentTarget); - }; - - const handleClose = () => { - setAnchorEl(null); - }; - - const handleChange = (columnId, checked) => { - if (!checked && selected.includes(columnId)) { - setSelected(selected.filter((v) => v !== columnId)); - } else { - setSelected([...selected, columnId]); - } - }; - - const reset = () => { - setSelected(defaultColumns); - }; - return ( - <> - - - - - - - {[ - ...columns.map((column) => ( - - - handleChange(getColumnId(column), e.target.checked) - } - /> - - - )), - - Reset to default - , - ]} - - - ); -} diff --git a/ui/src/components/DateRangePicker.jsx b/ui/src/components/DateRangePicker.jsx deleted file mode 100644 index b9cbfa062..000000000 --- a/ui/src/components/DateRangePicker.jsx +++ /dev/null @@ -1,52 +0,0 @@ -import React from "react"; -import { Input } from "./"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - wrapper: { - display: "flex", - }, - input: { - marginRight: 5, - flex: "0 1 50%", - }, - quick: { - flex: "0 0 auto", - }, -}); - -export default function DateRangePicker({ - onFromChange, - from, - onToChange, - to, - label, - disabled, -}) { - const classes = useStyles(); - - return ( -
    - - -
    - ); -} diff --git a/ui/src/components/Dropdown.jsx b/ui/src/components/Dropdown.jsx deleted file mode 100644 index 887716f77..000000000 --- a/ui/src/components/Dropdown.jsx +++ /dev/null @@ -1,36 +0,0 @@ -import React from "react"; -import { Input } from "./"; -import Autocomplete from "@material-ui/lab/Autocomplete"; -import FormControl from "@material-ui/core/FormControl"; -import InputLabel from "@material-ui/core/InputLabel"; -import CloseIcon from "@material-ui/icons/Close"; - -export default function ({ - label, - className, - style, - error, - helperText, - name, - value, - ...props -}) { - return ( - - {label && {label}} - } - renderInput={(params) => ( - - )} - value={value === undefined ? null : value} // convert undefined to null - /> - - ); -} diff --git a/ui/src/components/DropdownButton.jsx b/ui/src/components/DropdownButton.jsx deleted file mode 100644 index 3b9efc3c8..000000000 --- a/ui/src/components/DropdownButton.jsx +++ /dev/null @@ -1,75 +0,0 @@ -import React from "react"; -import Button from "@material-ui/core/Button"; -import ArrowDropDownIcon from "@material-ui/icons/ArrowDropDown"; -import ClickAwayListener from "@material-ui/core/ClickAwayListener"; -import Grow from "@material-ui/core/Grow"; -import Popper from "@material-ui/core/Popper"; -import MenuItem from "@material-ui/core/MenuItem"; -import MenuList from "@material-ui/core/MenuList"; -import { Paper } from "./"; - -export default function DropdownButton({ children, options }) { - const [open, setOpen] = React.useState(false); - const anchorRef = React.useRef(null); - - const handleToggle = () => { - setOpen((prevOpen) => !prevOpen); - }; - - const handleClose = (event) => { - if (anchorRef.current && anchorRef.current.contains(event.target)) { - return; - } - - setOpen(false); - }; - - return ( - - - - - {({ TransitionProps, placement }) => ( - - - - - {options.map(({ label, handler }, index) => ( - { - handler(event, index); - setOpen(false); - }} - > - {label} - - ))} - - - 
- - )} - - - ); -} diff --git a/ui/src/components/Heading.jsx b/ui/src/components/Heading.jsx deleted file mode 100644 index 9ecb2158e..000000000 --- a/ui/src/components/Heading.jsx +++ /dev/null @@ -1,8 +0,0 @@ -import React from "react"; -import Typography from "@material-ui/core/Typography"; - -const levelMap = ["h6", "h5", "h4", "h3", "h2", "h1"]; - -export default function ({ level = 3, ...props }) { - return ; -} diff --git a/ui/src/components/Input.jsx b/ui/src/components/Input.jsx deleted file mode 100644 index 117611045..000000000 --- a/ui/src/components/Input.jsx +++ /dev/null @@ -1,44 +0,0 @@ -import React, { useRef } from "react"; -import { TextField, InputAdornment, IconButton } from "@material-ui/core"; -import ClearIcon from "@material-ui/icons/Clear"; - -export default function ({ label, clearable, onBlur, onChange, ...props }) { - const inputRef = useRef(); - - function handleClear() { - inputRef.current.value = ""; - if (onBlur) return onBlur(""); - if (onChange) return onChange(""); - } - - function handleBlur(e) { - if (onBlur) onBlur(e.target.value); - } - - function handleChange(e) { - if (onChange) onChange(e.target.value); - } - - return ( - - - - - - ), - }} - onBlur={handleBlur} - onChange={handleChange} - {...props} - /> - ); -} diff --git a/ui/src/components/KeyValueTable.jsx b/ui/src/components/KeyValueTable.jsx deleted file mode 100644 index 1d4b99b11..000000000 --- a/ui/src/components/KeyValueTable.jsx +++ /dev/null @@ -1,67 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/styles"; -import List from "@material-ui/core/List"; -import ListItem from "@material-ui/core/ListItem"; -import ListItemText from "@material-ui/core/ListItemText"; -import _ from "lodash"; - -import { useEnv } from "../plugins/env"; -import { timestampRenderer, durationRenderer } from "../utils/helpers"; -import { customTypeRenderers } from "../plugins/customTypeRenderers"; - -const useStyles = makeStyles((theme) => ({ - value: { - 
flex: 0.7, - }, - label: { - flex: 0.3, - minWidth: "100px", - }, - labelText: { - fontWeight: "bold !important", - }, -})); - -export default function KeyValueTable({ data }) { - const classes = useStyles(); - const env = useEnv(); - return ( - - {data.map((item, index) => { - let displayValue; - const renderer = item.type ? customTypeRenderers[item.type] : null; - if (renderer) { - displayValue = renderer(item.value, data, env); - } else { - switch (item.type) { - case "date": - displayValue = - !isNaN(item.value) && item.value > 0 - ? timestampRenderer(item.value) - : "N/A"; - break; - case "duration": - displayValue = - !isNaN(item.value) && item.value > 0 - ? durationRenderer(item.value) - : "N/A"; - break; - default: - displayValue = !_.isNil(item.value) ? item.value : "N/A"; - } - } - - return ( - - - - - ); - })} - - ); -} diff --git a/ui/src/components/LinearProgress.jsx b/ui/src/components/LinearProgress.jsx deleted file mode 100644 index b53d4cd68..000000000 --- a/ui/src/components/LinearProgress.jsx +++ /dev/null @@ -1,22 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/styles"; -import clsx from "clsx"; -import LinearProgress from "@material-ui/core/LinearProgress"; - -const useStyles = makeStyles({ - progress: { - marginBottom: -4, - zIndex: 999, - }, -}); - -export default function ({ className, ...props }) { - const classes = useStyles(); - - return ( - - ); -} diff --git a/ui/src/components/NavLink.jsx b/ui/src/components/NavLink.jsx deleted file mode 100644 index ae47b24f4..000000000 --- a/ui/src/components/NavLink.jsx +++ /dev/null @@ -1,49 +0,0 @@ -import React from "react"; -import { Link as RouterLink, useHistory } from "react-router-dom"; -import { Link } from "@material-ui/core"; -import LaunchIcon from "@material-ui/icons/Launch"; -import Url from "url-parse"; -import { useEnv } from "../plugins/env"; - -// 1. Strip `navigate` from props to prevent error -// 2. 
Preserve stack param - -export default React.forwardRef((props, ref) => { - const { navigate, path, newTab, ...rest } = props; - const { stack, defaultStack } = useEnv(); - - const url = new Url(path, {}, true); - if (stack !== defaultStack) { - url.query.stack = stack; - } - - if (!newTab) { - return ( - - {rest.children} - - ); - } else { - return ( - - {rest.children} -   - - - ); - } -}); - -export function usePushHistory() { - const history = useHistory(); - const { stack, defaultStack } = useEnv(); - - return (path) => { - const url = new Url(path, {}, true); - if (stack !== defaultStack) { - url.query.stack = stack; - } - - history.push(url.toString()); - }; -} diff --git a/ui/src/components/Paper.jsx b/ui/src/components/Paper.jsx deleted file mode 100644 index 76c0cecf3..000000000 --- a/ui/src/components/Paper.jsx +++ /dev/null @@ -1,27 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/styles"; -import clsx from "clsx"; -import Paper from "@material-ui/core/Paper"; - -const useStyles = makeStyles({ - padded: { - padding: 15, - }, -}); - -export default React.forwardRef(function ( - { elevation, className, padded, ...props }, - ref -) { - const classes = useStyles(); - const internalClassName = []; - if (padded) internalClassName.push(classes.padded); - return ( - - ); -}); diff --git a/ui/src/components/Pill.jsx b/ui/src/components/Pill.jsx deleted file mode 100644 index 757b382cd..000000000 --- a/ui/src/components/Pill.jsx +++ /dev/null @@ -1,28 +0,0 @@ -import { makeStyles } from "@material-ui/styles"; -import Chip from "@material-ui/core/Chip"; - -const COLORS = { - red: "rgb(229, 9, 20)", - yellow: "rgb(251, 164, 4)", - green: "rgb(65, 185, 87)", -}; - -const useStyles = makeStyles({ - pill: { - borderColor: (props) => COLORS[props.color], - color: (props) => COLORS[props.color], - }, -}); - -export default function Pill({ color, ...props }) { - const classes = useStyles({ color }); - - return ( - - ); -} diff --git 
a/ui/src/components/PrimaryButton.jsx b/ui/src/components/PrimaryButton.jsx deleted file mode 100644 index 07ae5a92f..000000000 --- a/ui/src/components/PrimaryButton.jsx +++ /dev/null @@ -1,6 +0,0 @@ -import React from "react"; -import Button from "@material-ui/core/Button"; - -export default function (props) { - return - - - dispatch({ type: actions.TOGGLE_GRAPH_PANEL })} - > - {workflowDefState.toggleGraphPanel && ( - - )} - {!workflowDefState.toggleGraphPanel && ( - - )} - - - - - - handleMouseDown(e)} - /> -
    - {dag && } -
    - - - ); -} - -function versionTime(versionObj) { - return ( - versionObj && - timestampRenderer(versionObj.updateTime || versionObj.createTime) - ); -} diff --git a/ui/src/pages/definitions/EventHandler.jsx b/ui/src/pages/definitions/EventHandler.jsx deleted file mode 100644 index 275166bf1..000000000 --- a/ui/src/pages/definitions/EventHandler.jsx +++ /dev/null @@ -1,52 +0,0 @@ -import React from "react"; -import { NavLink, DataTable } from "../../components"; -import { makeStyles } from "@material-ui/styles"; -import Header from "./Header"; -import sharedStyles from "../styles"; -import { Helmet } from "react-helmet"; -import { useEventHandlers } from "../../data/misc"; - -const useStyles = makeStyles(sharedStyles); - -const columns = [ - { - name: "name", - renderer: (name) => ( - {name} - ), - }, - { name: "event" }, - { name: "createTime", type: "date" }, - { - name: "actions", - renderer: (val) => JSON.stringify(val.map((action) => action.action)), - }, -]; - -export default function EventHandlers() { - const classes = useStyles(); - - const { data: eventHandlers, isFetching } = useEventHandlers(); - - return ( -
    -
    - - Conductor UI - Event Handler Definitions - - -
    - {eventHandlers && ( - - )} -
    -
    - ); -} diff --git a/ui/src/pages/definitions/Header.jsx b/ui/src/pages/definitions/Header.jsx deleted file mode 100644 index 926657d7b..000000000 --- a/ui/src/pages/definitions/Header.jsx +++ /dev/null @@ -1,30 +0,0 @@ -import React from "react"; -import { Tab, Tabs, NavLink, LinearProgress, Heading } from "../../components"; -import { makeStyles } from "@material-ui/styles"; -import sharedStyles from "../styles"; - -const useStyles = makeStyles(sharedStyles); - -export default function Header({ tabIndex, loading }) { - const classes = useStyles(); - - return ( -
    - {loading && } -
    - - Definitions - - - - - - -
    -
    - ); -} diff --git a/ui/src/pages/definitions/Task.jsx b/ui/src/pages/definitions/Task.jsx deleted file mode 100644 index d543dbe95..000000000 --- a/ui/src/pages/definitions/Task.jsx +++ /dev/null @@ -1,88 +0,0 @@ -import React from "react"; -import { NavLink, DataTable, Button } from "../../components"; -import { makeStyles } from "@material-ui/styles"; -import Header from "./Header"; -import sharedStyles from "../styles"; -import { Helmet } from "react-helmet"; -import AddIcon from "@material-ui/icons/Add"; -import { useTaskDefs } from "../../data/task"; - -const useStyles = makeStyles(sharedStyles); - -const columns = [ - { - name: "name", - renderer: (name) => {name}, - }, - { name: "description", grow: 2 }, - { name: "createTime", type: "date" }, - { name: "ownerEmail" }, - { name: "inputKeys", type: "json", sortable: false }, - { name: "outputKeys", type: "json", sortable: false }, - { name: "timeoutPolicy", grow: 0.5 }, - { name: "timeoutSeconds", grow: 0.5 }, - { name: "retryCount", grow: 0.5 }, - { name: "retryLogic" }, - { name: "retryDelaySeconds", grow: 0.5 }, - { name: "responseTimeoutSeconds", grow: 0.5 }, - { name: "inputTemplate", type: "json", sortable: false }, - { name: "rateLimitPerFrequency", grow: 0.5 }, - { name: "rateLimitFrequencyInSeconds", grow: 0.5 }, - { - name: "name", - label: "Executions", - id: "executions_link", - grow: 0.5, - renderer: (name) => ( - - Query - - ), - sortable: false, - searchable: false, - }, - { name: "concurrentExecLimit" }, - { name: "pollTimeoutSeconds" }, -]; - -export default function TaskDefinitions() { - const classes = useStyles(); - const { data: tasks, isFetching } = useTaskDefs(); - - return ( -
    - - Conductor UI - Task Definitions - - -
    - -
    -
    - -
    - - {tasks && ( - - )} -
    -
    - ); -} diff --git a/ui/src/pages/definitions/Workflow.jsx b/ui/src/pages/definitions/Workflow.jsx deleted file mode 100644 index cd4d5bfad..000000000 --- a/ui/src/pages/definitions/Workflow.jsx +++ /dev/null @@ -1,148 +0,0 @@ -import React, { useMemo } from "react"; -import { NavLink, DataTable, Button } from "../../components"; -import { makeStyles } from "@material-ui/styles"; -import _ from "lodash"; -import { useQueryState } from "react-router-use-location-state"; -import { useWorkflowDefs } from "../../data/workflow"; -import Header from "./Header"; -import sharedStyles from "../styles"; -import { Helmet } from "react-helmet"; -import AddIcon from "@material-ui/icons/Add"; - -const useStyles = makeStyles(sharedStyles); - -const columns = [ - { - name: "name", - renderer: (val) => ( - {val.trim()} - ), - }, - { name: "description", grow: 2 }, - { name: "createTime", type: "date" }, - { name: "version", label: "Latest Version", grow: 0.5 }, - { name: "schemaVersion", grow: 0.5 }, - { name: "restartable", grow: 0.5 }, - { name: "workflowStatusListenerEnabled", grow: 0.5 }, - { name: "ownerEmail" }, - { name: "inputParameters", type: "json", sortable: false }, - { name: "outputParameters", type: "json", sortable: false }, - { name: "timeoutPolicy", grow: 0.5 }, - { name: "timeoutSeconds", grow: 0.5 }, - { - id: "task_types", - name: "tasks", - label: "Task Types", - searchable: "calculated", - sortable: false, - renderer: (val) => { - const taskTypeSet = new Set(); - for (let task of val) { - taskTypeSet.add(task.type); - } - return Array.from(taskTypeSet).join(", "); - }, - }, - { - id: "task_count", - name: "tasks", - label: "Tasks", - searchable: "calculated", - sortable: false, - grow: 0.5, - renderer: (val) => (_.isArray(val) ? 
val.length : 0), - }, - { - id: "executions_link", - name: "name", - label: "Executions", - sortable: false, - searchable: false, - grow: 0.5, - renderer: (name) => ( - - Query - - ), - }, -]; - -export default function WorkflowDefinitions() { - const classes = useStyles(); - - const { data, isFetching } = useWorkflowDefs(); - - const [filterParam, setFilterParam] = useQueryState("filter", ""); - const filterObj = filterParam === "" ? undefined : JSON.parse(filterParam); - - const handleFilterChange = (obj) => { - if (obj) { - setFilterParam(JSON.stringify(obj)); - } else { - setFilterParam(""); - } - }; - - const workflows = useMemo(() => { - // Extract latest versions only - if (data) { - const unique = new Map(); - const types = new Set(); - for (let workflowDef of data) { - if (!unique.has(workflowDef.name)) { - unique.set(workflowDef.name, workflowDef); - } else if (unique.get(workflowDef.name).version < workflowDef.version) { - unique.set(workflowDef.name, workflowDef); - } - - for (let task of workflowDef.tasks) { - types.add(task.type); - } - } - - return Array.from(unique.values()); - } - }, [data]); - - return ( -
    - - Conductor UI - Workflow Definitions - -
    - -
    -
    - -
    - - {workflows && ( - - )} -
    -
    - ); -} diff --git a/ui/src/pages/execution/ActionModule.jsx b/ui/src/pages/execution/ActionModule.jsx deleted file mode 100644 index 853e97ee7..000000000 --- a/ui/src/pages/execution/ActionModule.jsx +++ /dev/null @@ -1,165 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/styles"; -import { isFailedTask } from "../../utils/helpers"; -import { PrimaryButton, DropdownButton } from "../../components"; - -import StopIcon from "@material-ui/icons/Stop"; -import PauseIcon from "@material-ui/icons/Pause"; -import RestartIcon from "@material-ui/icons/SettingsBackupRestore"; -import ReplayIcon from "@material-ui/icons/Replay"; -import ResumeIcon from "@material-ui/icons/PlayArrow"; -import FlareIcon from "@material-ui/icons/Flare"; - -import { - useRestartAction, - useRestartLatestAction, - useResumeAction, - useRetryResumeSubworkflowTasksAction, - useRetryAction, - useTerminateAction, - usePauseAction, -} from "../../data/actions"; - -const useStyles = makeStyles({ - menuIcon: { - marginRight: 10, - }, -}); - -export default function ActionModule({ execution, triggerReload }) { - const classes = useStyles(); - const { workflowId, workflowDefinition } = execution; - - const restartAction = useRestartAction({ workflowId, onSuccess }); - const restartLatestAction = useRestartLatestAction({ workflowId, onSuccess }); - const retryAction = useRetryAction({ workflowId, onSuccess }); - const retryResumeSubworkflowTasksAction = - useRetryResumeSubworkflowTasksAction({ workflowId, onSuccess }); - const terminateAction = useTerminateAction({ workflowId, onSuccess }); - const resumeAction = useResumeAction({ workflowId, onSuccess }); - const pauseAction = usePauseAction({ workflowId, onSuccess }); - - const { restartable } = workflowDefinition; - - function onSuccess() { - triggerReload(); - } - - if (execution.status === "COMPLETED") { - const options = []; - if (restartable) { - options.push({ - label: ( - <> - - Restart with Current Definitions - - 
), - handler: () => restartAction.mutate(), - }); - - options.push({ - label: ( - <> - - Restart with Latest Definitions - - ), - handler: () => restartLatestAction.mutate(), - }); - } - - return Actions; - } else if (execution.status === "RUNNING") { - return ( - - - Terminate - - ), - handler: () => terminateAction.mutate(), - }, - { - label: ( - <> - - Pause - - ), - handler: () => pauseAction.mutate(), - }, - ]} - > - Actions - - ); - } else if (execution.status === "PAUSED") { - return ( - resumeAction.mutate()}> - - Resume - - ); - } else { - // FAILED, TIMED_OUT, TERMINATED - const options = []; - if (restartable) { - options.push({ - label: ( - <> - - Restart with Current Definitions - - ), - handler: () => restartAction.mutate(), - }); - - options.push({ - label: ( - <> - - Restart with Latest Definitions - - ), - handler: () => restartLatestAction.mutate(), - }); - } - - options.push({ - label: ( - <> - - Retry - From failed task - - ), - handler: () => retryAction.mutate(), - }); - - if ( - (execution.status === "FAILED" || execution.status === "TIMED_OUT") && - execution.tasks.find( - (task) => - task.workflowTask.type === "SUB_WORKFLOW" && isFailedTask(task.status) - ) - ) { - options.push({ - label: ( - <> - - Retry - Resume subworkflow - - ), - handler: () => retryResumeSubworkflowTasksAction.mutate(), - }); - } - - return Actions; - } -} diff --git a/ui/src/pages/execution/Execution.jsx b/ui/src/pages/execution/Execution.jsx deleted file mode 100644 index 8072ac5e0..000000000 --- a/ui/src/pages/execution/Execution.jsx +++ /dev/null @@ -1,309 +0,0 @@ -import React, { useMemo, useState, useEffect, useCallback } from "react"; -import { useQueryState } from "react-router-use-location-state"; -import Alert from "@material-ui/lab/Alert"; -import { - Tabs, - Tab, - NavLink, - SecondaryButton, - LinearProgress, - Heading, -} from "../../components"; -import { Tooltip } from "@material-ui/core"; -import { makeStyles } from "@material-ui/styles"; -import { 
useRouteMatch } from "react-router-dom"; -import TaskDetails from "./TaskDetails"; -import ExecutionSummary from "./ExecutionSummary"; -import ExecutionJson from "./ExecutionJson"; -import InputOutput from "./ExecutionInputOutput"; -import clsx from "clsx"; -import ActionModule from "./ActionModule"; -import IconButton from "@material-ui/core/IconButton"; -import CloseIcon from "@material-ui/icons/Close"; -import FullscreenIcon from "@material-ui/icons/Fullscreen"; -import FullscreenExitIcon from "@material-ui/icons/FullscreenExit"; -import RightPanel from "./RightPanel"; -import WorkflowDAG from "../../components/diagram/WorkflowDAG"; -import StatusBadge from "../../components/StatusBadge"; -import { Helmet } from "react-helmet"; -import sharedStyles from "../styles"; -import rison from "rison"; -import { useWorkflow } from "../../data/workflow"; - -const maxWindowWidth = window.innerWidth; -const INIT_DRAWER_WIDTH = 650; - -const useStyles = makeStyles({ - header: sharedStyles.header, - drawer: { - zIndex: 999, - position: "absolute", - top: 0, - right: 0, - bottom: 0, - width: (state) => (state.isFullWidth ? "100%" : state.drawerWidth), - }, - drawerHeader: { - display: "flex", - alignItems: "center", - padding: 10, - justifyContent: "flex-end", - height: 80, - flexShrink: 0, - boxShadow: "0 4px 8px 0 rgb(0 0 0 / 10%), 0 0 2px 0 rgb(0 0 0 / 10%)", - zIndex: 1, - backgroundColor: "#fff", - }, - dragger: { - display: (state) => (state.isFullWidth ? "none" : "block"), - width: "5px", - cursor: "ew-resize", - padding: "4px 0 0", - position: "absolute", - height: "100%", - zIndex: "100", - backgroundColor: "#f4f7f9", - }, - drawerMain: { - paddingLeft: (state) => (state.isFullWidth ? 
0 : 4), - height: "100%", - display: "flex", - flexDirection: "column", - }, - drawerContent: { - flex: 1, - backgroundColor: "#fff", - display: "flex", - flexDirection: "column", - overflow: "hidden", - }, - content: { - height: "100%", - display: "flex", - flexDirection: "column", - }, - contentShift: { - marginRight: (state) => state.drawerWidth, - }, - tabContent: { - flex: 1, - overflow: "hidden", - display: "flex", - flexDirection: "column", - }, - headerSubtitle: { - marginBottom: 20, - }, - fr: { - display: "flex", - position: "relative", - float: "right", - marginRight: 50, - marginTop: 10, - zIndex: 1, - }, - frItem: { - display: "flex", - alignItems: "center", - marginRight: 15, - }, - rightPanel: { - height: "100%", - display: "flex", - flexDirection: "column", - }, -}); - -export default function Execution() { - const match = useRouteMatch(); - - const { - data: execution, - isFetching, - refetch: refresh, - } = useWorkflow(match.params.id); - - const [isFullWidth, setIsFullWidth] = useState(false); - const [isResizing, setIsResizing] = useState(false); - const [drawerWidth, setDrawerWidth] = useState(INIT_DRAWER_WIDTH); - - const [tabIndex, setTabIndex] = useQueryState("tabIndex", 0); - const [selectedTaskJson, setSelectedTaskJson] = useQueryState("task", ""); - - const dag = useMemo( - () => (execution ? new WorkflowDAG(execution) : null), - [execution] - ); - const selectedTask = useMemo( - () => (dag && selectedTaskJson ? rison.decode(selectedTaskJson) : null), - [dag, selectedTaskJson] - ); - - const classes = useStyles({ - isFullWidth, - drawerWidth, - }); - - const handleMousemove = useCallback( - (e) => { - // we don't want to do anything if we aren't resizing. 
- if (!isResizing) { - return; - } - - // Stop highlighting - e.preventDefault(); - const offsetRight = - document.body.offsetWidth - (e.clientX - document.body.offsetLeft); - const minWidth = 0; - const maxWidth = maxWindowWidth - 100; - if (offsetRight > minWidth && offsetRight < maxWidth) { - setDrawerWidth(offsetRight); - } - }, - [isResizing] - ); - - const handleMousedown = (e) => setIsResizing(true); - - const handleSelectTask = (task) => { - setSelectedTaskJson(rison.encode(task)); - }; - - const handleClose = () => { - setSelectedTaskJson(null); - }; - - const handleFullScreen = () => { - setIsFullWidth(true); - }; - - const handleFullScreenExit = () => { - setIsFullWidth(false); - }; - - // On load and destroy only - useEffect(() => { - const mouseUp = (e) => setIsResizing(false); - - document.addEventListener("mousemove", handleMousemove); - document.addEventListener("mouseup", mouseUp); - - return () => { - document.removeEventListener("mousemove", handleMousemove); - document.removeEventListener("mouseup", mouseUp); - }; - }, [handleMousemove]); - - return ( - <> - - Conductor UI - Execution - {match.params.id} - -
    - {isFetching && } - {execution && ( - <> -
    -
    - {execution.parentWorkflowId && ( -
    - - Parent Workflow - -
    - )} - - Refresh - - -
    - - {execution.workflowType || execution.workflowName}{" "} - - - - {execution.workflowId} - - - {execution.reasonForIncompletion && ( - - {execution.reasonForIncompletion} - - )} - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} - /> - setTabIndex(3)} /> - -
    -
    - {tabIndex === 0 && ( - - )} - {tabIndex === 1 && } - {tabIndex === 2 && } - {tabIndex === 3 && } -
    - - )} -
    - {selectedTask && ( -
    -
    handleMousedown(event)} - className={classes.dragger} - /> -
    -
    - {isFullWidth ? ( - - handleFullScreenExit()}> - - - - ) : ( - - handleFullScreen()}> - - - - )} - - handleClose()}> - - - -
    -
    - -
    -
    -
    - )} - - ); -} diff --git a/ui/src/pages/execution/ExecutionInputOutput.jsx b/ui/src/pages/execution/ExecutionInputOutput.jsx deleted file mode 100644 index 65a6202dc..000000000 --- a/ui/src/pages/execution/ExecutionInputOutput.jsx +++ /dev/null @@ -1,56 +0,0 @@ -import React from "react"; -import { Paper, ReactJson } from "../../components"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - wrapper: { - margin: 30, - height: "100%", - display: "flex", - flexDirection: "column", - overflow: "hidden", - }, - column: { - display: "flex", - flexDirection: "row", - gap: 15, - flex: 2, - marginBottom: 15, - overflow: "hidden", - }, - paper: { - flex: 1, - overflow: "hidden", - }, -}); - -export default function InputOutput({ execution }) { - const classes = useStyles(); - return ( -
    -
    - - - - - - -
    - - - -
    - ); -} diff --git a/ui/src/pages/execution/ExecutionJson.jsx b/ui/src/pages/execution/ExecutionJson.jsx deleted file mode 100644 index 85eb0f490..000000000 --- a/ui/src/pages/execution/ExecutionJson.jsx +++ /dev/null @@ -1,28 +0,0 @@ -import React from "react"; -import { Paper } from "../../components"; -import ReactJson from "../../components/ReactJson"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - paper: { - margin: 30, - flex: 1, - }, - wrapper: { - flex: 1, - display: "flex", - flexDirection: "column", - }, -}); - -export default function ExecutionJson({ execution }) { - const classes = useStyles(); - - return ( -
    - - - -
    - ); -} diff --git a/ui/src/pages/execution/ExecutionSummary.jsx b/ui/src/pages/execution/ExecutionSummary.jsx deleted file mode 100644 index 37ebeacd5..000000000 --- a/ui/src/pages/execution/ExecutionSummary.jsx +++ /dev/null @@ -1,63 +0,0 @@ -import React from "react"; -import { Paper, NavLink, KeyValueTable } from "../../components"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - paper: { - margin: 30, - }, - wrapper: { - overflowY: "auto", - }, -}); - -export default function ExecutionSummary({ execution }) { - const classes = useStyles(); - - // To accommodate unexecuted tasks, read type & name out of workflowTask - const data = [ - { label: "Workflow ID", value: execution.workflowId }, - { label: "Status", value: execution.status }, - { label: "Version", value: execution.workflowVersion }, - { label: "Start Time", value: execution.startTime, type: "date" }, - { label: "End Time", value: execution.endTime, type: "date" }, - { - label: "Duration", - value: execution.endTime - execution.startTime, - type: "duration", - }, - ]; - - if (execution.parentWorkflowId) { - data.push({ - label: "Parent Workflow ID", - value: ( - - {execution.parentWorkflowId} - - ), - }); - } - - if (execution.parentWorkflowTaskId) { - data.push({ - label: "Parent Task ID", - value: execution.parentWorkflowTaskId, - }); - } - - if (execution.reasonForIncompletion) { - data.push({ - label: "Reason for Incompletion", - value: execution.reasonForIncompletion, - }); - } - - return ( -
    - - - -
    - ); -} diff --git a/ui/src/pages/execution/Legend.jsx b/ui/src/pages/execution/Legend.jsx deleted file mode 100644 index cf661fdda..000000000 --- a/ui/src/pages/execution/Legend.jsx +++ /dev/null @@ -1,120 +0,0 @@ -import React, { Component } from "react"; -import WorkflowDAG from "../../components/diagram/WorkflowDAG"; -import WorkflowGraph from "../../components/diagram/WorkflowGraph"; - -const workflowDef = { - tasks: [ - { - name: "fork_join", - taskReferenceName: "fork", - type: "FORK_JOIN", - forkTasks: [ - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkChild_grp1a", - }, - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkChild_grp1b", - }, - ], - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp2", - }, - ], - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp3", - }, - ], - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp4", - }, - ], - ], - }, - { - name: "join", - taskReferenceName: "join", - type: "JOIN", - joinOn: ["forkChild_par1", "forkChild_par2", "forkChild_ser1"], - }, - - { - name: "decision", - taskReferenceName: "decision", - type: "DECISION", - decisionCases: [ - [ - { - name: "simple_task", - type: "SIMPLE", - taskReferenceName: "completed", - }, - ], - [ - { - name: "simple_task", - type: "SIMPLE", - taskReferenceName: "failed", - }, - ], - ], - }, - { - name: "exclusive_join", - taskReferenceName: "exclusiveJoin", - type: "EXCLUSIVE_JOIN", - joinOn: ["completed", "failed"], - defaultExclusiveJoinTask: ["completed"], - }, - { - name: "subworkflow", - taskReferenceName: "subworkflow", - type: "SUB_WORKFLOW", - subworkflowParam: { name: "foo" }, - }, - { - name: "dynamic_fork", - taskReferenceName: "dynamic_fork", - type: "FORK_JOIN_DYNAMIC", - dynamicForkTasksParam: "dynamicTasks", - dynamicForkTasksInputParamName: "dynamicTasksInput", - }, - { - name: "join", - taskReferenceName: "dynamic_join", - type: 
"JOIN", - }, - ], -}; - -class Legend extends Component { - constructor() { - super(); - this.state = { - dag: new WorkflowDAG(null, workflowDef), - }; - } - render() { - const { dag } = this.state; - return ( -
    - -
    - ); - } -} - -export default Legend; diff --git a/ui/src/pages/execution/RightPanel.jsx b/ui/src/pages/execution/RightPanel.jsx deleted file mode 100644 index 4a7f6a8e0..000000000 --- a/ui/src/pages/execution/RightPanel.jsx +++ /dev/null @@ -1,170 +0,0 @@ -import React, { useState, useEffect } from "react"; -import { Tabs, Tab, ReactJson, Dropdown, Banner } from "../../components"; - -import TaskSummary from "./TaskSummary"; -import TaskLogs from "./TaskLogs"; - -import { makeStyles } from "@material-ui/styles"; -import _ from "lodash"; - -const useStyles = makeStyles({ - banner: { - margin: 15, - }, - dfSelect: { - padding: 15, - backgroundColor: "#efefef", - }, - tabContent: { - flex: 1, - overflowY: "auto", - }, -}); - -export default function RightPanel({ selectedTask, dag, onTaskChange }) { - const [tabIndex, setTabIndex] = useState(0); - - const classes = useStyles(); - - useEffect(() => { - setTabIndex(0); // Reset to Status Tab on ref change - }, [selectedTask]); - - if (!selectedTask) { - return null; - } - - const dfOptions = selectedTask ? dag.dfChildInfo(selectedTask.ref) : null; - const { ref, taskId } = selectedTask; - - let taskResult, - retryOptions = null; - const node = dag.graph.node(ref); - if (node.taskResults.length > 1) { - retryOptions = node.taskResults; - } - - if (taskId) { - taskResult = node.taskResults.find((task) => task.taskId === taskId); - } else { - taskResult = _.last(node.taskResults); - } - - return ( - <> - {dfOptions && ( -
    - { - onTaskChange({ ref: v.ref }); - }} - options={dfOptions} - disableClearable - value={dfOptions.find((opt) => opt.ref === selectedTask.ref)} - getOptionLabel={(x) => `${dropdownIcon(x.status)} ${x.ref}`} - style={{ marginBottom: 20, width: 500 }} - /> -
    - )} - - {retryOptions && ( -
    - { - onTaskChange({ - ref: taskResult.referenceTaskName, - taskId: v.taskId, - }); - }} - options={retryOptions} - value={retryOptions.find((opt) => opt.taskId === taskResult.taskId)} - getOptionLabel={(t) => - `${dropdownIcon(t.status)} Attempt ${t.retryCount} - ${t.taskId}` - } - style={{ marginBottom: 20, width: 500 }} - /> -
    - )} - - - setTabIndex(0)} /> - setTabIndex(1)} - disabled={!taskResult.status} - /> - setTabIndex(2)} - disabled={!taskResult.status} - /> - setTabIndex(3)} - disabled={!taskResult.status} - /> - setTabIndex(4)} - disabled={!taskResult.status} - /> - setTabIndex(5)} /> - -
    - {tabIndex === 0 && } - {tabIndex === 1 && ( - - )} - {tabIndex === 2 && ( - <> - {taskResult.externalOutputPayloadStoragePath && ( - - This task has externalized output. Please reference{" "} - externalOutputPayloadStoragePath for the storage - location. - - )} - - - )} - {tabIndex === 3 && } - {tabIndex === 4 && ( - - )} - {tabIndex === 5 && ( - - )} -
    - - ); -} - -function dropdownIcon(status) { - let icon; - switch (status) { - case "COMPLETED": - icon = "\u2705"; - break; // Green-checkmark - case "COMPLETED_WITH_ERRORS": - icon = "\u2757"; - break; // Exclamation - case "CANCELED": - icon = "\uD83D\uDED1"; - break; // stopsign - case "IN_PROGRESS": - case "SCHEDULED": - icon = "\u231B"; - break; // hourglass - default: - icon = "\u274C"; // red-X - } - return icon + "\u2003"; -} diff --git a/ui/src/pages/execution/TaskDetails.jsx b/ui/src/pages/execution/TaskDetails.jsx deleted file mode 100644 index 379dd23f7..000000000 --- a/ui/src/pages/execution/TaskDetails.jsx +++ /dev/null @@ -1,61 +0,0 @@ -import React, { useState } from "react"; -import { Tabs, Tab, Paper } from "../../components"; -import Timeline from "./Timeline"; -import TaskList from "./TaskList"; -import WorkflowGraph from "../../components/diagram/WorkflowGraph"; -import { makeStyles } from "@material-ui/styles"; - -const useStyles = makeStyles({ - taskWrapper: { - overflowY: "auto", - padding: 30, - height: "100%", - }, -}); - -export default function TaskDetails({ - execution, - dag, - selectedTask, - setSelectedTask, -}) { - const [tabIndex, setTabIndex] = useState(0); - const classes = useStyles(); - - return ( -
    - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} /> - - - {tabIndex === 0 && ( - - )} - {tabIndex === 1 && ( - - )} - {tabIndex === 2 && ( - - )} - -
    - ); -} diff --git a/ui/src/pages/execution/TaskList.jsx b/ui/src/pages/execution/TaskList.jsx deleted file mode 100644 index 1b3c1362e..000000000 --- a/ui/src/pages/execution/TaskList.jsx +++ /dev/null @@ -1,87 +0,0 @@ -import React from "react"; -import { Link } from "@material-ui/core"; -import { DataTable } from "../../components"; -import _ from "lodash"; - -export default function TaskList({ selectedTask, tasks, dag, onClick }) { - const taskDetailFields = [ - { name: "seq", grow: 0.2 }, - { - name: "taskId", - renderer: (taskId, row, idx) => { - return ( - handleClick(row)}> - {taskId} - - ); - }, - }, - { name: "workflowTask.name", id: "taskName", label: "Task Name" }, - { name: "referenceTaskName", label: "Ref" }, - { name: "workflowTask.type", id: "taskType", label: "Type", grow: 0.5 }, - { name: "scheduledTime", type: "date" }, - { name: "startTime", type: "date" }, - { name: "endTime", type: "date" }, - { name: "status", grow: 0.5 }, - { name: "updateTime", type: "date" }, - { name: "callbackAfterSeconds" }, - { name: "pollCount" }, - ]; - - let selectedTaskIdx = -1; - if (selectedTask) { - const { ref, taskId } = selectedTask; - if (taskId) { - selectedTaskIdx = tasks.findIndex((t) => t.taskId === taskId); - } else { - selectedTaskIdx = _.findLastIndex( - tasks, - (t) => t.referenceTaskName === ref - ); - } - } - - if (selectedTaskIdx === -1) selectedTaskIdx = null; - - function handleClick(row) { - if (!_.isEmpty(row)) { - if (onClick) { - const task = row; - const node = dag.graph.node(task.referenceTaskName); - - // If there are more than 1 task associated, use task ID - if (node.taskResults.length > 1) { - onClick({ - ref: task.referenceTaskName, - taskId: task.taskId, - }); - } else { - onClick({ - ref: task.referenceTaskName, - }); - } - } - } else { - if (onClick) onClick(null); - } - } - - return ( - - ); -} diff --git a/ui/src/pages/execution/TaskLogs.jsx b/ui/src/pages/execution/TaskLogs.jsx deleted file mode 100644 index 
ecf817824..000000000 --- a/ui/src/pages/execution/TaskLogs.jsx +++ /dev/null @@ -1,26 +0,0 @@ -import React from "react"; -import { useLogs } from "../../data/misc"; -import { DataTable, Text, LinearProgress } from "../../components"; - -export default function TaskLogs({ task }) { - const { taskId } = task; - const { data: log, isFetching } = useLogs({ taskId }); - - if (isFetching) { - return ; - } - return log && log.length > 0 ? ( - - ) : ( - - No logs available - - ); -} diff --git a/ui/src/pages/execution/TaskSummary.jsx b/ui/src/pages/execution/TaskSummary.jsx deleted file mode 100644 index 5bc41c6a2..000000000 --- a/ui/src/pages/execution/TaskSummary.jsx +++ /dev/null @@ -1,127 +0,0 @@ -import React from "react"; -import _ from "lodash"; -import { NavLink, KeyValueTable } from "../../components"; -import { useTime } from "../../hooks/useTime"; - -export default function TaskSummary({ taskResult }) { - const now = useTime(); - - // To accommodate unexecuted tasks, read type & name & ref out of workflow - const data = [ - { label: "Task Type", value: taskResult.workflowTask.type }, - { label: "Status", value: taskResult.status || "Not executed" }, - { label: "Task Name", value: taskResult.workflowTask.name }, - { - label: "Task Reference", - value: taskResult.workflowTask.taskReferenceName, - }, - ]; - - if (taskResult.domain) { - data.push({ label: "Domain", value: taskResult.domain }); - } - - if (taskResult.taskId) { - data.push({ label: "Task Execution ID", value: taskResult.taskId }); - } - - if (_.isFinite(taskResult.retryCount)) { - data.push({ label: "Retry Count", value: taskResult.retryCount }); - } - - if (taskResult.scheduledTime) { - data.push({ - label: "Scheduled Time", - value: taskResult.scheduledTime > 0 && taskResult.scheduledTime, - type: "date", - }); - } - if (taskResult.startTime) { - data.push({ - label: "Start Time", - value: taskResult.startTime > 0 && taskResult.startTime, - type: "date", - }); - } - if (taskResult.endTime) { - 
data.push({ label: "End Time", value: taskResult.endTime, type: "date" }); - } - if (taskResult.startTime && taskResult.endTime) { - data.push({ - label: "Duration", - value: - taskResult.startTime > 0 && taskResult.endTime - taskResult.startTime, - type: "duration", - }); - } - if (taskResult.startTime && taskResult.status === "IN_PROGRESS") { - data.push({ - label: "Current Elapsed Time", - value: taskResult.startTime > 0 && now - taskResult.startTime, - type: "duration", - }); - } - if (!_.isNil(taskResult.retrycount)) { - data.push({ label: "Retry Count", value: taskResult.retryCount }); - } - if (taskResult.reasonForIncompletion) { - data.push({ - label: "Reason for Incompletion", - value: taskResult.reasonForIncompletion, - }); - } - if (taskResult.workerId) { - data.push({ - label: "Worker", - value: taskResult.workerId, - type: "workerId", - }); - } - if (!_.isNil(taskResult.callbackAfterSeconds)) { - data.push({ label: "Callback After Seconds", value: taskResult.callbackAfterSeconds }); - } - if (!_.isNil(taskResult.pollCount)) { - data.push({ label: "Poll Count", value: taskResult.pollCount }); - } - if (taskResult.taskType === "DECISION") { - data.push({ - label: "Evaluated Case", - value: taskResult.outputData.caseOutput[0], - }); - } - if (taskResult.workflowTask.type === "SUB_WORKFLOW") { - data.push({ - label: "Subworkflow Definition", - value: ( - - {taskResult.workflowTask.subWorkflowParam.name}{" "} - - ), - }); - if (_.get(taskResult, "outputData.subWorkflowId")) { - data.push({ - label: "Subworkflow ID", - value: ( - - {taskResult.outputData.subWorkflowId} - - ), - }); - } - } - - if (taskResult.externalOutputPayloadStoragePath) { - data.push({ - label: "External Output", - value: taskResult.externalOutputPayloadStoragePath, - }); - } - - return ; -} diff --git a/ui/src/pages/execution/Timeline.jsx b/ui/src/pages/execution/Timeline.jsx deleted file mode 100644 index 1b2348e27..000000000 --- a/ui/src/pages/execution/Timeline.jsx +++ /dev/null @@ 
-1,136 +0,0 @@ -import React, { useMemo } from "react"; -import Timeline from "react-vis-timeline-2"; -import { timestampRenderer, durationRenderer } from "../../utils/helpers"; -import _ from "lodash"; -import "./timeline.scss"; -import ZoomOutMapIcon from "@material-ui/icons/ZoomOutMap"; -import { IconButton, Tooltip } from "@material-ui/core"; - -export default function TimelineComponent({ - dag, - tasks, - onClick, - selectedTask, -}) { - const timelineRef = React.useRef(); - - let selectedId = null; - if (selectedTask) { - if (selectedTask.taskId) { - selectedId = selectedTask.taskId; - } else { - const node = dag.graph.node(selectedTask.ref); - if (_.isEmpty(node.taskResults)) { - selectedId = null; - } else { - selectedId = _.last(node.taskResults).taskId; - } - } - } - - const { items, groups } = useMemo(() => { - const groupMap = new Map(); - for (const task of tasks) { - groupMap.set(task.referenceTaskName, { - id: task.referenceTaskName, - content: `${task.referenceTaskName} (${task.workflowTask.name})`, - }); - } - - const items = tasks - .filter((t) => t.startTime > 0 || t.endTime > 0) - .map((task) => { - const predecessors = dag.graph.predecessors(task.referenceTaskName); - - const dfParent = predecessors - ?.map((t) => dag.graph.node(t)) - .find((t) => t.type === "FORK_JOIN_DYNAMIC"); - const startTime = - task.startTime > 0 - ? new Date(task.startTime) - : new Date(task.endTime); - const endTime = - task.endTime > 0 ? new Date(task.endTime) : new Date(task.startTime); - const duration = durationRenderer( - endTime.getTime() - startTime.getTime() - ); - const retval = { - id: task.taskId, - group: task.referenceTaskName, - content: `${duration}`, - start: startTime, - end: endTime, - title: `${task.referenceTaskName} (${ - task.status - })
    ${timestampRenderer(startTime)} - ${timestampRenderer( - endTime - )}`, - className: `status_${task.status}`, - }; - - if (dfParent || task.type === "FORK_JOIN_DYNAMIC") { - //retval.subgroup=task.referenceTaskName - const gp = groupMap.get(dfParent.ref); - if (!gp.nestedGroups) { - gp.nestedGroups = []; - } - groupMap.get(task.referenceTaskName).treeLevel = 2; - gp.nestedGroups.push(task.referenceTaskName); - } - - return retval; - }); - - return { - items: items, - groups: Array.from(groupMap.values()), - }; - }, [tasks, dag]); - - const onFit = () => { - timelineRef.current.timeline.fit(); - }; - - const handleClick = (e) => { - const { group, item, what } = e; - if (group && what !== "background") { - if (_.size(dag.graph.node(group).taskResults) > 1) { - onClick({ - ref: group, - taskId: item, - }); - } else { - onClick({ ref: group }); - } - } - }; - - return ( -
    -
    - Ctrl-scroll to zoom.{" "} - - - - - -
    -
    - -
    -
    -
    - ); -} diff --git a/ui/src/pages/execution/timeline.scss b/ui/src/pages/execution/timeline.scss deleted file mode 100644 index 252bc9e12..000000000 --- a/ui/src/pages/execution/timeline.scss +++ /dev/null @@ -1,55 +0,0 @@ -@mixin barColor($colorfg, $colorbg: #fff) { - background-color: $colorbg; - border-color: $colorfg; - color: $colorfg; -} - -.vis-timeline { - border: none; -} - -.vis-panel { - &.vis-top, - &.vis-center { - border-left: none; - } -} -.vis-label { - .vis-inner { - margin-left: 5px; - } - &.vis-nested-group.vis-group-level-2 { - background: white; - } -} - -.vis-item { - &.status_COMPLETED { - @include barColor(#163e1d, #aee1b8); - } - &.status_COMPLETED_WITH_ERRORS { - @include barColor(#8b5b02, #feeac5); - } - &.status_IN_PROGRESS, - &.status_SCHEDULED { - @include barColor(#11497a, #cbe2f7); - } - //&.status_CANCELED { @include barColor(#26194b, #ded5f8); } - &.status_FAILED, - &.status_FAILED_WITH_TERMINAL_ERROR, - &.status_TIMED_OUT, - &.status_DF_PARTIAL, - &.status_CANCELED { - @include barColor(#7f050b, #f9c6c9); - } - &.status_SKIPPED { - @include barColor(gray); - } - &.vis-selected { - filter: brightness(70%); - } - .vis-item-content { - font-size: 10px; - padding: 0px 3px 0px 3px; - } -} diff --git a/ui/src/pages/executions/BulkActionModule.jsx b/ui/src/pages/executions/BulkActionModule.jsx deleted file mode 100644 index 4c9794d28..000000000 --- a/ui/src/pages/executions/BulkActionModule.jsx +++ /dev/null @@ -1,165 +0,0 @@ -import React, { useState } from "react"; -import { - Dialog, - DialogContent, - DialogActions, - DialogTitle, -} from "@material-ui/core"; -import { makeStyles } from "@material-ui/styles"; -import { - DataTable, - DropdownButton, - LinearProgress, - PrimaryButton, - Heading, -} from "../../components"; -import { - useBulkRestartAction, - useBulkRestartLatestAction, - useBulkResumeAction, - useBulkTerminateAction, - useBulkPauseAction, - useBulkRetryAction, -} from "../../data/bulkactions"; - -const useStyles = 
makeStyles({ - actionBar: { - display: "flex", - alignItems: "center", - paddingRight: 10, - "&>div, &>p": { - marginRight: 10, - }, - width: "100%", - justifyContent: "space-between", - }, -}); - -export default function BulkActionModule({ selectedRows }) { - const selectedIds = selectedRows.map((row) => row.workflowId); - const [results, setResults] = useState(); - const classes = useStyles(); - - const { mutate: pauseAction, isLoading: pauseLoading } = useBulkPauseAction({ - onSuccess, - }); - const { mutate: resumeAction, isLoading: resumeLoading } = - useBulkResumeAction({ onSuccess }); - const { mutate: restartCurrentAction, isLoading: restartCurrentLoading } = - useBulkRestartAction({ onSuccess }); - const { mutate: restartLatestAction, isLoading: restartLatestLoading } = - useBulkRestartLatestAction({ onSuccess }); - const { mutate: retryAction, isLoading: retryLoading } = useBulkRetryAction({ - onSuccess, - }); - const { mutate: terminateAction, isLoading: terminateLoading } = - useBulkTerminateAction({ onSuccess }); - - const isLoading = - pauseLoading || - resumeLoading || - restartCurrentLoading || - restartLatestLoading || - retryLoading || - terminateLoading; - - function onSuccess(data, variables, context) { - const retval = { - bulkErrorResults: Object.entries(data.bulkErrorResults).map( - ([key, value]) => ({ - workflowId: key, - message: value, - }) - ), - bulkSuccessfulResults: data.bulkSuccessfulResults.map((value) => ({ - workflowId: value, - })), - }; - setResults(retval); - } - - function handleClose() { - setResults(null); - } - - return ( -
    - {selectedRows.length} Workflows Selected. - pauseAction({ body: JSON.stringify(selectedIds) }), - }, - { - label: "Resume", - handler: () => resumeAction({ body: JSON.stringify(selectedIds) }), - }, - { - label: "Restart with current definitions", - handler: () => - restartCurrentAction({ body: JSON.stringify(selectedIds) }), - }, - { - label: "Restart with latest definitions", - handler: () => - restartLatestAction({ body: JSON.stringify(selectedIds) }), - }, - { - label: "Retry", - handler: () => retryAction({ body: JSON.stringify(selectedIds) }), - }, - { - label: "Terminate", - handler: () => - terminateAction({ body: JSON.stringify(selectedIds) }), - }, - ]} - > - Bulk Action - - {(results || isLoading) && ( - - - - Batch Actions - - {isLoading && } - - - {results && ( - - - - - )} - - - Close - - - )} -
    - ); -} diff --git a/ui/src/pages/executions/ResultsTable.jsx b/ui/src/pages/executions/ResultsTable.jsx deleted file mode 100644 index afe3f49f8..000000000 --- a/ui/src/pages/executions/ResultsTable.jsx +++ /dev/null @@ -1,169 +0,0 @@ -import React, { useState, useRef, useEffect } from "react"; -import { - Paper, - NavLink, - DataTable, - LinearProgress, - TertiaryButton, - Text, -} from "../../components"; -import { Alert, AlertTitle } from "@material-ui/lab"; -import { makeStyles } from "@material-ui/styles"; -import BulkActionModule from "./BulkActionModule"; -import executionsStyles from "./executionsStyles"; -import sharedStyles from "../styles"; - -const useStyles = makeStyles({ - ...executionsStyles, - ...sharedStyles, -}); - -const executionFields = [ - { name: "startTime", type: "date" }, - { - name: "workflowId", - grow: 2, - renderer: (workflowId) => ( - {workflowId} - ), - }, - { name: "workflowType", grow: 2 }, - { name: "version", grow: 0.5 }, - { name: "correlationId", grow: 2 }, - { name: "updateTime", type: "date" }, - { name: "endTime", type: "date" }, - { name: "status" }, - { name: "input", grow: 2, wrap: true }, - { name: "output", grow: 2 }, - { name: "reasonForIncompletion" }, - { name: "executionTime" }, - { name: "event" }, - { name: "failedReferenceTaskNames", grow: 2 }, - { name: "externalInputPayloadStoragePath" }, - { name: "externalOutputPayloadStoragePath" }, - { name: "priority" }, -]; - -function ShowMore({ - rowsPerPage, - rowCount, - onChangePage, - onChangeRowsPerPage, - currentPage, -}) { - return ( -
    - onChangePage(currentPage + 1)}> - Show More Results - -
    - ); -} - -export default function ResultsTable({ - resultObj, - error, - busy, - page, - rowsPerPage, - sort, - setPage, - setSort, - setRowsPerPage, - showMore, -}) { - const classes = useStyles(); - let totalHits = 0; - if (resultObj) { - if (resultObj.totalHits) { - totalHits = resultObj.totalHits; - } else { - if (resultObj.results) { - totalHits = resultObj.results.length; - } - } - } - const [selectedRows, setSelectedRows] = useState([]); - const [toggleCleared, setToggleCleared] = useState(false); - const tableRef = useRef(null); - - const defaultSortField = sort ? sort.split(":")[0] : null; - const defaultSortDirection = sort ? sort.split(":")[1] : null; - - useEffect(() => { - setSelectedRows([]); - setToggleCleared((t) => !t); - }, [resultObj]); - - return ( - - {busy && } - {error && ( - - Query Failed - {error.message} - - )} - {!resultObj && !error && ( - - Click "Search" to submit query. - - )} - {resultObj && ( - 0 && ` Page ${page} of ${totalHits}`} - data={resultObj.results} - columns={executionFields} - defaultShowColumns={[ - "startTime", - "workflowType", - "workflowId", - "endTime", - "status", - ]} - localStorageKey="executionsTable" - keyField="workflowId" - paginationServer - paginationTotalRows={totalHits} - paginationDefaultPage={page} - paginationPerPage={rowsPerPage} - onChangeRowsPerPage={(rowsPerPage) => setRowsPerPage(rowsPerPage)} - onChangePage={(page) => setPage(page)} - sortServer - defaultSortField={defaultSortField} - defaultSortAsc={defaultSortDirection === "ASC"} - onSort={(column, sortDirection) => { - setSort(column.id, sortDirection); - }} - selectableRows - contextComponent={ - - } - onSelectedRowsChange={({ selectedRows }) => - setSelectedRows(selectedRows) - } - clearSelectedRows={toggleCleared} - customStyles={{ - header: { - style: { - overflow: "visible", - }, - }, - contextMenu: { - style: { - display: "none", - }, - activeStyle: { - display: "flex", - }, - }, - }} - paginationComponent={showMore ? 
ShowMore : null} - /> - )} - - ); -} diff --git a/ui/src/pages/executions/SearchTabs.jsx b/ui/src/pages/executions/SearchTabs.jsx deleted file mode 100644 index 13d00f48a..000000000 --- a/ui/src/pages/executions/SearchTabs.jsx +++ /dev/null @@ -1,11 +0,0 @@ -import React from "react"; -import { Tab, Tabs, NavLink } from "../../components"; - -export default function SearchTabs({ tabIndex }) { - return ( - - - - - ); -} diff --git a/ui/src/pages/executions/TaskSearch.jsx b/ui/src/pages/executions/TaskSearch.jsx deleted file mode 100644 index b5fdf6cb8..000000000 --- a/ui/src/pages/executions/TaskSearch.jsx +++ /dev/null @@ -1,227 +0,0 @@ -import React, { useState, useMemo } from "react"; -import _ from "lodash"; -import { FormControl, Grid, InputLabel } from "@material-ui/core"; -import { - Paper, - PrimaryButton, - Heading, - Dropdown, - Input, -} from "../../components"; - -import { useTaskSearch, useTaskNames } from "../../data/task"; -import { useWorkflowNames } from "../../data/workflow"; -import DateRangePicker from "../../components/DateRangePicker"; -import { useQueryState } from "react-router-use-location-state"; -import SearchTabs from "./SearchTabs"; -import ResultsTable from "./ResultsTable"; -import { DEFAULT_ROWS_PER_PAGE } from "../../components/DataTable"; - -import { makeStyles } from "@material-ui/styles"; -import clsx from "clsx"; -import executionsStyles from "./executionsStyles"; -import sharedStyles from "../styles"; - -const useStyles = makeStyles({ - ...executionsStyles, - ...sharedStyles, -}); - -const DEFAULT_SORT = "startTime:DESC"; -const MS_IN_DAY = 86400000; - -export default function TaskSearchPanel() { - const classes = useStyles(); - - const [workflowType, setWorkflowType] = useQueryState("workflowType", []); - const [tasks, setTasks] = useQueryState("tasks", []); - const [taskId, setTaskId] = useQueryState("taskId", ""); - const [startFrom, setStartFrom] = useQueryState("startFrom", ""); - const [startTo, setStartTo] = 
useQueryState("startTo", ""); - const [freeText, setFreeText] = useQueryState("taskText", ""); - const [lookback, setLookback] = useQueryState("lookback", ""); - const [sort, setSort] = useQueryState("sort", DEFAULT_SORT); - const [queryFT, setQueryFT] = useState(buildQuery); - - // For dropdowns - const workflowNames = useWorkflowNames(); - const taskNames = useTaskNames(); - - const searchReady = !( - _.isEmpty(workflowType) && - _.isEmpty(tasks) && - _.isEmpty(taskId) && - _.isEmpty(freeText) - ); - - const { data, isFetching, fetchNextPage, refetch } = useTaskSearch({ - sort, - query: queryFT.query, - freeText: queryFT.freeText, - rowsPerPage: DEFAULT_ROWS_PER_PAGE, - searchReady, - }); - const results = useMemo( - () => - data - ? [].concat.apply( - [], - data.pages.map((page) => page.results) - ) - : [], - [data] - ); - - function buildQuery() { - const clauses = []; - if (!_.isEmpty(workflowType)) { - clauses.push(`workflowType IN (${workflowType.join(",")})`); - } - if (!_.isEmpty(taskId)) { - clauses.push(`taskId="${taskId}"`); - } - if (!_.isEmpty(tasks)) { - clauses.push(`taskType IN (${tasks.join(",")})`); - } - if (!_.isEmpty(lookback)) { - clauses.push(`startTime>${new Date().getTime() - lookback * MS_IN_DAY}`); - } - if (!_.isEmpty(startFrom)) { - clauses.push(`startTime>${new Date(startFrom).getTime()}`); - } - if (!_.isEmpty(startTo)) { - clauses.push(`startTime<${new Date(startTo).getTime()}`); - } - - return { - query: clauses.join(" AND "), - freeText: _.isEmpty(freeText) ? 
"*" : freeText, - }; - } - - function handleSearch() { - const oldQuery = queryFT; - const newQuery = buildQuery(); - setQueryFT(newQuery); - - if (oldQuery === newQuery) { - refetch(); - } - } - - function handlePage(page) { - fetchNextPage(); - } - - function handleSort(changedColumn, direction) { - setSort(`${changedColumn}:${direction.toUpperCase()}`); - } - - const handleLookback = (val) => { - setStartFrom(""); - setStartTo(""); - setLookback(val); - }; - - const handleStartFrom = (val) => { - setLookback(""); - setStartFrom(val); - }; - - const handleStartTo = (val) => { - setLookback(""); - setStartTo(val); - }; - - return ( -
    - - Workflow Executions - - - - - - - setWorkflowType(val)} - value={workflowType} - /> - - - - - - setTasks(val)} - value={tasks} - /> - - - - - - - - - - - - - -   - Search - - - - - -
    - ); -} diff --git a/ui/src/pages/executions/WorkflowSearch.jsx b/ui/src/pages/executions/WorkflowSearch.jsx deleted file mode 100644 index f89c25edd..000000000 --- a/ui/src/pages/executions/WorkflowSearch.jsx +++ /dev/null @@ -1,228 +0,0 @@ -import React, { useState } from "react"; -import _ from "lodash"; -import { FormControl, Grid, InputLabel } from "@material-ui/core"; -import { - Paper, - Heading, - PrimaryButton, - Dropdown, - Input, -} from "../../components"; - -import { workflowStatuses } from "../../utils/constants"; -import { useQueryState } from "react-router-use-location-state"; -import SearchTabs from "./SearchTabs"; -import ResultsTable from "./ResultsTable"; -import DateRangePicker from "../../components/DateRangePicker"; -import { DEFAULT_ROWS_PER_PAGE } from "../../components/DataTable"; -import { useWorkflowSearch, useWorkflowNames } from "../../data/workflow"; - -import { makeStyles } from "@material-ui/styles"; -import clsx from "clsx"; -import executionsStyles from "./executionsStyles"; -import sharedStyles from "../styles"; - -const useStyles = makeStyles({ - ...executionsStyles, - ...sharedStyles, -}); - -const DEFAULT_SORT = "startTime:DESC"; -const MS_IN_DAY = 86400000; - -export default function WorkflowPanel() { - const classes = useStyles(); - - const [freeText, setFreeText] = useQueryState("freeText", ""); - const [status, setStatus] = useQueryState("status", []); - const [workflowType, setWorkflowType] = useQueryState("workflowType", []); - const [workflowId, setWorkflowId] = useQueryState("workflowId", ""); - const [startFrom, setStartFrom] = useQueryState("startFrom", ""); - const [startTo, setStartTo] = useQueryState("startTo", ""); - const [lookback, setLookback] = useQueryState("lookback", ""); - - const [page, setPage] = useQueryState("page", 1); - const [rowsPerPage, setRowsPerPage] = useQueryState( - "rowsPerPage", - DEFAULT_ROWS_PER_PAGE - ); - const [sort, setSort] = useQueryState("sort", DEFAULT_SORT); - const 
[queryFT, setQueryFT] = useState(buildQuery); - - const { - data: resultObj, - error, - isFetching, - refetch, - } = useWorkflowSearch({ - page, - rowsPerPage, - sort, - query: queryFT.query, - freeText: queryFT.freeText, - }); - - // For dropdown - const workflowNames = useWorkflowNames(); - - function buildQuery() { - const clauses = []; - if (!_.isEmpty(workflowType)) { - clauses.push(`workflowType IN (${workflowType.join(",")})`); - } - if (!_.isEmpty(workflowId)) { - clauses.push(`workflowId="${workflowId}"`); - } - if (!_.isEmpty(status)) { - clauses.push(`status IN (${status.join(",")})`); - } - if (!_.isEmpty(lookback)) { - clauses.push(`startTime>${new Date().getTime() - lookback * MS_IN_DAY}`); - } - if (!_.isEmpty(startFrom)) { - clauses.push(`startTime>${new Date(startFrom).getTime()}`); - } - if (!_.isEmpty(startTo)) { - clauses.push(`startTime<${new Date(startTo).getTime()}`); - } - - return { - query: clauses.join(" AND "), - freeText: _.isEmpty(freeText) ? "*" : freeText, - }; - } - - function doSearch() { - setPage(1); - const oldQueryFT = queryFT; - const newQueryFT = buildQuery(); - setQueryFT(newQueryFT); - - // Only force refetch if query didn't change. Else let react-query detect difference and refetch automatically - if (_.isEqual(oldQueryFT, newQueryFT)) { - refetch(); - } - } - - const handlePage = (page) => { - setPage(page); - }; - - const handleSort = (changedColumn, direction) => { - const sort = `${changedColumn}:${direction.toUpperCase()}`; - setPage(1); - setSort(sort); - }; - - const handleRowsPerPage = (rowsPerPage) => { - setPage(1); - setRowsPerPage(rowsPerPage); - }; - - const handleLookback = (val) => { - setStartFrom(""); - setStartTo(""); - setLookback(val); - }; - - const handleStartFrom = (val) => { - setLookback(""); - setStartFrom(val); - }; - - const handleStartTo = (val) => { - setLookback(""); - setStartTo(val); - }; - - return ( -
    - - Workflow Executions - - - - - - setWorkflowType(val)} - value={workflowType} - /> - - - - - - setStatus(val)} - value={status} - /> - - - - - - - - - - - - - -   - Search - - - - - -
    - ); -} diff --git a/ui/src/pages/executions/executionsStyles.js b/ui/src/pages/executions/executionsStyles.js deleted file mode 100644 index 54cfd19e4..000000000 --- a/ui/src/pages/executions/executionsStyles.js +++ /dev/null @@ -1,23 +0,0 @@ -export default { - clickSearch: { - width: "100%", - padding: 30, - display: "block", - textAlign: "center", - }, - paper: { - marginBottom: 30, - }, - heading: { - marginBottom: 30, - }, - controls: { - padding: 15, - }, - popupIndicator: { - backgroundColor: "red", - }, - banner: { - marginBottom: 15, - }, -}; diff --git a/ui/src/pages/kitchensink/DataTableDemo.jsx b/ui/src/pages/kitchensink/DataTableDemo.jsx deleted file mode 100644 index b388a366e..000000000 --- a/ui/src/pages/kitchensink/DataTableDemo.jsx +++ /dev/null @@ -1,18 +0,0 @@ -import React from "react"; -import data from "./sampleMovieData"; -import { DataTable } from "../../components"; - -export default () => { - const columns = [ - { name: "title" }, - { name: "director" }, - { name: "year" }, - { name: "plot", grow: 0.5 }, - ]; - - return ( - <> - - - ); -}; diff --git a/ui/src/pages/kitchensink/DiagramTest.jsx b/ui/src/pages/kitchensink/DiagramTest.jsx deleted file mode 100644 index 2eea1a358..000000000 --- a/ui/src/pages/kitchensink/DiagramTest.jsx +++ /dev/null @@ -1,120 +0,0 @@ -import React, { Component } from "react"; -import WorkflowDAG from "../../components/diagram/WorkflowDAG"; -import WorkflowGraph from "../../components/diagram/WorkflowGraph"; -const workflowDef = { - tasks: [ - { - name: "fork_join", - taskReferenceName: "fork", - type: "FORK_JOIN", - forkTasks: [ - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkChild_grp1a", - }, - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkChild_grp1b", - }, - ], - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp2", - }, - ], - [ - { - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp3", - }, - ], - [ - 
{ - name: "forkChild", - type: "SIMPLE", - taskReferenceName: "forkchild_grp4", - }, - ], - ], - }, - { - name: "join", - taskReferenceName: "join", - type: "JOIN", - joinOn: ["forkChild_par1", "forkChild_par2", "forkChild_ser1"], - }, - - { - name: "decision", - taskReferenceName: "decision", - type: "DECISION", - decisionCases: [ - [ - { - name: "simple_task", - type: "SIMPLE", - taskReferenceName: "completed", - }, - ], - [ - { - name: "simple_task", - type: "SIMPLE", - taskReferenceName: "failed", - }, - ], - ], - }, - { - name: "exclusive_join", - taskReferenceName: "exclusiveJoin", - type: "EXCLUSIVE_JOIN", - joinOn: ["completed", "failed"], - defaultExclusiveJoinTask: ["completed"], - }, - { - name: "subworkflow", - taskReferenceName: "subworkflow", - type: "SUB_WORKFLOW", - subworkflowParam: { name: "foo" }, - }, - { - name: "dynamic_fork", - taskReferenceName: "dynamic_fork", - type: "FORK_JOIN_DYNAMIC", - dynamicForkTasksParam: "dynamicTasks", - dynamicForkTasksInputParamName: "dynamicTasksInput", - }, - { - name: "join", - taskReferenceName: "dynamic_join", - type: "JOIN", - }, - ], -}; - -class DiagramTest extends Component { - constructor() { - super(); - this.state = { - dag: new WorkflowDAG(null, workflowDef), - }; - } - - render() { - const { dag } = this.state; - return ( -
    - -
    - ); - } -} - -export default DiagramTest; diff --git a/ui/src/pages/kitchensink/EnhancedTable.jsx b/ui/src/pages/kitchensink/EnhancedTable.jsx deleted file mode 100644 index 4ab9b2657..000000000 --- a/ui/src/pages/kitchensink/EnhancedTable.jsx +++ /dev/null @@ -1,348 +0,0 @@ -import React from "react"; -import PropTypes from "prop-types"; -import clsx from "clsx"; -import { lighten, makeStyles } from "@material-ui/core/styles"; -import { - Table, - TableBody, - TableCell, - TableContainer, - TableHead, - TablePagination, - TableRow, - TableSortLabel, - Toolbar, - Checkbox, - FormControlLabel, - Switch, -} from "@material-ui/core"; -import { Paper, Text, Heading } from "../../components"; - -function createData(name, calories, fat, carbs, protein) { - return { name, calories, fat, carbs, protein }; -} - -const rows = [ - createData("Cupcake", 305, 3.7, 67, 4.3), - createData("Donut", 452, 25.0, 51, 4.9), - createData("Eclair", 262, 16.0, 24, 6.0), - createData("Frozen yoghurt", 159, 6.0, 24, 4.0), - createData("Gingerbread", 356, 16.0, 49, 3.9), - createData("Honeycomb", 408, 3.2, 87, 6.5), - createData("Ice cream sandwich", 237, 9.0, 37, 4.3), - createData("Jelly Bean", 375, 0.0, 94, 0.0), - createData("KitKat", 518, 26.0, 65, 7.0), - createData("Lollipop", 392, 0.2, 98, 0.0), - createData("Marshmallow", 318, 0, 81, 2.0), - createData("Nougat", 360, 19.0, 9, 37.0), - createData("Oreo", 437, 18.0, 63, 4.0), -]; - -function descendingComparator(a, b, orderBy) { - if (b[orderBy] < a[orderBy]) { - return -1; - } - if (b[orderBy] > a[orderBy]) { - return 1; - } - return 0; -} - -function getComparator(order, orderBy) { - return order === "desc" - ? 
(a, b) => descendingComparator(a, b, orderBy) - : (a, b) => -descendingComparator(a, b, orderBy); -} - -function stableSort(array, comparator) { - const stabilizedThis = array.map((el, index) => [el, index]); - stabilizedThis.sort((a, b) => { - const order = comparator(a[0], b[0]); - if (order !== 0) return order; - return a[1] - b[1]; - }); - return stabilizedThis.map((el) => el[0]); -} - -const headCells = [ - { - id: "name", - numeric: false, - disablePadding: true, - label: "Dessert (100g serving)", - }, - { id: "calories", numeric: true, disablePadding: false, label: "Calories" }, - { id: "fat", numeric: true, disablePadding: false, label: "Fat (g)" }, - { id: "carbs", numeric: true, disablePadding: false, label: "Carbs (g)" }, - { id: "protein", numeric: true, disablePadding: false, label: "Protein (g)" }, -]; - -function EnhancedTableHead(props) { - const { - classes, - onSelectAllClick, - order, - orderBy, - numSelected, - rowCount, - onRequestSort, - } = props; - const createSortHandler = (property) => (event) => { - onRequestSort(event, property); - }; - - return ( - - - - 0 && numSelected < rowCount} - checked={rowCount > 0 && numSelected === rowCount} - onChange={onSelectAllClick} - inputProps={{ "aria-label": "select all desserts" }} - /> - - {headCells.map((headCell) => ( - - - {headCell.label} - {orderBy === headCell.id ? ( - - {order === "desc" ? "sorted descending" : "sorted ascending"} - - ) : null} - - - ))} - - - ); -} - -EnhancedTableHead.propTypes = { - classes: PropTypes.object.isRequired, - numSelected: PropTypes.number.isRequired, - onRequestSort: PropTypes.func.isRequired, - onSelectAllClick: PropTypes.func.isRequired, - order: PropTypes.oneOf(["asc", "desc"]).isRequired, - orderBy: PropTypes.string.isRequired, - rowCount: PropTypes.number.isRequired, -}; - -const useToolbarStyles = makeStyles((theme) => ({ - root: { - paddingLeft: theme.spacing(2), - paddingRight: theme.spacing(1), - }, - highlight: - theme.palette.type === "light" - ? 
{ - color: theme.palette.secondary.main, - backgroundColor: lighten(theme.palette.secondary.light, 0.85), - } - : { - color: theme.palette.text.primary, - backgroundColor: theme.palette.secondary.dark, - }, - title: { - flex: "1 1 100%", - }, -})); - -const EnhancedTableToolbar = (props) => { - const classes = useToolbarStyles(); - const { numSelected } = props; - - return ( - 0, - })} - > - {numSelected > 0 ? {numSelected} selected : null} - - ); -}; - -EnhancedTableToolbar.propTypes = { - numSelected: PropTypes.number.isRequired, -}; - -const useStyles = makeStyles((theme) => ({ - root: { - width: "100%", - }, - paper: { - width: "100%", - marginBottom: theme.spacing(2), - }, - table: { - minWidth: 750, - }, - visuallyHidden: { - border: 0, - clip: "rect(0 0 0 0)", - height: 1, - margin: -1, - overflow: "hidden", - padding: 0, - position: "absolute", - top: 20, - width: 1, - }, -})); - -export default function EnhancedTable() { - const classes = useStyles(); - const [order, setOrder] = React.useState("asc"); - const [orderBy, setOrderBy] = React.useState("calories"); - const [selected, setSelected] = React.useState([]); - const [page, setPage] = React.useState(0); - const [dense, setDense] = React.useState(false); - const [rowsPerPage, setRowsPerPage] = React.useState(5); - - const handleRequestSort = (event, property) => { - const isAsc = orderBy === property && order === "asc"; - setOrder(isAsc ? 
"desc" : "asc"); - setOrderBy(property); - }; - - const handleSelectAllClick = (event) => { - if (event.target.checked) { - const newSelecteds = rows.map((n) => n.name); - setSelected(newSelecteds); - return; - } - setSelected([]); - }; - - const handleClick = (event, name) => { - const selectedIndex = selected.indexOf(name); - let newSelected = []; - - if (selectedIndex === -1) { - newSelected = newSelected.concat(selected, name); - } else if (selectedIndex === 0) { - newSelected = newSelected.concat(selected.slice(1)); - } else if (selectedIndex === selected.length - 1) { - newSelected = newSelected.concat(selected.slice(0, -1)); - } else if (selectedIndex > 0) { - newSelected = newSelected.concat( - selected.slice(0, selectedIndex), - selected.slice(selectedIndex + 1) - ); - } - - setSelected(newSelected); - }; - - const handleChangePage = (event, newPage) => { - setPage(newPage); - }; - - const handleChangeRowsPerPage = (event) => { - setRowsPerPage(parseInt(event.target.value, 10)); - setPage(0); - }; - - const handleChangeDense = (event) => { - setDense(event.target.checked); - }; - - const isSelected = (name) => selected.indexOf(name) !== -1; - - const emptyRows = - rowsPerPage - Math.min(rowsPerPage, rows.length - page * rowsPerPage); - - return ( -
    - - - Native MUI Table - - - - - - - {stableSort(rows, getComparator(order, orderBy)) - .slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage) - .map((row, index) => { - const isItemSelected = isSelected(row.name); - const labelId = `enhanced-table-checkbox-${index}`; - - return ( - handleClick(event, row.name)} - role="checkbox" - aria-checked={isItemSelected} - tabIndex={-1} - key={row.name} - selected={isItemSelected} - > - - - - - {row.name} - - {row.calories} - {row.fat} - {row.carbs} - {row.protein} - - ); - })} - {emptyRows > 0 && ( - - - - )} - -
    -
    - - } - label="Dense padding" - /> -
    -
    - ); -} diff --git a/ui/src/pages/kitchensink/Examples.jsx b/ui/src/pages/kitchensink/Examples.jsx deleted file mode 100644 index f1966e7b2..000000000 --- a/ui/src/pages/kitchensink/Examples.jsx +++ /dev/null @@ -1,3 +0,0 @@ -export default function Examples() { - return null; -} diff --git a/ui/src/pages/kitchensink/Gantt.jsx b/ui/src/pages/kitchensink/Gantt.jsx deleted file mode 100644 index 028adc858..000000000 --- a/ui/src/pages/kitchensink/Gantt.jsx +++ /dev/null @@ -1,85 +0,0 @@ -import React, { Component } from "react"; -import Timeline from "react-vis-timeline-2"; -import moment from "moment"; -import { Paper } from "../../components"; - -function createItem(id, startTime) { - return { - id: id, - group: id, - content: "item " + id, - start: startTime, - end: startTime.clone().add(1, "minute"), - }; -} - -const initialGroups = [], - initialItems = []; -const now = moment().minutes(0).seconds(0).milliseconds(0); -const itemCount = 20; -for (let i = 0; i < itemCount; i++) { - const start = now.clone().add(Math.random() * 200, "minutes"); - initialGroups.push({ id: i, content: "group " + i }); - initialItems.push(createItem(i, start)); -} - -export default class Gantt extends Component { - timelineRef = React.createRef(); - - constructor(props) { - super(props); - - this.state = { - selectedIds: [], - }; - } - - /* - onAddItem = () => { - var nextId = this.timelineRef.current.items.length + 1; - const group = Math.floor(Math.random() * groupCount); - this.timelineRef.current.items.add(createItem(nextId, group, names[group], moment())); - this.timelineRef.current.timeline.fit(); - }; - */ - - onFit = () => { - this.timelineRef.current.timeline.fit(); - }; - - render() { - return ( - -

    This example demonstrate using groups.

    - - -
    - -
    -
    -
    - ); - } - - clickHandler = () => { - const { group } = this.props; - var items = this.timelineRef.current.items.get(); - const selectedIds = items - .filter((item) => item.group === group) - .map((item) => item.id); - this.setState({ - selectedIds, - }); - }; -} diff --git a/ui/src/pages/kitchensink/KitchenSink.jsx b/ui/src/pages/kitchensink/KitchenSink.jsx deleted file mode 100644 index 66e2cbf87..000000000 --- a/ui/src/pages/kitchensink/KitchenSink.jsx +++ /dev/null @@ -1,449 +0,0 @@ -import React, { useState } from "react"; -import { Form, Formik } from "formik"; -import { - Checkbox, - Grid, - Switch, - MenuItem, - InputLabel, - FormControl, - IconButton, - Toolbar, -} from "@material-ui/core"; -import DeleteIcon from "@material-ui/icons/Delete"; -import { - PrimaryButton, - SecondaryButton, - TertiaryButton, - ButtonGroup, - SplitButton, - DropdownButton, - Paper, - Tab, - Tabs, - NavLink, - Heading, - Text, - Input, - Select, - Button, -} from "../../components"; -import ZoomInIcon from "@material-ui/icons/ZoomIn"; -import * as Yup from "yup"; -import EnhancedTable from "./EnhancedTable"; -import DataTableDemo from "./DataTableDemo"; -import top100Films from "./sampleMovieData"; -import Dropdown from "../../components/Dropdown"; -import sharedStyles from "../styles"; -import { makeStyles } from "@material-ui/styles"; -import clsx from "clsx"; -import FormikInput from "../../components/formik/FormikInput"; -import FormikJsonInput from "../../components/formik/FormikJsonInput"; - -const useStyles = makeStyles(sharedStyles); - -export default function KitchenSink() { - const classes = useStyles(); - return ( -
    - - -

    This is a Hawkins-like theme based on vanilla Material-UI.

    -
    - - - - - Gantt - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    -
    - ); -} - -const FormikSection = () => { - const [formState, setFormState] = useState(); - return ( - - Formik - setFormState(values)} - > -
    - - - - - -
    - -
    {JSON.stringify(formState)}
    -
    -
    - ); -}; - -const ToolbarSection = () => { - return ( - - - Toolbar - - - - Label - {" "} - - - - - - - ); -}; - -const HeadingSection = () => { - return ( - - Heading Level Zero - Heading Level One - Heading Level Two - Heading Level Three - Heading Level Four - Heading Level Five - Text Level Zero - Text Level One - Text Level Two - -
    Default <div>
    -
    Default <p>
    -
    - ); -}; - -const TabsSection = () => { - const [tabIndex, setTabIndex] = useState(0); - return ( - - - Tabs - - - Page Level - - - Full Width - - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} /> - setTabIndex(3)} /> - -
    Tab content {tabIndex}
    -
    - - - Fixed Width - - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} /> - setTabIndex(3)} /> - -
    Tab content {tabIndex}
    -
    - - - Contextual - - - - Full Width - - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} /> - setTabIndex(3)} /> - -
    Tab content {tabIndex}
    -
    - - Fixed Width - - - - - setTabIndex(0)} /> - setTabIndex(1)} /> - setTabIndex(2)} /> - setTabIndex(3)} /> - -
    Tab content {tabIndex}
    -
    -
    - ); -}; - -const Buttons = () => ( - - - Button - - - - - Primary - - - Secondary - - - Tertiary - - - - - - alert("you clicked 1"), - }, - { - label: "Squash and merge", - handler: () => alert("you clicked 2"), - }, - { - label: "Rebase and merge", - handler: () => alert("you clicked 3"), - }, - ]} - onPrimaryClick={() => alert("main button")} - > - Split Button - - - - alert("you clicked 1"), - }, - { - label: "Squash and merge", - handler: () => alert("you clicked 2"), - }, - { - label: "Rebase and merge", - handler: () => alert("you clicked 3"), - }, - ]} - > - Dropdown Button - - - - - - - - - - - - -); - -const Toggles = () => { - const [toggleChecked, setToggleChecked] = useState(false); - - return ( - - - Toggle - - setToggleChecked(!toggleChecked)} - color="primary" - /> - - ); -}; - -const Checkboxes = () => { - const [toggleChecked, setToggleChecked] = useState(false); - - return ( - - - Checkbox - - setToggleChecked(!toggleChecked)} - color="primary" - /> - - ); -}; - -const Inputs = () => ( - - - Input - - - - - - - - - - Input Label via FormControl/InputLabel - - - - - -); - -const Selects = () => { - const [value, setValue] = useState(10); - return ( - - - Select - - - - - - - - - option.title} - /> - - option.title} - /> - - option.title} - /> - - option.title} - defaultValue={[top100Films[13]]} - style={{ width: 500 }} - filterSelectedOptions - /> - - ); -}; diff --git a/ui/src/pages/kitchensink/sampleMovieData.js b/ui/src/pages/kitchensink/sampleMovieData.js deleted file mode 100644 index ad26468e9..000000000 --- a/ui/src/pages/kitchensink/sampleMovieData.js +++ /dev/null @@ -1,1773 +0,0 @@ -// Author https://github.com/yegor-sytnyk/movies-list - -export default [ - { - id: 1, - title: "Beetlejuice", - year: "1988", - runtime: "92", - genres: ["Comedy", "Fantasy"], - director: "Tim Burton", - actors: "Alec Baldwin, Geena Davis, Annie McEnroe, Maurice Page", - plot: 'A couple of recently deceased ghosts contract the services of a 
"bio-exorcist" in order to remove the obnoxious new owners of their house.', - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTUwODE3MDE0MV5BMl5BanBnXkFtZTgwNTk1MjI4MzE@._V1_SX300.jpg", - }, - { - id: 2, - title: "The Cotton Club", - year: "1984", - runtime: "127", - genres: ["Crime", "Drama", "Music"], - director: "Francis Ford Coppola", - actors: "Richard Gere, Gregory Hines, Diane Lane, Lonette McKee", - plot: "The Cotton Club was a famous night club in Harlem. The story follows the people that visited the club, those that ran it, and is peppered with the Jazz music that made it so famous.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTU5ODAyNzA4OV5BMl5BanBnXkFtZTcwNzYwNTIzNA@@._V1_SX300.jpg", - }, - { - id: 3, - title: "The Shawshank Redemption", - year: "1994", - runtime: "142", - genres: ["Crime", "Drama"], - director: "Frank Darabont", - actors: "Tim Robbins, Morgan Freeman, Bob Gunton, William Sadler", - plot: "Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BODU4MjU4NjIwNl5BMl5BanBnXkFtZTgwMDU2MjEyMDE@._V1_SX300.jpg", - }, - { - id: 4, - title: "Crocodile Dundee", - year: "1986", - runtime: "97", - genres: ["Adventure", "Comedy"], - director: "Peter Faiman", - actors: "Paul Hogan, Linda Kozlowski, John Meillon, David Gulpilil", - plot: "An American reporter goes to the Australian outback to meet an eccentric crocodile poacher and invites him to New York City.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTg0MTU1MTg4NF5BMl5BanBnXkFtZTgwMDgzNzYxMTE@._V1_SX300.jpg", - }, - { - id: 5, - title: "Valkyrie", - year: "2008", - runtime: "121", - genres: ["Drama", "History", "Thriller"], - director: "Bryan Singer", - actors: "Tom Cruise, Kenneth Branagh, Bill Nighy, Tom Wilkinson", - plot: "A dramatization of the 20 July assassination and political coup 
plot by desperate renegade German Army officers against Hitler during World War II.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTg3Njc2ODEyN15BMl5BanBnXkFtZTcwNTAwMzc3NA@@._V1_SX300.jpg", - }, - { - id: 6, - title: "Ratatouille", - year: "2007", - runtime: "111", - genres: ["Animation", "Comedy", "Family"], - director: "Brad Bird, Jan Pinkava", - actors: "Patton Oswalt, Ian Holm, Lou Romano, Brian Dennehy", - plot: "A rat who can cook makes an unusual alliance with a young kitchen worker at a famous restaurant.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTMzODU0NTkxMF5BMl5BanBnXkFtZTcwMjQ4MzMzMw@@._V1_SX300.jpg", - }, - { - id: 7, - title: "City of God", - year: "2002", - runtime: "130", - genres: ["Crime", "Drama"], - director: "Fernando Meirelles, Kátia Lund", - actors: - "Alexandre Rodrigues, Leandro Firmino, Phellipe Haagensen, Douglas Silva", - plot: "Two boys growing up in a violent neighborhood of Rio de Janeiro take different paths: one becomes a photographer, the other a drug dealer.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA4ODQ3ODkzNV5BMl5BanBnXkFtZTYwOTc4NDI3._V1_SX300.jpg", - }, - { - id: 8, - title: "Memento", - year: "2000", - runtime: "113", - genres: ["Mystery", "Thriller"], - director: "Christopher Nolan", - actors: "Guy Pearce, Carrie-Anne Moss, Joe Pantoliano, Mark Boone Junior", - plot: "A man juggles searching for his wife's murderer and keeping his short-term memory loss from being an obstacle.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNThiYjM3MzktMDg3Yy00ZWQ3LTk3YWEtN2M0YmNmNWEwYTE3XkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 9, - title: "The Intouchables", - year: "2011", - runtime: "112", - genres: ["Biography", "Comedy", "Drama"], - director: "Olivier Nakache, Eric Toledano", - actors: "François Cluzet, Omar Sy, Anne Le Ny, Audrey Fleurot", - plot: "After he becomes a quadriplegic from a paragliding accident, an aristocrat 
hires a young man from the projects to be his caregiver.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTYxNDA3MDQwNl5BMl5BanBnXkFtZTcwNTU4Mzc1Nw@@._V1_SX300.jpg", - }, - { - id: 10, - title: "Stardust", - year: "2007", - runtime: "127", - genres: ["Adventure", "Family", "Fantasy"], - director: "Matthew Vaughn", - actors: "Ian McKellen, Bimbo Hart, Alastair MacIntosh, David Kelly", - plot: "In a countryside town bordering on a magical land, a young man makes a promise to his beloved that he'll retrieve a fallen star by venturing into the magical realm.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjkyMTE1OTYwNF5BMl5BanBnXkFtZTcwMDIxODYzMw@@._V1_SX300.jpg", - }, - { - id: 11, - title: "Apocalypto", - year: "2006", - runtime: "139", - genres: ["Action", "Adventure", "Drama"], - director: "Mel Gibson", - actors: - "Rudy Youngblood, Dalia Hernández, Jonathan Brewer, Morris Birdyellowhead", - plot: "As the Mayan kingdom faces its decline, the rulers insist the key to prosperity is to build more temples and offer human sacrifices. 
Jaguar Paw, a young man captured for sacrifice, flees to avoid his fate.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNTM1NjYyNTY5OV5BMl5BanBnXkFtZTcwMjgwNTMzMQ@@._V1_SX300.jpg", - }, - { - id: 12, - title: "Taxi Driver", - year: "1976", - runtime: "113", - genres: ["Crime", "Drama"], - director: "Martin Scorsese", - actors: "Diahnne Abbott, Frank Adu, Victor Argo, Gino Ardito", - plot: "A mentally unstable Vietnam War veteran works as a night-time taxi driver in New York City where the perceived decadence and sleaze feeds his urge for violent action, attempting to save a preadolescent prostitute in the process.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNGQxNDgzZWQtZTNjNi00M2RkLWExZmEtNmE1NjEyZDEwMzA5XkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 13, - title: "No Country for Old Men", - year: "2007", - runtime: "122", - genres: ["Crime", "Drama", "Thriller"], - director: "Ethan Coen, Joel Coen", - actors: "Tommy Lee Jones, Javier Bardem, Josh Brolin, Woody Harrelson", - plot: "Violence and mayhem ensue after a hunter stumbles upon a drug deal gone wrong and more than two million dollars in cash near the Rio Grande.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA5Njk3MjM4OV5BMl5BanBnXkFtZTcwMTc5MTE1MQ@@._V1_SX300.jpg", - }, - { - id: 14, - title: "Planet 51", - year: "2009", - runtime: "91", - genres: ["Animation", "Adventure", "Comedy"], - director: "Jorge Blanco, Javier Abad, Marcos Martínez", - actors: "Jessica Biel, John Cleese, Gary Oldman, Dwayne Johnson", - plot: "An alien civilization is invaded by Astronaut Chuck Baker, who believes that the planet was uninhabited. 
Wanted by the military, Baker must get back to his ship before it goes into orbit without him.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTUyOTAyNTA5Ml5BMl5BanBnXkFtZTcwODU2OTM0Mg@@._V1_SX300.jpg", - }, - { - id: 15, - title: "Looper", - year: "2012", - runtime: "119", - genres: ["Action", "Crime", "Drama"], - director: "Rian Johnson", - actors: "Joseph Gordon-Levitt, Bruce Willis, Emily Blunt, Paul Dano", - plot: "In 2074, when the mob wants to get rid of someone, the target is sent into the past, where a hired gun awaits - someone like Joe - who one day learns the mob wants to 'close the loop' by sending back Joe's future self for assassination.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTY3NTY0MjEwNV5BMl5BanBnXkFtZTcwNTE3NDA1OA@@._V1_SX300.jpg", - }, - { - id: 16, - title: "Corpse Bride", - year: "2005", - runtime: "77", - genres: ["Animation", "Drama", "Family"], - director: "Tim Burton, Mike Johnson", - actors: "Johnny Depp, Helena Bonham Carter, Emily Watson, Tracey Ullman", - plot: "When a shy groom practices his wedding vows in the inadvertent presence of a deceased young woman, she rises from the grave assuming he has married her.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTk1MTY1NjU4MF5BMl5BanBnXkFtZTcwNjIzMTEzMw@@._V1_SX300.jpg", - }, - { - id: 17, - title: "The Third Man", - year: "1949", - runtime: "93", - genres: ["Film-Noir", "Mystery", "Thriller"], - director: "Carol Reed", - actors: "Joseph Cotten, Alida Valli, Orson Welles, Trevor Howard", - plot: "Pulp novelist Holly Martins travels to shadowy, postwar Vienna, only to find himself investigating the mysterious death of an old friend, Harry Lime.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjMwNzMzMTQ0Ml5BMl5BanBnXkFtZTgwNjExMzUwNjE@._V1_SX300.jpg", - }, - { - id: 18, - title: "The Beach", - year: "2000", - runtime: "119", - genres: ["Adventure", "Drama", "Romance"], - director: "Danny Boyle", - actors: - "Leonardo DiCaprio, 
Daniel York, Patcharawan Patarakijjanon, Virginie Ledoyen", - plot: "Twenty-something Richard travels to Thailand and finds himself in possession of a strange map. Rumours state that it leads to a solitary beach paradise, a tropical bliss - excited and intrigued, he sets out to find it.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BN2ViYTFiZmUtOTIxZi00YzIxLWEyMzUtYjQwZGNjMjNhY2IwXkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 19, - title: "Scarface", - year: "1983", - runtime: "170", - genres: ["Crime", "Drama"], - director: "Brian De Palma", - actors: - "Al Pacino, Steven Bauer, Michelle Pfeiffer, Mary Elizabeth Mastrantonio", - plot: "In Miami in 1980, a determined Cuban immigrant takes over a drug cartel and succumbs to greed.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjAzOTM4MzEwNl5BMl5BanBnXkFtZTgwMzU1OTc1MDE@._V1_SX300.jpg", - }, - { - id: 20, - title: "Sid and Nancy", - year: "1986", - runtime: "112", - genres: ["Biography", "Drama", "Music"], - director: "Alex Cox", - actors: "Gary Oldman, Chloe Webb, David Hayman, Debby Bishop", - plot: "Morbid biographical story of Sid Vicious, bassist with British punk group the Sex Pistols, and his girlfriend Nancy Spungen. 
When the Sex Pistols break up after their fateful US tour, ...", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjExNjA5NzY4M15BMl5BanBnXkFtZTcwNjQ2NzI5NA@@._V1_SX300.jpg", - }, - { - id: 21, - title: "Black Swan", - year: "2010", - runtime: "108", - genres: ["Drama", "Thriller"], - director: "Darren Aronofsky", - actors: "Natalie Portman, Mila Kunis, Vincent Cassel, Barbara Hershey", - plot: 'A committed dancer wins the lead role in a production of Tchaikovsky\'s "Swan Lake" only to find herself struggling to maintain her sanity.', - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNzY2NzI4OTE5MF5BMl5BanBnXkFtZTcwMjMyNDY4Mw@@._V1_SX300.jpg", - }, - { - id: 22, - title: "Inception", - year: "2010", - runtime: "148", - genres: ["Action", "Adventure", "Sci-Fi"], - director: "Christopher Nolan", - actors: "Leonardo DiCaprio, Joseph Gordon-Levitt, Ellen Page, Tom Hardy", - plot: "A thief, who steals corporate secrets through use of dream-sharing technology, is given the inverse task of planting an idea into the mind of a CEO.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjAxMzY3NjcxNF5BMl5BanBnXkFtZTcwNTI5OTM0Mw@@._V1_SX300.jpg", - }, - { - id: 23, - title: "The Deer Hunter", - year: "1978", - runtime: "183", - genres: ["Drama", "War"], - director: "Michael Cimino", - actors: "Robert De Niro, John Cazale, John Savage, Christopher Walken", - plot: "An in-depth examination of the ways in which the U.S. 
Vietnam War impacts and disrupts the lives of people in a small industrial town in Pennsylvania.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTYzYmRmZTQtYjk2NS00MDdlLTkxMDAtMTE2YTM2ZmNlMTBkXkEyXkFqcGdeQXVyNjU0OTQ0OTY@._V1_SX300.jpg", - }, - { - id: 24, - title: "Chasing Amy", - year: "1997", - runtime: "113", - genres: ["Comedy", "Drama", "Romance"], - director: "Kevin Smith", - actors: "Ethan Suplee, Ben Affleck, Scott Mosier, Jason Lee", - plot: "Holden and Banky are comic book artists. Everything's going good for them until they meet Alyssa, also a comic book artist. Holden falls for her, but his hopes are crushed when he finds out she's gay.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BZDM3MTg2MGUtZDM0MC00NzMwLWE5NjItOWFjNjA2M2I4YzgxXkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 25, - title: "Django Unchained", - year: "2012", - runtime: "165", - genres: ["Drama", "Western"], - director: "Quentin Tarantino", - actors: "Jamie Foxx, Christoph Waltz, Leonardo DiCaprio, Kerry Washington", - plot: "With the help of a German bounty hunter, a freed slave sets out to rescue his wife from a brutal Mississippi plantation owner.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjIyNTQ5NjQ1OV5BMl5BanBnXkFtZTcwODg1MDU4OA@@._V1_SX300.jpg", - }, - { - id: 26, - title: "The Silence of the Lambs", - year: "1991", - runtime: "118", - genres: ["Crime", "Drama", "Thriller"], - director: "Jonathan Demme", - actors: - "Jodie Foster, Lawrence A. Bonney, Kasi Lemmons, Lawrence T. Wrentz", - plot: "A young F.B.I. 
cadet must confide in an incarcerated and manipulative killer to receive his help on catching another serial killer who skins his victims.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQ2NzkzMDI4OF5BMl5BanBnXkFtZTcwMDA0NzE1NA@@._V1_SX300.jpg", - }, - { - id: 27, - title: "American Beauty", - year: "1999", - runtime: "122", - genres: ["Drama", "Romance"], - director: "Sam Mendes", - actors: "Kevin Spacey, Annette Bening, Thora Birch, Wes Bentley", - plot: "A sexually frustrated suburban father has a mid-life crisis after becoming infatuated with his daughter's best friend.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjM4NTI5NzYyNV5BMl5BanBnXkFtZTgwNTkxNTYxMTE@._V1_SX300.jpg", - }, - { - id: 28, - title: "Snatch", - year: "2000", - runtime: "102", - genres: ["Comedy", "Crime"], - director: "Guy Ritchie", - actors: "Benicio Del Toro, Dennis Farina, Vinnie Jones, Brad Pitt", - plot: "Unscrupulous boxing promoters, violent bookmakers, a Russian gangster, incompetent amateur robbers, and supposedly Jewish jewelers fight to track down a priceless stolen diamond.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTA2NDYxOGYtYjU1Mi00Y2QzLTgxMTQtMWI1MGI0ZGQ5MmU4XkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 29, - title: "Midnight Express", - year: "1978", - runtime: "121", - genres: ["Crime", "Drama", "Thriller"], - director: "Alan Parker", - actors: "Brad Davis, Irene Miracle, Bo Hopkins, Paolo Bonacelli", - plot: "Billy Hayes, an American college student, is caught smuggling drugs out of Turkey and thrown into prison.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQyMDA5MzkyOF5BMl5BanBnXkFtZTgwOTYwNTcxMTE@._V1_SX300.jpg", - }, - { - id: 30, - title: "Pulp Fiction", - year: "1994", - runtime: "154", - genres: ["Crime", "Drama"], - director: "Quentin Tarantino", - actors: "Tim Roth, Amanda Plummer, Laura Lovelace, John Travolta", - plot: "The lives of two mob hit men, a boxer, 
a gangster's wife, and a pair of diner bandits intertwine in four tales of violence and redemption.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTkxMTA5OTAzMl5BMl5BanBnXkFtZTgwNjA5MDc3NjE@._V1_SX300.jpg", - }, - { - id: 31, - title: "Lock, Stock and Two Smoking Barrels", - year: "1998", - runtime: "107", - genres: ["Comedy", "Crime"], - director: "Guy Ritchie", - actors: "Jason Flemyng, Dexter Fletcher, Nick Moran, Jason Statham", - plot: "A botched card game in London triggers four friends, thugs, weed-growers, hard gangsters, loan sharks and debt collectors to collide with each other in a series of unexpected events, all for the sake of weed, cash and two antique shotguns.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTAyN2JmZmEtNjAyMy00NzYwLThmY2MtYWQ3OGNhNjExMmM4XkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 32, - title: "Lucky Number Slevin", - year: "2006", - runtime: "110", - genres: ["Crime", "Drama", "Mystery"], - director: "Paul McGuigan", - actors: "Josh Hartnett, Bruce Willis, Lucy Liu, Morgan Freeman", - plot: "A case of mistaken identity lands Slevin into the middle of a war being plotted by two of the city's most rival crime bosses: The Rabbi and The Boss. 
Slevin is under constant surveillance by relentless Detective Brikowski as well as the infamous assassin Goodkat and finds himself having to hatch his own ingenious plot to get them before they get him.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMzc1OTEwMTk4OF5BMl5BanBnXkFtZTcwMTEzMDQzMQ@@._V1_SX300.jpg", - }, - { - id: 33, - title: "Rear Window", - year: "1954", - runtime: "112", - genres: ["Mystery", "Thriller"], - director: "Alfred Hitchcock", - actors: "James Stewart, Grace Kelly, Wendell Corey, Thelma Ritter", - plot: "A wheelchair-bound photographer spies on his neighbours from his apartment window and becomes convinced one of them has committed murder.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNGUxYWM3M2MtMGM3Mi00ZmRiLWE0NGQtZjE5ODI2OTJhNTU0XkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 34, - title: "Pan's Labyrinth", - year: "2006", - runtime: "118", - genres: ["Drama", "Fantasy", "War"], - director: "Guillermo del Toro", - actors: "Ivana Baquero, Sergi López, Maribel Verdú, Doug Jones", - plot: "In the falangist Spain of 1944, the bookish young stepdaughter of a sadistic army officer escapes into an eerie but captivating fantasy world.", - posterUrl: "", - }, - { - id: 35, - title: "Shutter Island", - year: "2010", - runtime: "138", - genres: ["Mystery", "Thriller"], - director: "Martin Scorsese", - actors: "Leonardo DiCaprio, Mark Ruffalo, Ben Kingsley, Max von Sydow", - plot: "In 1954, a U.S. 
marshal investigates the disappearance of a murderess who escaped from a hospital for the criminally insane.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTMxMTIyNzMxMV5BMl5BanBnXkFtZTcwOTc4OTI3Mg@@._V1_SX300.jpg", - }, - { - id: 36, - title: "Reservoir Dogs", - year: "1992", - runtime: "99", - genres: ["Crime", "Drama", "Thriller"], - director: "Quentin Tarantino", - actors: "Harvey Keitel, Tim Roth, Michael Madsen, Chris Penn", - plot: "After a simple jewelry heist goes terribly wrong, the surviving criminals begin to suspect that one of them is a police informant.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNjE5ZDJiZTQtOGE2YS00ZTc5LTk0OGUtOTg2NjdjZmVlYzE2XkEyXkFqcGdeQXVyMzM4MjM0Nzg@._V1_SX300.jpg", - }, - { - id: 37, - title: "The Shining", - year: "1980", - runtime: "146", - genres: ["Drama", "Horror"], - director: "Stanley Kubrick", - actors: "Jack Nicholson, Shelley Duvall, Danny Lloyd, Scatman Crothers", - plot: "A family heads to an isolated hotel for the winter where an evil and spiritual presence influences the father into violence, while his psychic son sees horrific forebodings from the past and of the future.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BODMxMjE3NTA4Ml5BMl5BanBnXkFtZTgwNDc0NTIxMDE@._V1_SX300.jpg", - }, - { - id: 38, - title: "Midnight in Paris", - year: "2011", - runtime: "94", - genres: ["Comedy", "Fantasy", "Romance"], - director: "Woody Allen", - actors: "Owen Wilson, Rachel McAdams, Kurt Fuller, Mimi Kennedy", - plot: "While on a trip to Paris with his fiancée's family, a nostalgic screenwriter finds himself mysteriously going back to the 1920s everyday at midnight.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTM4NjY1MDQwMl5BMl5BanBnXkFtZTcwNTI3Njg3NA@@._V1_SX300.jpg", - }, - { - id: 39, - title: "Les Misérables", - year: "2012", - runtime: "158", - genres: ["Drama", "Musical", "Romance"], - director: "Tom Hooper", - actors: "Hugh Jackman, Russell Crowe, 
Anne Hathaway, Amanda Seyfried", - plot: "In 19th-century France, Jean Valjean, who for decades has been hunted by the ruthless policeman Javert after breaking parole, agrees to care for a factory worker's daughter. The decision changes their lives forever.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTQ4NDI3NDg4M15BMl5BanBnXkFtZTcwMjY5OTI1OA@@._V1_SX300.jpg", - }, - { - id: 40, - title: "L.A. Confidential", - year: "1997", - runtime: "138", - genres: ["Crime", "Drama", "Mystery"], - director: "Curtis Hanson", - actors: "Kevin Spacey, Russell Crowe, Guy Pearce, James Cromwell", - plot: "As corruption grows in 1950s LA, three policemen - one strait-laced, one brutal, and one sleazy - investigate a series of murders with their own brand of justice.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNWEwNDhhNWUtYWMzNi00ZTNhLWFiZDAtMjBjZmJhMTU0ZTY2XkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 41, - title: "Moneyball", - year: "2011", - runtime: "133", - genres: ["Biography", "Drama", "Sport"], - director: "Bennett Miller", - actors: "Brad Pitt, Jonah Hill, Philip Seymour Hoffman, Robin Wright", - plot: "Oakland A's general manager Billy Beane's successful attempt to assemble a baseball team on a lean budget by employing computer-generated analysis to acquire new players.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjAxOTU3Mzc1M15BMl5BanBnXkFtZTcwMzk1ODUzNg@@._V1_SX300.jpg", - }, - { - id: 42, - title: "The Hangover", - year: "2009", - runtime: "100", - genres: ["Comedy"], - director: "Todd Phillips", - actors: "Bradley Cooper, Ed Helms, Zach Galifianakis, Justin Bartha", - plot: "Three buddies wake up from a bachelor party in Las Vegas, with no memory of the previous night and the bachelor missing. 
They make their way around the city in order to find their friend before his wedding.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTU1MDA1MTYwMF5BMl5BanBnXkFtZTcwMDcxMzA1Mg@@._V1_SX300.jpg", - }, - { - id: 43, - title: "The Great Beauty", - year: "2013", - runtime: "141", - genres: ["Drama"], - director: "Paolo Sorrentino", - actors: "Toni Servillo, Carlo Verdone, Sabrina Ferilli, Carlo Buccirosso", - plot: "Jep Gambardella has seduced his way through the lavish nightlife of Rome for decades, but after his 65th birthday and a shock from the past, Jep looks past the nightclubs and parties to find a timeless landscape of absurd, exquisite beauty.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQ0ODg1OTQ2Nl5BMl5BanBnXkFtZTgwNTc2MDY1MDE@._V1_SX300.jpg", - }, - { - id: 44, - title: "Gran Torino", - year: "2008", - runtime: "116", - genres: ["Drama"], - director: "Clint Eastwood", - actors: "Clint Eastwood, Christopher Carley, Bee Vang, Ahney Her", - plot: "Disgruntled Korean War veteran Walt Kowalski sets out to reform his neighbor, a Hmong teenager who tried to steal Kowalski's prized possession: a 1972 Gran Torino.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTQyMTczMTAxMl5BMl5BanBnXkFtZTcwOTc1ODE0Mg@@._V1_SX300.jpg", - }, - { - id: 45, - title: "Mary and Max", - year: "2009", - runtime: "92", - genres: ["Animation", "Comedy", "Drama"], - director: "Adam Elliot", - actors: "Toni Collette, Philip Seymour Hoffman, Barry Humphries, Eric Bana", - plot: "A tale of friendship between two unlikely pen pals: Mary, a lonely, eight-year-old girl living in the suburbs of Melbourne, and Max, a forty-four-year old, severely obese man living in New York.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQ1NDIyNTA1Nl5BMl5BanBnXkFtZTcwMjc2Njk3OA@@._V1_SX300.jpg", - }, - { - id: 46, - title: "Flight", - year: "2012", - runtime: "138", - genres: ["Drama", "Thriller"], - director: "Robert Zemeckis", - 
actors: - "Nadine Velazquez, Denzel Washington, Carter Cabassa, Adam C. Edwards", - plot: "An airline pilot saves almost all his passengers on his malfunctioning airliner which eventually crashed, but an investigation into the accident reveals something troubling.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTUxMjI1OTMxNl5BMl5BanBnXkFtZTcwNjc3NTY1OA@@._V1_SX300.jpg", - }, - { - id: 47, - title: "One Flew Over the Cuckoo's Nest", - year: "1975", - runtime: "133", - genres: ["Drama"], - director: "Milos Forman", - actors: "Michael Berryman, Peter Brocco, Dean R. Brooks, Alonzo Brown", - plot: "A criminal pleads insanity after getting into trouble again and once in the mental institution rebels against the oppressive nurse and rallies up the scared patients.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BYmJkODkwOTItZThjZC00MTE0LWIxNzQtYTM3MmQwMGI1OWFiXkEyXkFqcGdeQXVyNjUwNzk3NDc@._V1_SX300.jpg", - }, - { - id: 48, - title: "Requiem for a Dream", - year: "2000", - runtime: "102", - genres: ["Drama"], - director: "Darren Aronofsky", - actors: "Ellen Burstyn, Jared Leto, Jennifer Connelly, Marlon Wayans", - plot: "The drug-induced utopias of four Coney Island people are shattered when their addictions run deep.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTkzODMzODYwOF5BMl5BanBnXkFtZTcwODM2NjA2NQ@@._V1_SX300.jpg", - }, - { - id: 49, - title: "The Truman Show", - year: "1998", - runtime: "103", - genres: ["Comedy", "Drama", "Sci-Fi"], - director: "Peter Weir", - actors: "Jim Carrey, Laura Linney, Noah Emmerich, Natascha McElhone", - plot: "An insurance salesman/adjuster discovers his entire life is actually a television show.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMDIzODcyY2EtMmY2MC00ZWVlLTgwMzAtMjQwOWUyNmJjNTYyXkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 50, - title: "The Artist", - year: "2011", - runtime: "100", - genres: ["Comedy", "Drama", 
"Romance"], - director: "Michel Hazanavicius", - actors: "Jean Dujardin, Bérénice Bejo, John Goodman, James Cromwell", - plot: "A silent movie star meets a young dancer, but the arrival of talking pictures sends their careers in opposite directions.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMzk0NzQxMTM0OV5BMl5BanBnXkFtZTcwMzU4MDYyNQ@@._V1_SX300.jpg", - }, - { - id: 51, - title: "Forrest Gump", - year: "1994", - runtime: "142", - genres: ["Comedy", "Drama"], - director: "Robert Zemeckis", - actors: - "Tom Hanks, Rebecca Williams, Sally Field, Michael Conner Humphreys", - plot: "Forrest Gump, while not intelligent, has accidentally been present at many historic moments, but his true love, Jenny Curran, eludes him.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BYThjM2MwZGMtMzg3Ny00NGRkLWE4M2EtYTBiNWMzOTY0YTI4XkEyXkFqcGdeQXVyNDYyMDk5MTU@._V1_SX300.jpg", - }, - { - id: 52, - title: "The Hobbit: The Desolation of Smaug", - year: "2013", - runtime: "161", - genres: ["Adventure", "Fantasy"], - director: "Peter Jackson", - actors: "Ian McKellen, Martin Freeman, Richard Armitage, Ken Stott", - plot: "The dwarves, along with Bilbo Baggins and Gandalf the Grey, continue their quest to reclaim Erebor, their homeland, from Smaug. 
Bilbo Baggins is in possession of a mysterious and magical ring.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMzU0NDY0NDEzNV5BMl5BanBnXkFtZTgwOTIxNDU1MDE@._V1_SX300.jpg", - }, - { - id: 53, - title: "Vicky Cristina Barcelona", - year: "2008", - runtime: "96", - genres: ["Drama", "Romance"], - director: "Woody Allen", - actors: - "Rebecca Hall, Scarlett Johansson, Christopher Evan Welch, Chris Messina", - plot: "Two girlfriends on a summer holiday in Spain become enamored with the same painter, unaware that his ex-wife, with whom he has a tempestuous relationship, is about to re-enter the picture.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTU2NDQ4MTg2MV5BMl5BanBnXkFtZTcwNDUzNjU3MQ@@._V1_SX300.jpg", - }, - { - id: 54, - title: "Slumdog Millionaire", - year: "2008", - runtime: "120", - genres: ["Drama", "Romance"], - director: "Danny Boyle, Loveleen Tandan", - actors: "Dev Patel, Saurabh Shukla, Anil Kapoor, Rajendranath Zutshi", - plot: 'A Mumbai teen reflects on his upbringing in the slums when he is accused of cheating on the Indian Version of "Who Wants to be a Millionaire?"', - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTU2NTA5NzI0N15BMl5BanBnXkFtZTcwMjUxMjYxMg@@._V1_SX300.jpg", - }, - { - id: 55, - title: "Lost in Translation", - year: "2003", - runtime: "101", - genres: ["Drama"], - director: "Sofia Coppola", - actors: - "Scarlett Johansson, Bill Murray, Akiko Takeshita, Kazuyoshi Minamimagoe", - plot: "A faded movie star and a neglected young woman form an unlikely bond after crossing paths in Tokyo.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTI2NDI5ODk4N15BMl5BanBnXkFtZTYwMTI3NTE3._V1_SX300.jpg", - }, - { - id: 56, - title: "Match Point", - year: "2005", - runtime: "119", - genres: ["Drama", "Romance", "Thriller"], - director: "Woody Allen", - actors: - "Jonathan Rhys Meyers, Alexander Armstrong, Paul Kaye, Matthew Goode", - plot: "At a turning point in his life, a 
former tennis pro falls for an actress who happens to be dating his friend and soon-to-be brother-in-law.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTMzNzY4MzE5NF5BMl5BanBnXkFtZTcwMzQ1MDMzMQ@@._V1_SX300.jpg", - }, - { - id: 57, - title: "Psycho", - year: "1960", - runtime: "109", - genres: ["Horror", "Mystery", "Thriller"], - director: "Alfred Hitchcock", - actors: "Anthony Perkins, Vera Miles, John Gavin, Janet Leigh", - plot: "A Phoenix secretary embezzles $40,000 from her employer's client, goes on the run, and checks into a remote motel run by a young man under the domination of his mother.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMDI3OWRmOTEtOWJhYi00N2JkLTgwNGItMjdkN2U0NjFiZTYwXkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 58, - title: "North by Northwest", - year: "1959", - runtime: "136", - genres: ["Action", "Adventure", "Crime"], - director: "Alfred Hitchcock", - actors: "Cary Grant, Eva Marie Saint, James Mason, Jessie Royce Landis", - plot: "A hapless New York advertising executive is mistaken for a government agent by a group of foreign spies, and is pursued across the country while he looks for a way to survive.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjQwMTQ0MzgwNl5BMl5BanBnXkFtZTgwNjc4ODE4MzE@._V1_SX300.jpg", - }, - { - id: 59, - title: "Madagascar: Escape 2 Africa", - year: "2008", - runtime: "89", - genres: ["Animation", "Action", "Adventure"], - director: "Eric Darnell, Tom McGrath", - actors: "Ben Stiller, Chris Rock, David Schwimmer, Jada Pinkett Smith", - plot: "The animals try to fly back to New York City, but crash-land on an African wildlife refuge, where Alex is reunited with his parents.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjExMDA4NDcwMl5BMl5BanBnXkFtZTcwODAxNTQ3MQ@@._V1_SX300.jpg", - }, - { - id: 60, - title: "Despicable Me 2", - year: "2013", - runtime: "98", - genres: ["Animation", "Adventure", "Comedy"], - 
director: "Pierre Coffin, Chris Renaud", - actors: "Steve Carell, Kristen Wiig, Benjamin Bratt, Miranda Cosgrove", - plot: "When Gru, the world's most super-bad turned super-dad has been recruited by a team of officials to stop lethal muscle and a host of Gru's own, He has to fight back with new gadgetry, cars, and more minion madness.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjExNjAyNTcyMF5BMl5BanBnXkFtZTgwODQzMjQ3MDE@._V1_SX300.jpg", - }, - { - id: 61, - title: "Downfall", - year: "2004", - runtime: "156", - genres: ["Biography", "Drama", "History"], - director: "Oliver Hirschbiegel", - actors: - "Bruno Ganz, Alexandra Maria Lara, Corinna Harfouch, Ulrich Matthes", - plot: "Traudl Junge, the final secretary for Adolf Hitler, tells of the Nazi dictator's final days in his Berlin bunker at the end of WWII.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM1OTI1MjE2Nl5BMl5BanBnXkFtZTcwMTEwMzc4NA@@._V1_SX300.jpg", - }, - { - id: 62, - title: "Madagascar", - year: "2005", - runtime: "86", - genres: ["Animation", "Adventure", "Comedy"], - director: "Eric Darnell, Tom McGrath", - actors: "Ben Stiller, Chris Rock, David Schwimmer, Jada Pinkett Smith", - plot: "Spoiled by their upbringing with no idea what wild life is really like, four animals from New York Central Zoo escape, unwittingly assisted by four absconding penguins, and find themselves in Madagascar, among a bunch of merry lemurs", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY4NDUwMzQxMF5BMl5BanBnXkFtZTcwMDgwNjgyMQ@@._V1_SX300.jpg", - }, - { - id: 63, - title: "Madagascar 3: Europe's Most Wanted", - year: "2012", - runtime: "93", - genres: ["Animation", "Adventure", "Comedy"], - director: "Eric Darnell, Tom McGrath, Conrad Vernon", - actors: "Ben Stiller, Chris Rock, David Schwimmer, Jada Pinkett Smith", - plot: "Alex, Marty, Gloria and Melman are still fighting to get home to their beloved Big Apple. 
Their journey takes them through Europe where they find the perfect cover: a traveling circus, which they reinvent - Madagascar style.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM2MTIzNzk2MF5BMl5BanBnXkFtZTcwMDcwMzQxNw@@._V1_SX300.jpg", - }, - { - id: 64, - title: "God Bless America", - year: "2011", - runtime: "105", - genres: ["Comedy", "Crime"], - director: "Bobcat Goldthwait", - actors: - "Joel Murray, Tara Lynne Barr, Melinda Page Hamilton, Mackenzie Brooke Smith", - plot: "On a mission to rid society of its most repellent citizens, terminally ill Frank makes an unlikely accomplice in 16-year-old Roxy.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQwMTc1MzA4NF5BMl5BanBnXkFtZTcwNzQwMTgzNw@@._V1_SX300.jpg", - }, - { - id: 65, - title: "The Social Network", - year: "2010", - runtime: "120", - genres: ["Biography", "Drama"], - director: "David Fincher", - actors: "Jesse Eisenberg, Rooney Mara, Bryan Barter, Dustin Fitzsimons", - plot: "Harvard student Mark Zuckerberg creates the social networking site that would become known as Facebook, but is later sued by two brothers who claimed he stole their idea, and the co-founder who was later squeezed out of the business.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM2ODk0NDAwMF5BMl5BanBnXkFtZTcwNTM1MDc2Mw@@._V1_SX300.jpg", - }, - { - id: 66, - title: "The Pianist", - year: "2002", - runtime: "150", - genres: ["Biography", "Drama", "War"], - director: "Roman Polanski", - actors: "Adrien Brody, Emilia Fox, Michal Zebrowski, Ed Stoppard", - plot: "A Polish Jewish musician struggles to survive the destruction of the Warsaw ghetto of World War II.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTc4OTkyOTA3OF5BMl5BanBnXkFtZTYwMDIxNjk5._V1_SX300.jpg", - }, - { - id: 67, - title: "Alive", - year: "1993", - runtime: "120", - genres: ["Adventure", "Biography", "Drama"], - director: "Frank Marshall", - actors: "Ethan Hawke, Vincent Spano, 
Josh Hamilton, Bruce Ramsay", - plot: "Uruguayan rugby team stranded in the snow swept Andes are forced to use desperate measures to survive after a plane crash.", - posterUrl: "", - }, - { - id: 68, - title: "Casablanca", - year: "1942", - runtime: "102", - genres: ["Drama", "Romance", "War"], - director: "Michael Curtiz", - actors: "Humphrey Bogart, Ingrid Bergman, Paul Henreid, Claude Rains", - plot: "In Casablanca, Morocco in December 1941, a cynical American expatriate meets a former lover, with unforeseen complications.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjQwNDYyNTk2N15BMl5BanBnXkFtZTgwMjQ0OTMyMjE@._V1_SX300.jpg", - }, - { - id: 69, - title: "American Gangster", - year: "2007", - runtime: "157", - genres: ["Biography", "Crime", "Drama"], - director: "Ridley Scott", - actors: "Denzel Washington, Russell Crowe, Chiwetel Ejiofor, Josh Brolin", - plot: "In 1970s America, a detective works to bring down the drug empire of Frank Lucas, a heroin kingpin from Manhattan, who is smuggling the drug into the country from the Far East.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTkyNzY5MDA5MV5BMl5BanBnXkFtZTcwMjg4MzI3MQ@@._V1_SX300.jpg", - }, - { - id: 70, - title: "Catch Me If You Can", - year: "2002", - runtime: "141", - genres: ["Biography", "Crime", "Drama"], - director: "Steven Spielberg", - actors: "Leonardo DiCaprio, Tom Hanks, Christopher Walken, Martin Sheen", - plot: "The true story of Frank Abagnale Jr. 
who, before his 19th birthday, successfully conned millions of dollars' worth of checks as a Pan Am pilot, doctor, and legal prosecutor.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY5MzYzNjc5NV5BMl5BanBnXkFtZTYwNTUyNTc2._V1_SX300.jpg", - }, - { - id: 71, - title: "American History X", - year: "1998", - runtime: "119", - genres: ["Crime", "Drama"], - director: "Tony Kaye", - actors: "Edward Norton, Edward Furlong, Beverly D'Angelo, Jennifer Lien", - plot: "A former neo-nazi skinhead tries to prevent his younger brother from going down the same wrong path that he did.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BZjA0MTM4MTQtNzY5MC00NzY3LWI1ZTgtYzcxMjkyMzU4MDZiXkEyXkFqcGdeQXVyNDYyMDk5MTU@._V1_SX300.jpg", - }, - { - id: 72, - title: "Casino", - year: "1995", - runtime: "178", - genres: ["Biography", "Crime", "Drama"], - director: "Martin Scorsese", - actors: "Robert De Niro, Sharon Stone, Joe Pesci, James Woods", - plot: "Greed, deception, money, power, and murder occur between two best friends, a mafia underboss and a casino owner, for a trophy wife over a gambling empire.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTcxOWYzNDYtYmM4YS00N2NkLTk0NTAtNjg1ODgwZjAxYzI3XkEyXkFqcGdeQXVyNTA4NzY1MzY@._V1_SX300.jpg", - }, - { - id: 73, - title: "Pirates of the Caribbean: At World's End", - year: "2007", - runtime: "169", - genres: ["Action", "Adventure", "Fantasy"], - director: "Gore Verbinski", - actors: "Johnny Depp, Geoffrey Rush, Orlando Bloom, Keira Knightley", - plot: "Captain Barbossa, Will Turner and Elizabeth Swann must sail off the edge of the map, navigate treachery and betrayal, find Jack Sparrow, and make their final alliances for one last decisive battle.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjIyNjkxNzEyMl5BMl5BanBnXkFtZTYwMjc3MDE3._V1_SX300.jpg", - }, - { - id: 74, - title: "Pirates of the Caribbean: On Stranger Tides", - year: "2011", - runtime: "136", - genres: 
["Action", "Adventure", "Fantasy"], - director: "Rob Marshall", - actors: "Johnny Depp, Penélope Cruz, Geoffrey Rush, Ian McShane", - plot: "Jack Sparrow and Barbossa embark on a quest to find the elusive fountain of youth, only to discover that Blackbeard and his daughter are after it too.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjE5MjkwODI3Nl5BMl5BanBnXkFtZTcwNjcwMDk4NA@@._V1_SX300.jpg", - }, - { - id: 75, - title: "Crash", - year: "2004", - runtime: "112", - genres: ["Crime", "Drama", "Thriller"], - director: "Paul Haggis", - actors: "Karina Arroyave, Dato Bakhtadze, Sandra Bullock, Don Cheadle", - plot: "Los Angeles citizens with vastly separate lives collide in interweaving stories of race, loss and redemption.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BOTk1OTA1MjIyNV5BMl5BanBnXkFtZTcwODQxMTkyMQ@@._V1_SX300.jpg", - }, - { - id: 76, - title: "Pirates of the Caribbean: The Curse of the Black Pearl", - year: "2003", - runtime: "143", - genres: ["Action", "Adventure", "Fantasy"], - director: "Gore Verbinski", - actors: "Johnny Depp, Geoffrey Rush, Orlando Bloom, Keira Knightley", - plot: "Blacksmith Will Turner teams up with eccentric pirate \"Captain\" Jack Sparrow to save his love, the governor's daughter, from Jack's former pirate allies, who are now undead.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjAyNDM4MTc2N15BMl5BanBnXkFtZTYwNDk0Mjc3._V1_SX300.jpg", - }, - { - id: 77, - title: "The Lord of the Rings: The Return of the King", - year: "2003", - runtime: "201", - genres: ["Action", "Adventure", "Drama"], - director: "Peter Jackson", - actors: "Noel Appleby, Ali Astin, Sean Astin, David Aston", - plot: "Gandalf and Aragorn lead the World of Men against Sauron's army to draw his gaze from Frodo and Sam as they approach Mount Doom with the One Ring.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjE4MjA1NTAyMV5BMl5BanBnXkFtZTcwNzM1NDQyMQ@@._V1_SX300.jpg", - }, - { - 
id: 78, - title: "Oldboy", - year: "2003", - runtime: "120", - genres: ["Drama", "Mystery", "Thriller"], - director: "Chan-wook Park", - actors: "Min-sik Choi, Ji-tae Yu, Hye-jeong Kang, Dae-han Ji", - plot: "After being kidnapped and imprisoned for 15 years, Oh Dae-Su is released, only to find that he must find his captor in 5 days.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTI3NTQyMzU5M15BMl5BanBnXkFtZTcwMTM2MjgyMQ@@._V1_SX300.jpg", - }, - { - id: 79, - title: "Chocolat", - year: "2000", - runtime: "121", - genres: ["Drama", "Romance"], - director: "Lasse Hallström", - actors: - "Alfred Molina, Carrie-Anne Moss, Aurelien Parent Koenig, Antonio Gil", - plot: "A woman and her daughter open a chocolate shop in a small French village that shakes up the rigid morality of the community.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA4MDI3NTQwMV5BMl5BanBnXkFtZTcwNjIzNDcyMQ@@._V1_SX300.jpg", - }, - { - id: 80, - title: "Casino Royale", - year: "2006", - runtime: "144", - genres: ["Action", "Adventure", "Thriller"], - director: "Martin Campbell", - actors: "Daniel Craig, Eva Green, Mads Mikkelsen, Judi Dench", - plot: "Armed with a licence to kill, Secret Agent James Bond sets out on his first mission as 007 and must defeat a weapons dealer in a high stakes game of poker at Casino Royale, but things are not what they seem.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM5MjI4NDExNF5BMl5BanBnXkFtZTcwMDM1MjMzMQ@@._V1_SX300.jpg", - }, - { - id: 81, - title: "WALL·E", - year: "2008", - runtime: "98", - genres: ["Animation", "Adventure", "Family"], - director: "Andrew Stanton", - actors: "Ben Burtt, Elissa Knight, Jeff Garlin, Fred Willard", - plot: "In the distant future, a small waste-collecting robot inadvertently embarks on a space journey that will ultimately decide the fate of mankind.", - posterUrl: - 
"https://images-na.ssl-images-amazon.com/images/M/MV5BMTczOTA3MzY2N15BMl5BanBnXkFtZTcwOTYwNjE2MQ@@._V1_SX300.jpg", - }, - { - id: 82, - title: "The Wolf of Wall Street", - year: "2013", - runtime: "180", - genres: ["Biography", "Comedy", "Crime"], - director: "Martin Scorsese", - actors: "Leonardo DiCaprio, Jonah Hill, Margot Robbie, Matthew McConaughey", - plot: "Based on the true story of Jordan Belfort, from his rise to a wealthy stock-broker living the high life to his fall involving crime, corruption and the federal government.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjIxMjgxNTk0MF5BMl5BanBnXkFtZTgwNjIyOTg2MDE@._V1_SX300.jpg", - }, - { - id: 83, - title: "Hellboy II: The Golden Army", - year: "2008", - runtime: "120", - genres: ["Action", "Adventure", "Fantasy"], - director: "Guillermo del Toro", - actors: "Ron Perlman, Selma Blair, Doug Jones, John Alexander", - plot: "The mythical world starts a rebellion against humanity in order to rule the Earth, so Hellboy and his team must save the world from the rebellious creatures.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA5NzgyMjc2Nl5BMl5BanBnXkFtZTcwOTU3MDI3MQ@@._V1_SX300.jpg", - }, - { - id: 84, - title: "Sunset Boulevard", - year: "1950", - runtime: "110", - genres: ["Drama", "Film-Noir", "Romance"], - director: "Billy Wilder", - actors: "William Holden, Gloria Swanson, Erich von Stroheim, Nancy Olson", - plot: "A hack screenwriter writes a screenplay for a former silent-film star who has faded into Hollywood obscurity.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTc3NDYzODAwNV5BMl5BanBnXkFtZTgwODg1MTczMTE@._V1_SX300.jpg", - }, - { - id: 85, - title: "I-See-You.Com", - year: "2006", - runtime: "92", - genres: ["Comedy"], - director: "Eric Steven Stahl", - actors: "Beau Bridges, Rosanna Arquette, Mathew Botuchis, Shiri Appleby", - plot: "A 17-year-old boy buys mini-cameras and displays the footage online at I-see-you.com. 
The cash rolls in as the site becomes a major hit. Everyone seems to have fun until it all comes crashing down....", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTYwMDUzNzA5Nl5BMl5BanBnXkFtZTcwMjQ2Njk3MQ@@._V1_SX300.jpg", - }, - { - id: 86, - title: "The Grand Budapest Hotel", - year: "2014", - runtime: "99", - genres: ["Adventure", "Comedy", "Crime"], - director: "Wes Anderson", - actors: "Ralph Fiennes, F. Murray Abraham, Mathieu Amalric, Adrien Brody", - plot: "The adventures of Gustave H, a legendary concierge at a famous hotel from the fictional Republic of Zubrowka between the first and second World Wars, and Zero Moustafa, the lobby boy who becomes his most trusted friend.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMzM5NjUxOTEyMl5BMl5BanBnXkFtZTgwNjEyMDM0MDE@._V1_SX300.jpg", - }, - { - id: 87, - title: "The Hitchhiker's Guide to the Galaxy", - year: "2005", - runtime: "109", - genres: ["Adventure", "Comedy", "Sci-Fi"], - director: "Garth Jennings", - actors: "Bill Bailey, Anna Chancellor, Warwick Davis, Yasiin Bey", - plot: 'Mere seconds before the Earth is to be demolished by an alien construction crew, journeyman Arthur Dent is swept off the planet by his friend Ford Prefect, a researcher penning a new edition of "The Hitchhiker\'s Guide to the Galaxy."', - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjEwOTk4NjU2MF5BMl5BanBnXkFtZTYwMDA3NzI3._V1_SX300.jpg", - }, - { - id: 88, - title: "Once Upon a Time in America", - year: "1984", - runtime: "229", - genres: ["Crime", "Drama"], - director: "Sergio Leone", - actors: "Robert De Niro, James Woods, Elizabeth McGovern, Joe Pesci", - plot: "A former Prohibition-era Jewish gangster returns to the Lower East Side of Manhattan over thirty years later, where he once again must confront the ghosts and regrets of his old life.", - posterUrl: - 
"https://images-na.ssl-images-amazon.com/images/M/MV5BMGFkNWI4MTMtNGQ0OC00MWVmLTk3MTktOGYxN2Y2YWVkZWE2XkEyXkFqcGdeQXVyNjU0OTQ0OTY@._V1_SX300.jpg", - }, - { - id: 89, - title: "Oblivion", - year: "2013", - runtime: "124", - genres: ["Action", "Adventure", "Mystery"], - director: "Joseph Kosinski", - actors: "Tom Cruise, Morgan Freeman, Olga Kurylenko, Andrea Riseborough", - plot: "A veteran assigned to extract Earth's remaining resources begins to question what he knows about his mission and himself.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQwMDY0MTA4MF5BMl5BanBnXkFtZTcwNzI3MDgxOQ@@._V1_SX300.jpg", - }, - { - id: 90, - title: "V for Vendetta", - year: "2005", - runtime: "132", - genres: ["Action", "Drama", "Thriller"], - director: "James McTeigue", - actors: "Natalie Portman, Hugo Weaving, Stephen Rea, Stephen Fry", - plot: 'In a future British tyranny, a shadowy freedom fighter, known only by the alias of "V", plots to overthrow it with the help of a young woman.', - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BOTI5ODc3NzExNV5BMl5BanBnXkFtZTcwNzYxNzQzMw@@._V1_SX300.jpg", - }, - { - id: 91, - title: "Gattaca", - year: "1997", - runtime: "106", - genres: ["Drama", "Sci-Fi", "Thriller"], - director: "Andrew Niccol", - actors: "Ethan Hawke, Uma Thurman, Gore Vidal, Xander Berkeley", - plot: "A genetically inferior man assumes the identity of a superior one in order to pursue his lifelong dream of space travel.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNDQxOTc0MzMtZmRlOS00OWQ5LWI2ZDctOTAwNmMwOTYxYzlhXkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 92, - title: "Silver Linings Playbook", - year: "2012", - runtime: "122", - genres: ["Comedy", "Drama", "Romance"], - director: "David O. 
Russell", - actors: "Bradley Cooper, Jennifer Lawrence, Robert De Niro, Jacki Weaver", - plot: "After a stint in a mental institution, former teacher Pat Solitano moves back in with his parents and tries to reconcile with his ex-wife. Things get more challenging when Pat meets Tiffany, a mysterious girl with problems of her own.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM2MTI5NzA3MF5BMl5BanBnXkFtZTcwODExNTc0OA@@._V1_SX300.jpg", - }, - { - id: 93, - title: "Alice in Wonderland", - year: "2010", - runtime: "108", - genres: ["Adventure", "Family", "Fantasy"], - director: "Tim Burton", - actors: "Johnny Depp, Mia Wasikowska, Helena Bonham Carter, Anne Hathaway", - plot: "Nineteen-year-old Alice returns to the magical world from her childhood adventure, where she reunites with her old friends and learns of her true destiny: to end the Red Queen's reign of terror.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTMwNjAxMTc0Nl5BMl5BanBnXkFtZTcwODc3ODk5Mg@@._V1_SX300.jpg", - }, - { - id: 94, - title: "Gandhi", - year: "1982", - runtime: "191", - genres: ["Biography", "Drama"], - director: "Richard Attenborough", - actors: "Ben Kingsley, Candice Bergen, Edward Fox, John Gielgud", - plot: "Gandhi's character is fully explained as a man of nonviolence. Through his patience, he is able to drive the British out of the subcontinent. 
And the stubborn nature of Jinnah and his commitment towards Pakistan is portrayed.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMzJiZDRmOWUtYjE2MS00Mjc1LTg1ZDYtNTQxYWJkZTg1OTM4XkEyXkFqcGdeQXVyNjUwNzk3NDc@._V1_SX300.jpg", - }, - { - id: 95, - title: "Pacific Rim", - year: "2013", - runtime: "131", - genres: ["Action", "Adventure", "Sci-Fi"], - director: "Guillermo del Toro", - actors: "Charlie Hunnam, Diego Klattenhoff, Idris Elba, Rinko Kikuchi", - plot: "As a war between humankind and monstrous sea creatures wages on, a former pilot and a trainee are paired up to drive a seemingly obsolete special weapon in a desperate effort to save the world from the apocalypse.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY3MTI5NjQ4Nl5BMl5BanBnXkFtZTcwOTU1OTU0OQ@@._V1_SX300.jpg", - }, - { - id: 96, - title: "Kiss Kiss Bang Bang", - year: "2005", - runtime: "103", - genres: ["Comedy", "Crime", "Mystery"], - director: "Shane Black", - actors: "Robert Downey Jr., Val Kilmer, Michelle Monaghan, Corbin Bernsen", - plot: "A murder mystery brings together a private eye, a struggling actress, and a thief masquerading as an actor.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTY5NDExMDA3M15BMl5BanBnXkFtZTYwNTc2MzA3._V1_SX300.jpg", - }, - { - id: 97, - title: "The Quiet American", - year: "2002", - runtime: "101", - genres: ["Drama", "Mystery", "Romance"], - director: "Phillip Noyce", - actors: "Michael Caine, Brendan Fraser, Do Thi Hai Yen, Rade Serbedzija", - plot: "An older British reporter vies with a young U.S. 
doctor for the affections of a beautiful Vietnamese woman.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjE2NTUxNTE3Nl5BMl5BanBnXkFtZTYwNTczMTg5._V1_SX300.jpg", - }, - { - id: 98, - title: "Cloud Atlas", - year: "2012", - runtime: "172", - genres: ["Drama", "Sci-Fi"], - director: "Tom Tykwer, Lana Wachowski, Lilly Wachowski", - actors: "Tom Hanks, Halle Berry, Jim Broadbent, Hugo Weaving", - plot: "An exploration of how the actions of individual lives impact one another in the past, present and future, as one soul is shaped from a killer into a hero, and an act of kindness ripples across centuries to inspire a revolution.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTczMTgxMjc4NF5BMl5BanBnXkFtZTcwNjM5MTA2OA@@._V1_SX300.jpg", - }, - { - id: 99, - title: "The Impossible", - year: "2012", - runtime: "114", - genres: ["Drama", "Thriller"], - director: "J.A. Bayona", - actors: "Naomi Watts, Ewan McGregor, Tom Holland, Samuel Joslin", - plot: "The story of a tourist family in Thailand caught in the destruction and chaotic aftermath of the 2004 Indian Ocean tsunami.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA5NTA3NzQ5Nl5BMl5BanBnXkFtZTcwOTYxNjY0OA@@._V1_SX300.jpg", - }, - { - id: 100, - title: "All Quiet on the Western Front", - year: "1930", - runtime: "136", - genres: ["Drama", "War"], - director: "Lewis Milestone", - actors: "Louis Wolheim, Lew Ayres, John Wray, Arnold Lucy", - plot: "A young soldier faces profound disillusionment in the soul-destroying horror of World War I.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNTM5OTg2NDY1NF5BMl5BanBnXkFtZTcwNTQ4MTMwNw@@._V1_SX300.jpg", - }, - { - id: 101, - title: "The English Patient", - year: "1996", - runtime: "162", - genres: ["Drama", "Romance", "War"], - director: "Anthony Minghella", - actors: - "Ralph Fiennes, Juliette Binoche, Willem Dafoe, Kristin Scott Thomas", - plot: "At the close of WWII, a young nurse tends to a 
badly-burned plane crash victim. His past is shown in flashbacks, revealing an involvement in a fateful love affair.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNDg2OTcxNDE0OF5BMl5BanBnXkFtZTgwOTg2MDM0MDE@._V1_SX300.jpg", - }, - { - id: 102, - title: "Dallas Buyers Club", - year: "2013", - runtime: "117", - genres: ["Biography", "Drama"], - director: "Jean-Marc Vallée", - actors: "Matthew McConaughey, Jennifer Garner, Jared Leto, Denis O'Hare", - plot: "In 1985 Dallas, electrician and hustler Ron Woodroof works around the system to help AIDS patients get the medication they need after he is diagnosed with the disease.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTYwMTA4MzgyNF5BMl5BanBnXkFtZTgwMjEyMjE0MDE@._V1_SX300.jpg", - }, - { - id: 103, - title: "Frida", - year: "2002", - runtime: "123", - genres: ["Biography", "Drama", "Romance"], - director: "Julie Taymor", - actors: "Salma Hayek, Mía Maestro, Alfred Molina, Antonio Banderas", - plot: "A biography of artist Frida Kahlo, who channeled the pain of a crippling injury and her tempestuous marriage into her work.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTMyODUyMDY1OV5BMl5BanBnXkFtZTYwMDA2OTU3._V1_SX300.jpg", - }, - { - id: 104, - title: "Before Sunrise", - year: "1995", - runtime: "105", - genres: ["Drama", "Romance"], - director: "Richard Linklater", - actors: "Ethan Hawke, Julie Delpy, Andrea Eckert, Hanno Pöschl", - plot: "A young man and woman meet on a train in Europe, and wind up spending one evening together in Vienna. 
Unfortunately, both know that this will probably be their only night together.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTQyMTM3MTQxMl5BMl5BanBnXkFtZTcwMDAzNjQ4Mg@@._V1_SX300.jpg", - }, - { - id: 105, - title: "The Rum Diary", - year: "2011", - runtime: "120", - genres: ["Comedy", "Drama"], - director: "Bruce Robinson", - actors: "Johnny Depp, Aaron Eckhart, Michael Rispoli, Amber Heard", - plot: "American journalist Paul Kemp takes on a freelance job in Puerto Rico for a local newspaper during the 1960s and struggles to find a balance between island culture and the expatriates who live there.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM5ODA4MjYxM15BMl5BanBnXkFtZTcwMTM3NTE5Ng@@._V1_SX300.jpg", - }, - { - id: 106, - title: "The Last Samurai", - year: "2003", - runtime: "154", - genres: ["Action", "Drama", "History"], - director: "Edward Zwick", - actors: "Ken Watanabe, Tom Cruise, William Atherton, Chad Lindberg", - plot: "An American military advisor embraces the Samurai culture he was hired to destroy after he is captured in battle.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMzkyNzQ1Mzc0NV5BMl5BanBnXkFtZTcwODg3MzUzMw@@._V1_SX300.jpg", - }, - { - id: 107, - title: "Chinatown", - year: "1974", - runtime: "130", - genres: ["Drama", "Mystery", "Thriller"], - director: "Roman Polanski", - actors: "Jack Nicholson, Faye Dunaway, John Huston, Perry Lopez", - plot: "A private detective hired to expose an adulterer finds himself caught up in a web of deceit, corruption and murder.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BN2YyNDE5NzItMjAwNC00MGQxLTllNjktZGIzMWFkZjA3OWQ0XkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, - { - id: 108, - title: "Calvary", - year: "2014", - runtime: "102", - genres: ["Comedy", "Drama"], - director: "John Michael McDonagh", - actors: "Brendan Gleeson, Chris O'Dowd, Kelly Reilly, Aidan Gillen", - plot: "After he is threatened during a confession, 
a good-natured priest must battle the dark forces closing in around him.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTc3MjQ1MjE2M15BMl5BanBnXkFtZTgwNTMzNjE4MTE@._V1_SX300.jpg", - }, - { - id: 109, - title: "Before Sunset", - year: "2004", - runtime: "80", - genres: ["Drama", "Romance"], - director: "Richard Linklater", - actors: "Ethan Hawke, Julie Delpy, Vernon Dobtcheff, Louise Lemoine Torrès", - plot: "Nine years after Jesse and Celine first met, they encounter each other again on the French leg of Jesse's book tour.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTQ1MjAwNTM5Ml5BMl5BanBnXkFtZTYwNDM0MTc3._V1_SX300.jpg", - }, - { - id: 110, - title: "Spirited Away", - year: "2001", - runtime: "125", - genres: ["Animation", "Adventure", "Family"], - director: "Hayao Miyazaki", - actors: "Rumi Hiiragi, Miyu Irino, Mari Natsuki, Takashi Naitô", - plot: "During her family's move to the suburbs, a sullen 10-year-old girl wanders into a world ruled by gods, witches, and spirits, and where humans are changed into beasts.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjYxMDcyMzIzNl5BMl5BanBnXkFtZTYwNDg2MDU3._V1_SX300.jpg", - }, - { - id: 111, - title: "Indochine", - year: "1992", - runtime: "159", - genres: ["Drama", "Romance"], - director: "Régis Wargnier", - actors: "Catherine Deneuve, Vincent Perez, Linh Dan Pham, Jean Yanne", - plot: "This story is set in 1930, at the time when French colonial rule in Indochina is ending. A widowed French woman who works in the rubber fields, raises a Vietnamese princess as if she was ...", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTM1MTkzNzA3NF5BMl5BanBnXkFtZTYwNTI2MzU5._V1_SX300.jpg", - }, - { - id: 112, - title: "Birdman or (The Unexpected Virtue of Ignorance)", - year: "2014", - runtime: "119", - genres: ["Comedy", "Drama", "Romance"], - director: "Alejandro G. 
Iñárritu", - actors: "Michael Keaton, Emma Stone, Kenny Chin, Jamahl Garrison-Lowe", - plot: "Illustrated upon the progress of his latest Broadway play, a former popular actor's struggle to cope with his current life as a wasted actor is shown.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BODAzNDMxMzAxOV5BMl5BanBnXkFtZTgwMDMxMjA4MjE@._V1_SX300.jpg", - }, - { - id: 113, - title: "Boyhood", - year: "2014", - runtime: "165", - genres: ["Drama"], - director: "Richard Linklater", - actors: - "Ellar Coltrane, Patricia Arquette, Elijah Smith, Lorelei Linklater", - plot: "The life of Mason, from early childhood to his arrival at college.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTYzNDc2MDc0N15BMl5BanBnXkFtZTgwOTcwMDQ5MTE@._V1_SX300.jpg", - }, - { - id: 114, - title: "12 Angry Men", - year: "1957", - runtime: "96", - genres: ["Crime", "Drama"], - director: "Sidney Lumet", - actors: "Martin Balsam, John Fiedler, Lee J. Cobb, E.G. Marshall", - plot: "A jury holdout attempts to prevent a miscarriage of justice by forcing his colleagues to reconsider the evidence.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BODQwOTc5MDM2N15BMl5BanBnXkFtZTcwODQxNTEzNA@@._V1_SX300.jpg", - }, - { - id: 115, - title: "The Imitation Game", - year: "2014", - runtime: "114", - genres: ["Biography", "Drama", "Thriller"], - director: "Morten Tyldum", - actors: - "Benedict Cumberbatch, Keira Knightley, Matthew Goode, Rory Kinnear", - plot: "During World War II, mathematician Alan Turing tries to crack the enigma code with help from fellow mathematicians.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNDkwNTEyMzkzNl5BMl5BanBnXkFtZTgwNTAwNzk3MjE@._V1_SX300.jpg", - }, - { - id: 116, - title: "Interstellar", - year: "2014", - runtime: "169", - genres: ["Adventure", "Drama", "Sci-Fi"], - director: "Christopher Nolan", - actors: "Ellen Burstyn, Matthew McConaughey, Mackenzie Foy, John Lithgow", - plot: "A 
team of explorers travel through a wormhole in space in an attempt to ensure humanity's survival.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjIxNTU4MzY4MF5BMl5BanBnXkFtZTgwMzM4ODI3MjE@._V1_SX300.jpg", - }, - { - id: 117, - title: "Big Nothing", - year: "2006", - runtime: "86", - genres: ["Comedy", "Crime", "Thriller"], - director: "Jean-Baptiste Andrea", - actors: "David Schwimmer, Simon Pegg, Alice Eve, Natascha McElhone", - plot: "A frustrated, unemployed teacher joining forces with a scammer and his girlfriend in a blackmailing scheme.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY5NTc2NjYwOV5BMl5BanBnXkFtZTcwMzk5OTY0MQ@@._V1_SX300.jpg", - }, - { - id: 118, - title: "Das Boot", - year: "1981", - runtime: "149", - genres: ["Adventure", "Drama", "Thriller"], - director: "Wolfgang Petersen", - actors: - "Jürgen Prochnow, Herbert Grönemeyer, Klaus Wennemann, Hubertus Bengsch", - plot: "The claustrophobic world of a WWII German U-boat; boredom, filth, and sheer terror.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjE5Mzk5OTQ0Nl5BMl5BanBnXkFtZTYwNzUwMTQ5._V1_SX300.jpg", - }, - { - id: 119, - title: "Shrek 2", - year: "2004", - runtime: "93", - genres: ["Animation", "Adventure", "Comedy"], - director: "Andrew Adamson, Kelly Asbury, Conrad Vernon", - actors: "Mike Myers, Eddie Murphy, Cameron Diaz, Julie Andrews", - plot: "Princess Fiona's parents invite her and Shrek to dinner to celebrate her marriage. 
If only they knew the newlyweds were both ogres.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTk4MTMwNjI4M15BMl5BanBnXkFtZTcwMjExMzUyMQ@@._V1_SX300.jpg", - }, - { - id: 120, - title: "Sin City", - year: "2005", - runtime: "124", - genres: ["Crime", "Thriller"], - director: "Frank Miller, Robert Rodriguez, Quentin Tarantino", - actors: "Jessica Alba, Devon Aoki, Alexis Bledel, Powers Boothe", - plot: "A film that explores the dark and miserable town, Basin City, and tells the story of three different people, all caught up in violent corruption.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BODZmYjMwNzEtNzVhNC00ZTRmLTk2M2UtNzE1MTQ2ZDAxNjc2XkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_SX300.jpg", - }, - { - id: 121, - title: "Nebraska", - year: "2013", - runtime: "115", - genres: ["Adventure", "Comedy", "Drama"], - director: "Alexander Payne", - actors: "Bruce Dern, Will Forte, June Squibb, Bob Odenkirk", - plot: "An aging, booze-addled father makes the trip from Montana to Nebraska with his estranged son in order to claim a million-dollar Mega Sweepstakes Marketing prize.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTU2Mjk2NDkyMl5BMl5BanBnXkFtZTgwNTk0NzcyMDE@._V1_SX300.jpg", - }, - { - id: 122, - title: "Shrek", - year: "2001", - runtime: "90", - genres: ["Animation", "Adventure", "Comedy"], - director: "Andrew Adamson, Vicky Jenson", - actors: "Mike Myers, Eddie Murphy, Cameron Diaz, John Lithgow", - plot: "After his swamp is filled with magical creatures, an ogre agrees to rescue a princess for a villainous lord in order to get his land back.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTk2NTE1NTE0M15BMl5BanBnXkFtZTgwNjY4NTYxMTE@._V1_SX300.jpg", - }, - { - id: 123, - title: "Mr. & Mrs. 
Smith", - year: "2005", - runtime: "120", - genres: ["Action", "Comedy", "Crime"], - director: "Doug Liman", - actors: "Brad Pitt, Angelina Jolie, Vince Vaughn, Adam Brody", - plot: "A bored married couple is surprised to learn that they are both assassins hired by competing agencies to kill each other.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTUxMzcxNzQzOF5BMl5BanBnXkFtZTcwMzQxNjUyMw@@._V1_SX300.jpg", - }, - { - id: 124, - title: "Original Sin", - year: "2001", - runtime: "116", - genres: ["Drama", "Mystery", "Romance"], - director: "Michael Cristofer", - actors: "Antonio Banderas, Angelina Jolie, Thomas Jane, Jack Thompson", - plot: "A woman along with her lover, plan to con a rich man by marrying him and on earning his trust running away with all his money. Everything goes as planned until she actually begins to fall in love with him.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BODg3Mjg0MDY4M15BMl5BanBnXkFtZTcwNjY5MDQ2NA@@._V1_SX300.jpg", - }, - { - id: 125, - title: "Shrek Forever After", - year: "2010", - runtime: "93", - genres: ["Animation", "Adventure", "Comedy"], - director: "Mike Mitchell", - actors: "Mike Myers, Eddie Murphy, Cameron Diaz, Antonio Banderas", - plot: "Rumpelstiltskin tricks a mid-life crisis burdened Shrek into allowing himself to be erased from existence and cast in a dark alternate timeline where Rumpel rules supreme.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTY0OTU1NzkxMl5BMl5BanBnXkFtZTcwMzI2NDUzMw@@._V1_SX300.jpg", - }, - { - id: 126, - title: "Before Midnight", - year: "2013", - runtime: "109", - genres: ["Drama", "Romance"], - director: "Richard Linklater", - actors: - "Ethan Hawke, Julie Delpy, Seamus Davey-Fitzpatrick, Jennifer Prior", - plot: "We meet Jesse and Celine nine years on in Greece. 
Almost two decades have passed since their first meeting on that train bound for Vienna.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjA5NzgxODE2NF5BMl5BanBnXkFtZTcwNTI1NTI0OQ@@._V1_SX300.jpg", - }, - { - id: 127, - title: "Despicable Me", - year: "2010", - runtime: "95", - genres: ["Animation", "Adventure", "Comedy"], - director: "Pierre Coffin, Chris Renaud", - actors: "Steve Carell, Jason Segel, Russell Brand, Julie Andrews", - plot: "When a criminal mastermind uses a trio of orphan girls as pawns for a grand scheme, he finds their love is profoundly changing him for the better.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY3NjY0MTQ0Nl5BMl5BanBnXkFtZTcwMzQ2MTc0Mw@@._V1_SX300.jpg", - }, - { - id: 128, - title: "Troy", - year: "2004", - runtime: "163", - genres: ["Adventure"], - director: "Wolfgang Petersen", - actors: "Julian Glover, Brian Cox, Nathan Jones, Adoni Maropis", - plot: "An adaptation of Homer's great epic, the film follows the assault on Troy by the united Greek forces and chronicles the fates of the men involved.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTk5MzU1MDMwMF5BMl5BanBnXkFtZTcwNjczODMzMw@@._V1_SX300.jpg", - }, - { - id: 129, - title: "The Hobbit: An Unexpected Journey", - year: "2012", - runtime: "169", - genres: ["Adventure", "Fantasy"], - director: "Peter Jackson", - actors: "Ian McKellen, Martin Freeman, Richard Armitage, Ken Stott", - plot: "A reluctant hobbit, Bilbo Baggins, sets out to the Lonely Mountain with a spirited group of dwarves to reclaim their mountain home - and the gold within it - from the dragon Smaug.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTcwNTE4MTUxMl5BMl5BanBnXkFtZTcwMDIyODM4OA@@._V1_SX300.jpg", - }, - { - id: 130, - title: "The Great Gatsby", - year: "2013", - runtime: "143", - genres: ["Drama", "Romance"], - director: "Baz Luhrmann", - actors: "Lisa Adam, Frank Aldridge, Amitabh Bachchan, Steve Bisley", - plot: "A writer and 
wall street trader, Nick, finds himself drawn to the past and lifestyle of his millionaire neighbor, Jay Gatsby.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTkxNTk1ODcxNl5BMl5BanBnXkFtZTcwMDI1OTMzOQ@@._V1_SX300.jpg", - }, - { - id: 131, - title: "Ice Age", - year: "2002", - runtime: "81", - genres: ["Animation", "Adventure", "Comedy"], - director: "Chris Wedge, Carlos Saldanha", - actors: "Ray Romano, John Leguizamo, Denis Leary, Goran Visnjic", - plot: "Set during the Ice Age, a sabertooth tiger, a sloth, and a wooly mammoth find a lost human infant, and they try to return him to his tribe.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjEyNzI1ODA0MF5BMl5BanBnXkFtZTYwODIxODY3._V1_SX300.jpg", - }, - { - id: 132, - title: "The Lord of the Rings: The Fellowship of the Ring", - year: "2001", - runtime: "178", - genres: ["Action", "Adventure", "Drama"], - director: "Peter Jackson", - actors: "Alan Howard, Noel Appleby, Sean Astin, Sala Baker", - plot: "A meek Hobbit from the Shire and eight companions set out on a journey to destroy the powerful One Ring and save Middle Earth from the Dark Lord Sauron.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNTEyMjAwMDU1OV5BMl5BanBnXkFtZTcwNDQyNTkxMw@@._V1_SX300.jpg", - }, - { - id: 133, - title: "The Lord of the Rings: The Two Towers", - year: "2002", - runtime: "179", - genres: ["Action", "Adventure", "Drama"], - director: "Peter Jackson", - actors: "Bruce Allpress, Sean Astin, John Bach, Sala Baker", - plot: "While Frodo and Sam edge closer to Mordor with the help of the shifty Gollum, the divided fellowship makes a stand against Sauron's new ally, Saruman, and his hordes of Isengard.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTAyNDU0NjY4NTheQTJeQWpwZ15BbWU2MDk4MTY2Nw@@._V1_SX300.jpg", - }, - { - id: 134, - title: "Ex Machina", - year: "2015", - runtime: "108", - genres: ["Drama", "Mystery", "Sci-Fi"], - director: "Alex 
Garland", - actors: "Domhnall Gleeson, Corey Johnson, Oscar Isaac, Alicia Vikander", - plot: "A young programmer is selected to participate in a ground-breaking experiment in synthetic intelligence by evaluating the human qualities of a breath-taking humanoid A.I.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTUxNzc0OTIxMV5BMl5BanBnXkFtZTgwNDI3NzU2NDE@._V1_SX300.jpg", - }, - { - id: 135, - title: "The Theory of Everything", - year: "2014", - runtime: "123", - genres: ["Biography", "Drama", "Romance"], - director: "James Marsh", - actors: "Eddie Redmayne, Felicity Jones, Tom Prior, Sophie Perry", - plot: "A look at the relationship between the famous physicist Stephen Hawking and his wife.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTAwMTU4MDA3NDNeQTJeQWpwZ15BbWU4MDk4NTMxNTIx._V1_SX300.jpg", - }, - { - id: 136, - title: "Shogun", - year: "1980", - runtime: "60", - genres: ["Adventure", "Drama", "History"], - director: "N/A", - actors: "Richard Chamberlain, Toshirô Mifune, Yôko Shimada, Furankî Sakai", - plot: "A English navigator becomes both a player and pawn in the complex political games in feudal Japan.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTY1ODI4NzYxMl5BMl5BanBnXkFtZTcwNDA4MzUxMQ@@._V1_SX300.jpg", - }, - { - id: 137, - title: "Spotlight", - year: "2015", - runtime: "128", - genres: ["Biography", "Crime", "Drama"], - director: "Tom McCarthy", - actors: "Mark Ruffalo, Michael Keaton, Rachel McAdams, Liev Schreiber", - plot: "The true story of how the Boston Globe uncovered the massive scandal of child molestation and cover-up within the local Catholic Archdiocese, shaking the entire Catholic Church to its core.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjIyOTM5OTIzNV5BMl5BanBnXkFtZTgwMDkzODE2NjE@._V1_SX300.jpg", - }, - { - id: 138, - title: "Vertigo", - year: "1958", - runtime: "128", - genres: ["Mystery", "Romance", "Thriller"], - director: 
"Alfred Hitchcock", - actors: "James Stewart, Kim Novak, Barbara Bel Geddes, Tom Helmore", - plot: "A San Francisco detective suffering from acrophobia investigates the strange activities of an old friend's wife, all the while becoming dangerously obsessed with her.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BNzY0NzQyNzQzOF5BMl5BanBnXkFtZTcwMTgwNTk4OQ@@._V1_SX300.jpg", - }, - { - id: 139, - title: "Whiplash", - year: "2014", - runtime: "107", - genres: ["Drama", "Music"], - director: "Damien Chazelle", - actors: "Miles Teller, J.K. Simmons, Paul Reiser, Melissa Benoist", - plot: "A promising young drummer enrolls at a cut-throat music conservatory where his dreams of greatness are mentored by an instructor who will stop at nothing to realize a student's potential.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTU4OTQ3MDUyMV5BMl5BanBnXkFtZTgwOTA2MjU0MjE@._V1_SX300.jpg", - }, - { - id: 140, - title: "The Lives of Others", - year: "2006", - runtime: "137", - genres: ["Drama", "Thriller"], - director: "Florian Henckel von Donnersmarck", - actors: "Martina Gedeck, Ulrich Mühe, Sebastian Koch, Ulrich Tukur", - plot: "In 1984 East Berlin, an agent of the secret police, conducting surveillance on a writer and his lover, finds himself becoming increasingly absorbed by their lives.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BNDUzNjYwNDYyNl5BMl5BanBnXkFtZTcwNjU3ODQ0MQ@@._V1_SX300.jpg", - }, - { - id: 141, - title: "Hotel Rwanda", - year: "2004", - runtime: "121", - genres: ["Drama", "History", "War"], - director: "Terry George", - actors: "Xolani Mali, Don Cheadle, Desmond Dube, Hakeem Kae-Kazim", - plot: "Paul Rusesabagina was a hotel manager who housed over a thousand Tutsi refugees during their struggle against the Hutu militia in Rwanda.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTI2MzQyNTc1M15BMl5BanBnXkFtZTYwMjExNjc3._V1_SX300.jpg", - }, - { - id: 142, - title: "The Martian", - year: "2015", - 
runtime: "144", - genres: ["Adventure", "Drama", "Sci-Fi"], - director: "Ridley Scott", - actors: "Matt Damon, Jessica Chastain, Kristen Wiig, Jeff Daniels", - plot: "An astronaut becomes stranded on Mars after his team assume him dead, and must rely on his ingenuity to find a way to signal to Earth that he is alive.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMTc2MTQ3MDA1Nl5BMl5BanBnXkFtZTgwODA3OTI4NjE@._V1_SX300.jpg", - }, - { - id: 143, - title: "To Kill a Mockingbird", - year: "1962", - runtime: "129", - genres: ["Crime", "Drama"], - director: "Robert Mulligan", - actors: "Gregory Peck, John Megna, Frank Overton, Rosemary Murphy", - plot: "Atticus Finch, a lawyer in the Depression-era South, defends a black man against an undeserved rape charge, and his kids against prejudice.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMjA4MzI1NDY2Nl5BMl5BanBnXkFtZTcwMTcyODc5Mw@@._V1_SX300.jpg", - }, - { - id: 144, - title: "The Hateful Eight", - year: "2015", - runtime: "187", - genres: ["Crime", "Drama", "Mystery"], - director: "Quentin Tarantino", - actors: - "Samuel L. 
Jackson, Kurt Russell, Jennifer Jason Leigh, Walton Goggins", - plot: "In the dead of a Wyoming winter, a bounty hunter and his prisoner find shelter in a cabin currently inhabited by a collection of nefarious characters.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BMjA1MTc1NTg5NV5BMl5BanBnXkFtZTgwOTM2MDEzNzE@._V1_SX300.jpg", - }, - { - id: 145, - title: "A Separation", - year: "2011", - runtime: "123", - genres: ["Drama", "Mystery"], - director: "Asghar Farhadi", - actors: "Peyman Moaadi, Leila Hatami, Sareh Bayat, Shahab Hosseini", - plot: "A married couple are faced with a difficult decision - to improve the life of their child by moving to another country or to stay in Iran and look after a deteriorating parent who has Alzheimer's disease.", - posterUrl: - "http://ia.media-imdb.com/images/M/MV5BMTYzMzU4NDUwOF5BMl5BanBnXkFtZTcwMTM5MjA5Ng@@._V1_SX300.jpg", - }, - { - id: 146, - title: "The Big Short", - year: "2015", - runtime: "130", - genres: ["Biography", "Comedy", "Drama"], - director: "Adam McKay", - actors: "Ryan Gosling, Rudy Eisenzopf, Casey Groves, Charlie Talbert", - plot: "Four denizens in the world of high-finance predict the credit and housing bubble collapse of the mid-2000s, and decide to take on the big banks for their greed and lack of foresight.", - posterUrl: - "https://images-na.ssl-images-amazon.com/images/M/MV5BNDc4MThhN2EtZjMzNC00ZDJmLThiZTgtNThlY2UxZWMzNjdkXkEyXkFqcGdeQXVyNDk3NzU2MTQ@._V1_SX300.jpg", - }, -]; diff --git a/ui/src/pages/misc/TaskQueue.jsx b/ui/src/pages/misc/TaskQueue.jsx deleted file mode 100644 index 5bf770884..000000000 --- a/ui/src/pages/misc/TaskQueue.jsx +++ /dev/null @@ -1,85 +0,0 @@ -import React from "react"; -import { useRouteMatch } from "react-router-dom"; -import sharedStyles from "../styles"; -import { useTaskQueueInfo, useTaskNames } from "../../data/task"; -import { makeStyles } from "@material-ui/styles"; -import { Helmet } from "react-helmet"; -import { usePushHistory } from 
"../../components/NavLink"; - -import { - Paper, - DataTable, - LinearProgress, - Heading, - Dropdown, -} from "../../components"; -import _ from "lodash"; - -const useStyles = makeStyles(sharedStyles); - -export default function TaskDefinition() { - const taskNames = useTaskNames(); - const pushHistory = usePushHistory(); - const classes = useStyles(); - const match = useRouteMatch(); - const taskName = match.params.name || ""; - - const { data, isFetching } = useTaskQueueInfo(taskName); - - const size = _.get(data, "size"); - const pollData = _.get(data, "pollData"); - - function setTaskName(name) { - if (name === null) { - name = ""; - } - pushHistory(`/taskQueue/${name}`); - } - - return ( -
    - - Conductor UI - Task Queue - -
    - - Task Queue Info - - setTaskName(val)} - disableClearable - getOptionSelected={(option, value) => { - // Accept empty string - if (value === "") return false; - return value === option; - }} - value={taskName} - /> -
    - {isFetching && } -
    - {!_.isUndefined(size) && !_.isUndefined(pollData) && ( - - - Queue Size: {size}{" "} - - - - )} -
    -
    - ); -} diff --git a/ui/src/pages/styles.js b/ui/src/pages/styles.js deleted file mode 100644 index 4286eadc7..000000000 --- a/ui/src/pages/styles.js +++ /dev/null @@ -1,31 +0,0 @@ -import { colors } from "../theme/variables"; - -export default { - wrapper: { - overflowY: "scroll", - overflowX: "hidden", - height: "100%", - }, - padded: { - padding: 30, - }, - header: { - backgroundColor: colors.gray14, - padding: "20px 30px 0 30px", - zIndex: 1, - }, - paddingBottom: { - paddingBottom: 25, - }, - tabContent: { - padding: 30, - }, - buttonRow: { - marginBottom: 15, - display: "flex", - justifyContent: "flex-end", - }, - field: { - marginBottom: 15, - }, -}; diff --git a/ui/src/pages/workbench/ExecutionHistory.jsx b/ui/src/pages/workbench/ExecutionHistory.jsx deleted file mode 100644 index 90a8acaf8..000000000 --- a/ui/src/pages/workbench/ExecutionHistory.jsx +++ /dev/null @@ -1,91 +0,0 @@ -import { - List, - ListItem, - ListItemText, - Toolbar, - IconButton, -} from "@material-ui/core"; -import { StatusBadge, Text, NavLink } from "../../components"; -import { makeStyles } from "@material-ui/styles"; -import { colors } from "../../theme/variables"; -import _ from "lodash"; -import { useInvalidateWorkflows, useWorkflows } from "../../data/workflow"; -import { formatRelative } from "date-fns"; -import RefreshIcon from "@material-ui/icons/Refresh"; - -const useStyles = makeStyles({ - sidebar: { - width: 360, - border: "0px solid rgba(0, 0, 0, 0)", - zIndex: 1, - boxShadow: "0 2px 4px 0 rgb(0 0 0 / 10%), 0 0 2px 0 rgb(0 0 0 / 10%)", - background: "#fff", - display: "flex", - flexDirection: "column", - }, - toolbar: { - backgroundColor: colors.gray14, - }, - list: { - overflowY: "auto", - flex: 1, - }, -}); - -export default function ExecutionHistory({ run }) { - const classes = useStyles(); - const workflowRecords = run ? 
run.workflowRecords : []; - const workflowIds = workflowRecords.map((record) => `${record.workflowId}`); - const results = - useWorkflows(workflowIds, { - staleTime: 60000, - }) || []; - const resultsMap = new Map( - results - .filter((r) => r.isSuccess) - .map((result) => [result.data.workflowId, result.data]) - ); - const invalidateWorkflows = useInvalidateWorkflows(); - - function handleRefresh() { - invalidateWorkflows(workflowIds); - } - - return ( -
    - - - Execution History - - - - - - - {Array.from(resultsMap.values()).map((workflow) => ( - - - {workflow.workflowId} - - } - secondary={ - - {" "} - {formatRelative(new Date(workflow.startTime), new Date())} - - } - secondaryTypographyProps={{ component: "div" }} - /> - - ))} - {_.isEmpty(workflowRecords) && ( - - No execution history. - - )} - -
    - ); -} diff --git a/ui/src/pages/workbench/RunHistory.tsx b/ui/src/pages/workbench/RunHistory.tsx deleted file mode 100644 index dac785b22..000000000 --- a/ui/src/pages/workbench/RunHistory.tsx +++ /dev/null @@ -1,165 +0,0 @@ -import { useImperativeHandle, useState, forwardRef } from "react"; -import { useLocalStorage } from "../../utils/localstorage"; -import { Text } from "../../components"; -import { - List, - ListItem, - ListItemText, - ListItemSecondaryAction, - Toolbar, - IconButton, -} from "@material-ui/core"; -import { makeStyles } from "@material-ui/styles"; -import { immutableReplaceAt } from "../../utils/helpers"; -import { formatRelative } from "date-fns"; -import DeleteIcon from "@material-ui/icons/DeleteForever"; -import { colors } from "../../theme/variables"; -import CloseIcon from "@material-ui/icons/Close"; -import _ from "lodash"; -import { useEnv } from "../../plugins/env"; - -const useStyles = makeStyles({ - sidebar: { - width: 300, - border: "0px solid rgba(0, 0, 0, 0)", - zIndex: 1, - boxShadow: "0 2px 4px 0 rgb(0 0 0 / 10%), 0 0 2px 0 rgb(0 0 0 / 10%)", - background: "#fff", - display: "flex", - flexDirection: "column", - }, - toolbar: { - backgroundColor: colors.gray14, - }, - title: { - fontWeight: "bold", - flex: 1, - }, - list: { - overflowY: "auto", - cursor: "pointer", - flex: 1, - }, -}); -type RunPayload = any; -type RunEntry = { - runPayload: RunPayload; - workflowRecords: WorkflowRecord[]; - createTime: number; -}; -type WorkflowRecord = { - workflowId: string; -}; - -type RunHistoryProps = { - onRunSelected: (run: RunEntry | undefined) => void; -}; - -const RUN_HISTORY_SCHEMA_VER = 1; - -const RunHistory = forwardRef((props: RunHistoryProps, ref) => { - const { onRunSelected } = props; - const { stack } = useEnv(); - const classes = useStyles(); - const [selectedCreateTime, setSelectedCreateTime] = useState< - number | undefined - >(undefined); - const [runHistory, setRunHistory]: readonly [ - RunEntry[], - (v: RunEntry[]) 
=> void - ] = useLocalStorage(`runHistory_${stack}_${RUN_HISTORY_SCHEMA_VER}`, []); - - useImperativeHandle(ref, () => ({ - pushNewRun: (runPayload: RunPayload) => { - const createTime = new Date().getTime(); - const newRun = { - runPayload: runPayload, - workflowRecords: [], - createTime: createTime, - }; - setRunHistory([newRun, ...runHistory]); - setSelectedCreateTime(createTime); - - return newRun; - }, - updateRun: (createTime: number, workflowId: string) => { - const idx = runHistory.findIndex((v) => v.createTime === createTime); - const currRun = runHistory[idx]; - const oldRecords = currRun.workflowRecords; - const updatedRun = { - runPayload: currRun.runPayload, - workflowRecords: [ - { - workflowId: workflowId, - }, - ...oldRecords, - ], - createTime: currRun.createTime, - }; - - setRunHistory(immutableReplaceAt(runHistory, idx, updatedRun)); - onRunSelected(updatedRun); - }, - })); - - function handleSelectRun(run: RunEntry) { - if (onRunSelected) onRunSelected(run); - setSelectedCreateTime(run.createTime); - } - - function handleDeleteAll() { - if (window.confirm("Delete all run history in this browser?")) { - setRunHistory([]); - } - } - - function handleDeleteItem(run: RunEntry) { - const newHistory = runHistory.filter( - (v) => v.createTime !== run.createTime - ); - if (newHistory.length > 0) { - setSelectedCreateTime(newHistory[0].createTime); - onRunSelected(newHistory[0]); - } else { - console.log("Empty history"); - setSelectedCreateTime(undefined); - onRunSelected(undefined); - } - setRunHistory(newHistory); - } - - return ( -
    - - - Run History - - - - - - - {runHistory.map((run) => ( - handleSelectRun(run)} - > - - - handleDeleteItem(run)}> - - - - - ))} - {_.isEmpty(runHistory) && No saved runs.} - -
    - ); -}); - -export default RunHistory; diff --git a/ui/src/pages/workbench/Workbench.jsx b/ui/src/pages/workbench/Workbench.jsx deleted file mode 100644 index 17e58d0dc..000000000 --- a/ui/src/pages/workbench/Workbench.jsx +++ /dev/null @@ -1,98 +0,0 @@ -import { useState, useRef } from "react"; -import { makeStyles } from "@material-ui/styles"; -import { Helmet } from "react-helmet"; -import RunHistory from "./RunHistory"; -import WorkbenchForm from "./WorkbenchForm"; -import { colors } from "../../theme/variables"; -import { useStartWorkflow } from "../../data/workflow"; -import ExecutionHistory from "./ExecutionHistory"; - -const useStyles = makeStyles({ - wrapper: { - height: "100%", - overflow: "hidden", - display: "flex", - flexDirection: "row", - position: "relative", - }, - name: { - width: "50%", - }, - submitButton: { - float: "right", - }, - toolbar: { - backgroundColor: colors.gray14, - }, - workflowName: { - fontWeight: "bold", - }, - main: { - flex: 1, - display: "flex", - flexDirection: "column", - }, - row: { - display: "flex", - flexDirection: "row", - }, - fields: { - margin: 30, - flex: 1, - display: "flex", - flexDirection: "column", - gap: 15, - }, - runInfo: { - marginLeft: -350, - }, -}); - -export default function Workbench() { - const classes = useStyles(); - - const runHistoryRef = useRef(); - const [run, setRun] = useState(undefined); - - const { mutate: startWorkflow } = useStartWorkflow({ - onSuccess: (workflowId, variables) => { - runHistoryRef.current.updateRun(variables.createTime, workflowId); - }, - }); - - const handleRunSelect = (run) => { - setRun(run); - }; - - const handleSaveRun = (runPayload) => { - const newRun = runHistoryRef.current.pushNewRun(runPayload); - setRun(newRun); - return newRun; - }; - - const handleExecuteRun = (createTime, runPayload) => { - startWorkflow({ - createTime, - body: runPayload, - }); - }; - - return ( - <> - - Conductor UI - Workbench - - -
    - - - - -
    - - ); -} diff --git a/ui/src/pages/workbench/WorkbenchForm.jsx b/ui/src/pages/workbench/WorkbenchForm.jsx deleted file mode 100644 index 73154779f..000000000 --- a/ui/src/pages/workbench/WorkbenchForm.jsx +++ /dev/null @@ -1,256 +0,0 @@ -import { useMemo } from "react"; -import { Text, Pill } from "../../components"; -import { Toolbar, IconButton, Tooltip } from "@material-ui/core"; -import FormikInput from "../../components/formik/FormikInput"; -import FormikJsonInput from "../../components/formik/FormikJsonInput"; -import FormikDropdown from "../../components/formik/FormikDropdown"; -import { makeStyles } from "@material-ui/styles"; -import _ from "lodash"; -import { Form, setNestedObjectValues, withFormik } from "formik"; -import { - useWorkflowNamesAndVersions, - useWorkflowDef, -} from "../../data/workflow"; -import FormikVersionDropdown from "../../components/formik/FormikVersionDropdown"; -import PlayArrowIcon from "@material-ui/icons/PlayArrow"; -import PlaylistAddIcon from "@material-ui/icons/PlaylistAdd"; -import SaveIcon from "@material-ui/icons/Save"; -import { colors } from "../../theme/variables"; -import { timestampRenderer } from "../../utils/helpers"; -import * as Yup from "yup"; - -const useStyles = makeStyles({ - name: { - width: "50%", - }, - submitButton: { - float: "right", - }, - toolbar: { - backgroundColor: colors.gray14, - }, - workflowName: { - fontWeight: "bold", - }, - main: { - flex: 1, - display: "flex", - flexDirection: "column", - overflow: "auto", - }, - fields: { - width: "100%", - padding: 30, - flex: 1, - display: "flex", - flexDirection: "column", - overflowX: "hidden", - overflowY: "auto", - gap: 15, - }, -}); - -Yup.addMethod(Yup.string, "isJson", function () { - return this.test("is-json", "is not valid json", (value) => { - if (_.isEmpty(value)) return true; - - try { - JSON.parse(value); - } catch (e) { - return false; - } - return true; - }); -}); -const validationSchema = Yup.object({ - workflowName: 
Yup.string().required("Workflow Name is required"), - workflowInput: Yup.string().isJson(), - taskToDomain: Yup.string().isJson(), -}); - -export default withFormik({ - enableReinitialize: true, - mapPropsToValues: ({ selectedRun }) => - runPayloadToFormData(_.get(selectedRun, "runPayload")), - validationSchema: validationSchema, -})(WorkbenchForm); - -function WorkbenchForm(props) { - const { - values, - validateForm, - setTouched, - setFieldValue, - dirty, - selectedRun, - saveRun, - executeRun, - } = props; - const classes = useStyles(); - const { workflowName, workflowVersion } = values; - const createTime = selectedRun ? selectedRun.createTime : undefined; - - const { data: namesAndVersions } = useWorkflowNamesAndVersions(); - const workflowNames = useMemo( - () => (namesAndVersions ? Array.from(namesAndVersions.keys()) : []), - [namesAndVersions] - ); - const { refetch } = useWorkflowDef(workflowName, workflowVersion, null, { - onSuccess: populateInput, - enabled: false, - }); - - function triggerPopulateInput() { - refetch(); - } - - function populateInput(workflowDef) { - let bootstrap = {}; - - if (!_.isEmpty(values.workflowInput)) { - const existing = JSON.parse(values.workflowInput); - bootstrap = _.pickBy(existing, (v) => v !== ""); - } - - if (workflowDef.inputParameters) { - for (let param of workflowDef.inputParameters) { - if (!_.has(bootstrap, param)) { - bootstrap[param] = ""; - } - } - - setFieldValue("workflowInput", JSON.stringify(bootstrap, null, 2)); - } - } - - function handleRun() { - validateForm().then((errors) => { - if (Object.keys(errors).length === 0) { - const payload = formDataToRunPayload(values); - if (!dirty && createTime) { - console.log("Executing pre-existing run. Append workflowRecord"); - executeRun(createTime, payload); - } else { - console.log("Executing new run. 
Save first then execute"); - const newRun = saveRun(payload); - executeRun(newRun.createTime, payload); - } - } else { - // Handle validation error manually (not using handleSubmit) - setTouched(setNestedObjectValues(errors, true)); - } - }); - } - - function handleSave() { - validateForm().then((errors) => { - if (Object.keys(errors).length === 0) { - const payload = formDataToRunPayload(values); - saveRun(payload); - } else { - setTouched(setNestedObjectValues(errors, true)); - } - }); - } - - return ( -
    - - Workflow Workbench - - - - - - - -
    - - - -
    -
    - - -
    - - - -
    -
    - - {dirty && } - {createTime && Created: {timestampRenderer(createTime)}} -
    - -
    - - - - - - - - - -
    -
    - ); -} - -function runPayloadToFormData(runPayload) { - return { - workflowName: _.get(runPayload, "name", ""), - workflowVersion: _.get(runPayload, "version", ""), - workflowInput: _.has(runPayload, "input") - ? JSON.stringify(runPayload.input, null, 2) - : "", - correlationId: _.get(runPayload, "correlationId", ""), - taskToDomain: _.has(runPayload, "taskToDomain") - ? JSON.stringify(runPayload.taskToDomain, null, 2) - : "", - }; -} - -function formDataToRunPayload(form) { - let runPayload = { - name: form.workflowName, - }; - if (form.workflowVersion) { - runPayload.version = form.workflowVersion; - } - if (form.workflowInput) { - runPayload.input = JSON.parse(form.workflowInput); - } - if (form.correlationId) { - runPayload.correlationId = form.correlationId; - } - if (form.taskToDomain) { - runPayload.taskToDomain = JSON.parse(form.taskToDomain); - } - return runPayload; -} - -// runHistoryRef.current.pushRun(runPayload); diff --git a/ui/src/plugins/AppBarModules.jsx b/ui/src/plugins/AppBarModules.jsx deleted file mode 100644 index 730e518ac..000000000 --- a/ui/src/plugins/AppBarModules.jsx +++ /dev/null @@ -1,3 +0,0 @@ -export default function AppBarModules() { - return null; -} diff --git a/ui/src/plugins/AppLogo.jsx b/ui/src/plugins/AppLogo.jsx deleted file mode 100644 index 01e842a28..000000000 --- a/ui/src/plugins/AppLogo.jsx +++ /dev/null @@ -1,14 +0,0 @@ -import React from "react"; -import { makeStyles } from "@material-ui/core/styles"; - -const useStyles = makeStyles((theme) => ({ - logo: { - height: 55, - marginRight: 30, - }, -})); - -export default function AppLogo() { - const classes = useStyles(); - return Conductor; -} diff --git a/ui/src/plugins/CustomAppBarButtons.jsx b/ui/src/plugins/CustomAppBarButtons.jsx deleted file mode 100644 index 724086256..000000000 --- a/ui/src/plugins/CustomAppBarButtons.jsx +++ /dev/null @@ -1,3 +0,0 @@ -export default function CustomAppBarButtons() { - return <>; -} diff --git 
a/ui/src/plugins/CustomRoutes.jsx b/ui/src/plugins/CustomRoutes.jsx deleted file mode 100644 index 6c70322b4..000000000 --- a/ui/src/plugins/CustomRoutes.jsx +++ /dev/null @@ -1,3 +0,0 @@ -export default function CustomRoutes() { - return <>; -} diff --git a/ui/src/plugins/constants.js b/ui/src/plugins/constants.js deleted file mode 100644 index e69de29bb..000000000 diff --git a/ui/src/plugins/customTypeRenderers.jsx b/ui/src/plugins/customTypeRenderers.jsx deleted file mode 100644 index d645915ae..000000000 --- a/ui/src/plugins/customTypeRenderers.jsx +++ /dev/null @@ -1 +0,0 @@ -export const customTypeRenderers = {}; diff --git a/ui/src/plugins/env.js b/ui/src/plugins/env.js deleted file mode 100644 index 505ef3e7c..000000000 --- a/ui/src/plugins/env.js +++ /dev/null @@ -1,6 +0,0 @@ -export function useEnv() { - return { - stack: "default", - defaultStack: "default", - }; -} diff --git a/ui/src/plugins/fetch.js b/ui/src/plugins/fetch.js deleted file mode 100644 index bf4ba2fe8..000000000 --- a/ui/src/plugins/fetch.js +++ /dev/null @@ -1,40 +0,0 @@ -import { useEnv } from "./env"; - -export function useFetchContext() { - const { stack } = useEnv(); - return { - stack, - ready: true, - }; -} -export function fetchWithContext( - path, - context, - fetchParams, - isJsonResponse = true -) { - const newParams = { ...fetchParams }; - - const newPath = `/api/${path}`; - const cleanPath = newPath.replace(/([^:]\/)\/+/g, "$1"); // Cleanup duplicated slashes - - return fetch(cleanPath, newParams) - .then((res) => Promise.all([res, res.text()])) - .then(([res, text]) => { - if (!res.ok) { - // get error message from body or default to response status - const error = text || res.status; - return Promise.reject(error); - } else if (!text || text.length === 0) { - return null; - } else if (!isJsonResponse) { - return text; - } else { - try { - return JSON.parse(text); - } catch (e) { - return text; - } - } - }); -} diff --git a/ui/src/react-app-env.d.ts 
b/ui/src/react-app-env.d.ts deleted file mode 100644 index 6431bc5fc..000000000 --- a/ui/src/react-app-env.d.ts +++ /dev/null @@ -1 +0,0 @@ -/// diff --git a/ui/src/schema/task.js b/ui/src/schema/task.js deleted file mode 100644 index 89dd464d6..000000000 --- a/ui/src/schema/task.js +++ /dev/null @@ -1,20 +0,0 @@ -export const NEW_TASK_TEMPLATE = { - name: "", - description: - "Edit or extend this sample task. Set the task name to get started", - retryCount: 3, - timeoutSeconds: 3600, - inputKeys: [], - outputKeys: [], - timeoutPolicy: "TIME_OUT_WF", - retryLogic: "FIXED", - retryDelaySeconds: 60, - responseTimeoutSeconds: 600, - rateLimitPerFrequency: 0, - rateLimitFrequencyInSeconds: 1, - ownerEmail: "", -}; - -export function configureMonaco(monaco) { - // No-op -} diff --git a/ui/src/schema/workflow.js b/ui/src/schema/workflow.js deleted file mode 100644 index 8742e19a7..000000000 --- a/ui/src/schema/workflow.js +++ /dev/null @@ -1,302 +0,0 @@ -/* eslint-disable no-template-curly-in-string */ - -export const NEW_WORKFLOW_TEMPLATE = { - name: "", - description: - "Edit or extend this sample workflow. 
Set the workflow name to get started", - version: 1, - tasks: [ - { - name: "get_population_data", - taskReferenceName: "get_population_data", - inputParameters: { - http_request: { - uri: "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - method: "GET", - }, - }, - type: "HTTP", - }, - ], - inputParameters: [], - outputParameters: { - data: "${get_population_data.output.response.body.data}", - source: "${get_population_data.output.response.body.source}", - }, - schemaVersion: 2, - restartable: true, - workflowStatusListenerEnabled: false, - ownerEmail: "example@email.com", - timeoutPolicy: "ALERT_ONLY", - timeoutSeconds: 0, -}; - -const WORKFLOW_SCHEMA = { - $schema: "http://json-schema.org/draft-07/schema", - $id: "http://example.com/example.json", - type: "object", - title: "The root schema", - description: "The root schema comprises the entire JSON document.", - default: {}, - examples: [ - { - name: "first_sample_workflow", - description: "First Sample Workflow", - version: 1, - tasks: [ - { - name: "get_population_data", - taskReferenceName: "get_population_data", - inputParameters: { - http_request: { - uri: "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - method: "GET", - }, - }, - type: "HTTP", - }, - ], - inputParameters: [], - outputParameters: { - data: "${get_population_data.output.response.body.data}", - source: "${get_population_data.output.response.body.source}", - }, - schemaVersion: 2, - restartable: true, - workflowStatusListenerEnabled: false, - ownerEmail: "example@email.com", - timeoutPolicy: "ALERT_ONLY", - timeoutSeconds: 0, - }, - ], - required: ["name", "version", "tasks", "schemaVersion"], - properties: { - name: { - $id: "#/properties/name", - default: "", - description: - "Workflow Name - should be without spaces or special characters. 
Underscores and periods are allowed.", - examples: ["first_sample_workflow"], - maxLength: 100, - pattern: "^[\\w\\.]+$", - title: "Workflow Name", - type: "string", - }, - description: { - $id: "#/properties/description", - type: "string", - title: "Workflow Description", - description: "An brief description of your workflow for reference.", - default: "", - examples: ["First Sample Workflow"], - }, - version: { - $id: "#/properties/version", - default: 0, - description: "An explanation about the purpose of this instance.", - examples: [1], - title: "The version schema", - minimum: 1, - type: "integer", - }, - tasks: { - $id: "#/properties/tasks", - type: "array", - title: "Workflow Tasks", - description: "This list holds the tasks for your workflow.", - default: [], - examples: [ - [ - { - name: "get_population_data", - taskReferenceName: "get_population_data", - inputParameters: { - http_request: { - uri: "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - method: "GET", - }, - }, - type: "HTTP", - }, - ], - ], - additionalItems: true, - items: { - $id: "#/properties/tasks/items", - anyOf: [ - { - $id: "#/properties/tasks/items/anyOf/0", - type: "object", - title: "The first anyOf schema", - description: "Workflow task details", - default: { - name: "", - taskReferenceName: "", - inputParameters: {}, - type: "SIMPLE", - }, - examples: [ - { - name: "get_population_data", - taskReferenceName: "get_population_data", - inputParameters: { - http_request: { - uri: "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - method: "GET", - }, - }, - type: "HTTP", - }, - ], - required: ["name", "taskReferenceName", "inputParameters", "type"], - properties: { - name: { - $id: "#/properties/tasks/items/anyOf/0/properties/name", - type: "string", - title: "Task name", - description: "Task name", - default: "", - examples: ["get_population_data"], - }, - taskReferenceName: { - $id: 
"#/properties/tasks/items/anyOf/0/properties/taskReferenceName", - type: "string", - title: "Task Reference Name", - description: - "A unique task reference name for this task in the entire workflow", - default: "", - examples: ["get_population_data"], - }, - inputParameters: { - $id: "#/properties/tasks/items/anyOf/0/properties/inputParameters", - type: "object", - title: "Input Parameters", - description: "Task input parameters", - default: {}, - examples: [ - { - http_request: { - uri: "https://datausa.io/api/data?drilldowns=Nation&measures=Population", - method: "GET", - }, - }, - ], - required: [], - properties: {}, - additionalProperties: true, - }, - type: { - $id: "#/properties/tasks/items/anyOf/0/properties/type", - type: "string", - title: "Task Type", - description: "Task type", - default: "", - examples: ["HTTP"], - }, - }, - additionalProperties: true, - }, - ], - }, - }, - inputParameters: { - $id: "#/properties/inputParameters", - type: "array", - title: "Workflow Input Parameters", - description: "An explanation about the purpose of this instance.", - default: [], - examples: [[]], - additionalItems: true, - items: { - $id: "#/properties/inputParameters/items", - }, - }, - outputParameters: { - $id: "#/properties/outputParameters", - type: "object", - title: "The outputParameters schema", - description: "An explanation about the purpose of this instance.", - default: {}, - examples: [ - { - data: "${get_population_data.output.response.body.data}", - source: "${get_population_data.output.response.body.source}", - }, - ], - required: [], - properties: {}, - additionalProperties: true, - }, - schemaVersion: { - $id: "#/properties/schemaVersion", - type: "integer", - title: "Schema Version", - description: "Fixed schema version", - default: 2, - examples: [2], - }, - restartable: { - $id: "#/properties/restartable", - type: "boolean", - title: "Workflow restartable", - description: "Specify if the workflow is restartable.", - default: true, - examples: 
[true, false], - }, - workflowStatusListenerEnabled: { - $id: "#/properties/workflowStatusListenerEnabled", - type: "boolean", - title: "The workflowStatusListenerEnabled schema", - description: "An explanation about the purpose of this instance.", - default: false, - examples: [true, false], - }, - ownerEmail: { - $id: "#/properties/ownerEmail", - type: "string", - title: "The ownerEmail schema", - description: "An explanation about the purpose of this instance.", - default: "", - examples: ["example@email.com"], - }, - timeoutPolicy: { - $id: "#/properties/timeoutPolicy", - type: "string", - title: "The timeoutPolicy schema", - description: "An explanation about the purpose of this instance.", - default: "", - examples: ["ALERT_ONLY", "TIME_OUT_WF"], - }, - timeoutSeconds: { - $id: "#/properties/timeoutSeconds", - type: "integer", - title: "The timeoutSeconds schema", - description: "An explanation about the purpose of this instance.", - default: 0, - examples: [0], - }, - }, - additionalProperties: true, -}; - -export const JSON_FILE_NAME = "file:///workflow.json"; - -export function configureMonaco(monaco) { - monaco.languages.typescript.javascriptDefaults.setEagerModelSync(true); - // noinspection JSUnresolvedVariable - monaco.languages.typescript.javascriptDefaults.setCompilerOptions({ - target: monaco.languages.typescript.ScriptTarget.ES6, - allowNonTsExtensions: true, - }); - let modelUri = monaco.Uri.parse(JSON_FILE_NAME); - monaco.languages.json.jsonDefaults.setDiagnosticsOptions({ - validate: true, - schemas: [ - { - uri: "http://conductor.tmp/schemas/workflow.json", // id of the first schema - fileMatch: [modelUri.toString()], // associate with our model - schema: WORKFLOW_SCHEMA, - }, - ], - }); -} diff --git a/ui/src/serviceWorker.js b/ui/src/serviceWorker.js deleted file mode 100644 index c7cd6663d..000000000 --- a/ui/src/serviceWorker.js +++ /dev/null @@ -1,141 +0,0 @@ -// This optional code is used to register a service worker. 
-// register() is not called by default. - -// This lets the app load faster on subsequent visits in production, and gives -// it offline capabilities. However, it also means that developers (and users) -// will only see deployed updates on subsequent visits to a page, after all the -// existing tabs open on the page have been closed, since previously cached -// resources are updated in the background. - -// To learn more about the benefits of this model and instructions on how to -// opt-in, read https://bit.ly/CRA-PWA - -const isLocalhost = Boolean( - window.location.hostname === "localhost" || - // [::1] is the IPv6 localhost address. - window.location.hostname === "[::1]" || - // 127.0.0.0/8 are considered localhost for IPv4. - window.location.hostname.match( - /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ - ) -); - -export function register(config) { - if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) { - // The URL constructor is available in all browsers that support SW. - const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); - if (publicUrl.origin !== window.location.origin) { - // Our service worker won't work if PUBLIC_URL is on a different origin - // from what our page is served on. This might happen if a CDN is used to - // serve assets; see https://github.com/facebook/create-react-app/issues/2374 - return; - } - - window.addEventListener("load", () => { - const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; - - if (isLocalhost) { - // This is running on localhost. Let's check if a service worker still exists or not. - checkValidServiceWorker(swUrl, config); - - // Add some additional logging to localhost, pointing developers to the - // service worker/PWA documentation. - navigator.serviceWorker.ready.then(() => { - console.log( - "This web app is being served cache-first by a service " + - "worker. To learn more, visit https://bit.ly/CRA-PWA" - ); - }); - } else { - // Is not localhost. 
Just register service worker - registerValidSW(swUrl, config); - } - }); - } -} - -function registerValidSW(swUrl, config) { - navigator.serviceWorker - .register(swUrl) - .then((registration) => { - registration.onupdatefound = () => { - const installingWorker = registration.installing; - if (installingWorker == null) { - return; - } - installingWorker.onstatechange = () => { - if (installingWorker.state === "installed") { - if (navigator.serviceWorker.controller) { - // At this point, the updated precached content has been fetched, - // but the previous service worker will still serve the older - // content until all client tabs are closed. - console.log( - "New content is available and will be used when all " + - "tabs for this page are closed. See https://bit.ly/CRA-PWA." - ); - - // Execute callback - if (config && config.onUpdate) { - config.onUpdate(registration); - } - } else { - // At this point, everything has been precached. - // It's the perfect time to display a - // "Content is cached for offline use." message. - console.log("Content is cached for offline use."); - - // Execute callback - if (config && config.onSuccess) { - config.onSuccess(registration); - } - } - } - }; - }; - }) - .catch((error) => { - console.error("Error during service worker registration:", error); - }); -} - -function checkValidServiceWorker(swUrl, config) { - // Check if the service worker can be found. If it can't reload the page. - fetch(swUrl, { - headers: { "Service-Worker": "script" }, - }) - .then((response) => { - // Ensure service worker exists, and that we really are getting a JS file. - const contentType = response.headers.get("content-type"); - if ( - response.status === 404 || - (contentType != null && contentType.indexOf("javascript") === -1) - ) { - // No service worker found. Probably a different app. Reload the page. 
- navigator.serviceWorker.ready.then((registration) => { - registration.unregister().then(() => { - window.location.reload(); - }); - }); - } else { - // Service worker found. Proceed as normal. - registerValidSW(swUrl, config); - } - }) - .catch(() => { - console.log( - "No internet connection found. App is running in offline mode." - ); - }); -} - -export function unregister() { - if ("serviceWorker" in navigator) { - navigator.serviceWorker.ready - .then((registration) => { - registration.unregister(); - }) - .catch((error) => { - console.error(error.message); - }); - } -} diff --git a/ui/src/setupProxy.js b/ui/src/setupProxy.js deleted file mode 100644 index 39896b450..000000000 --- a/ui/src/setupProxy.js +++ /dev/null @@ -1,13 +0,0 @@ -const { createProxyMiddleware } = require("http-proxy-middleware"); -const target = process.env.WF_SERVER || "http://localhost:8080"; - -module.exports = function (app) { - app.use( - "/api", - createProxyMiddleware({ - target: target, - //pathRewrite: { "^/api/": "/" }, - changeOrigin: true, - }) - ); -}; diff --git a/ui/src/setupTests.js b/ui/src/setupTests.js deleted file mode 100644 index 5fdf00169..000000000 --- a/ui/src/setupTests.js +++ /dev/null @@ -1,5 +0,0 @@ -// jest-dom adds custom jest matchers for asserting on DOM nodes. 
-// allows you to do things like: -// expect(element).toHaveTextContent(/react/i) -// learn more: https://github.com/testing-library/jest-dom -import "@testing-library/jest-dom/extend-expect"; diff --git a/ui/src/theme/colorOverrides.js b/ui/src/theme/colorOverrides.js deleted file mode 100644 index 9c1953978..000000000 --- a/ui/src/theme/colorOverrides.js +++ /dev/null @@ -1,41 +0,0 @@ -import * as colors from "./colors"; - -const brandAliases = { - brand00: colors.indigo00, - brand01: colors.indigo01, - brand02: colors.indigo02, - brand03: colors.indigo03, - brand04: colors.indigo04, - brand05: colors.indigo05, - brand06: colors.indigo06, - brand07: colors.indigo07, - brand08: colors.indigo08, - brand09: colors.indigo09, - brand10: colors.indigo10, - brand11: colors.indigo11, - brand12: colors.indigo12, - brand13: colors.indigo13, - brand14: colors.indigo14, -}; - -const brandShortcuts = { - brand: brandAliases.brand07, - bgBrand: brandAliases.brand07, - bgBrandLight: brandAliases.brand09, - bgBrandDark: brandAliases.brand05, - brandXLight: colors.indigoXLight, - brandXXLight: colors.indigoXXLight, -}; - -const failureAliases = { - failure: colors.red07, - failureLight: colors.red09, - failureDark: colors.red05, -}; - -export const colorOverrides = { - ...colors, - ...brandAliases, - ...brandShortcuts, - ...failureAliases, -}; diff --git a/ui/src/theme/colors.js b/ui/src/theme/colors.js deleted file mode 100644 index e3734e7ab..000000000 --- a/ui/src/theme/colors.js +++ /dev/null @@ -1,725 +0,0 @@ -// Backgrounds / Black -exports.black = "#050505"; - -// Transparents / Black / 00-Black-Light (70%) -exports.blackLight = "rgba(5,5,5,0.7)"; - -// Transparents / Black / 01-Black-Xlight (40%) -exports.blackXLight = "rgba(5,5,5,0.4)"; - -// Transparents / Black / 02-Black-Xxlight (10%) -exports.blackXXLight = "rgba(5,5,5,0.1)"; - -// Backgrounds / Blue / Blue-00 (Xxdark) -exports.blue00 = "#00101f"; - -// Backgrounds / Blue / Blue-01 -exports.blue01 = "#05192b"; - -// 
Backgrounds / Blue / Blue-02 -exports.blue02 = "#092743"; - -// Backgrounds / Blue / Blue-03 (Xdark) -exports.blue03 = "#0d365c"; - -// Backgrounds / Blue / Blue-04 -exports.blue04 = "#12487a"; - -// Backgrounds / Blue / Blue-05 (Dark) -exports.blue05 = "#165b99"; - -// Backgrounds / Blue / Blue-06 -exports.blue06 = "#1b6fb9"; - -// Backgrounds / Blue / -Blue-07 (Base) -exports.blue07 = "#1f83db"; - -// Backgrounds / Blue / Blue-08 -exports.blue08 = "#5995e1"; - -// Backgrounds / Blue / Blue-09 (Light) -exports.blue09 = "#7ea7e7"; - -// Backgrounds / Blue / Blue-10 -exports.blue10 = "#9dbaec"; - -// Backgrounds / Blue / Blue-11 (Xlight) -exports.blue11 = "#bacdf2"; - -// Backgrounds / Blue / Blue-12 -exports.blue12 = "#d2def6"; - -// Backgrounds / Blue / Blue-13 -exports.blue13 = "#eaf0fb"; - -// Backgrounds / Blue / Blue-14 (Xxlight) -exports.blue14 = "#f7fafd"; - -// Transparents / Blue / 00-Blue-Light (70%) -exports.blueLight = "rgba(31,131,219,0.7)"; - -// Transparents / Blue / 01-Blue-Xlight (40%) -exports.blueXLight = "rgba(31,131,219,0.4)"; - -// Transparents / Blue / 02-Blue-Xxlight (10%) -exports.blueXXLight = "rgba(31,131,219,0.1)"; - -// Backgrounds / Cyan / Cyan-00 (Xxdark) -exports.cyan00 = "#001b1e"; - -// Backgrounds / Cyan / Cyan-01 -exports.cyan01 = "#042529"; - -// Backgrounds / Cyan / Cyan-02 -exports.cyan02 = "#08373d"; - -// Backgrounds / Cyan / Cyan-03 (Xdark) -exports.cyan03 = "#0f4a52"; - -// Backgrounds / Cyan / Cyan-04 -exports.cyan04 = "#17616c"; - -// Backgrounds / Cyan / Cyan-05 (Dark) -exports.cyan05 = "#207986"; - -// Backgrounds / Cyan / Cyan-06 -exports.cyan06 = "#2991a2"; - -// Backgrounds / Cyan / -Cyan-07 (Base) -exports.cyan07 = "#32abbe"; - -// Backgrounds / Cyan / Cyan-08 -exports.cyan08 = "#5fb8c8"; - -// Backgrounds / Cyan / Cyan-09 (Light) -exports.cyan09 = "#80c5d2"; - -// Backgrounds / Cyan / Cyan-10 -exports.cyan10 = "#9ed2dc"; - -// Backgrounds / Cyan / Cyan-11 (Xlight) -exports.cyan11 = "#badfe6"; - -// Backgrounds / 
Cyan / Cyan-12 -exports.cyan12 = "#d2eaef"; - -// Backgrounds / Cyan / Cyan-13 -exports.cyan13 = "#eaf5f8"; - -// Backgrounds / Cyan / Cyan-14 (Xxlight) -exports.cyan14 = "#f7fcfd"; - -// Transparents / Cyan / 00-Cyan-Light (70%) -exports.cyanLight = "rgba(50,171,190,0.7)"; - -// Transparents / Cyan / 01-Cyan-Xlight (40%) -exports.cyanXLight = "rgba(50,171,190,0.4)"; - -// Transparents / Cyan / 02-Cyan-Xxlight (10%) -exports.cyanXXLight = "rgba(50,171,190,0.1)"; - -// Backgrounds / Grape / Grape-00 (Xxdark) -exports.grape00 = "#18001f"; - -// Backgrounds / Grape / Grape-01 -exports.grape01 = "#200b2a"; - -// Backgrounds / Grape / Grape-02 -exports.grape02 = "#33143f"; - -// Backgrounds / Grape / Grape-03 (Xdark) -exports.grape03 = "#481d56"; - -// Backgrounds / Grape / Grape-04 -exports.grape04 = "#602871"; - -// Backgrounds / Grape / Grape-05 (Dark) -exports.grape05 = "#7a338d"; - -// Backgrounds / Grape / Grape-06 -exports.grape06 = "#943eab"; - -// Backgrounds / Grape / -Grape-07 (Base) -exports.grape07 = "#b04ac9"; - -// Backgrounds / Grape / Grape-08 -exports.grape08 = "#be68d2"; - -// Backgrounds / Grape / Grape-09 (Light) -exports.grape09 = "#cb84da"; - -// Backgrounds / Grape / Grape-10 -exports.grape10 = "#d89fe3"; - -// Backgrounds / Grape / Grape-11 (Xlight) -exports.grape11 = "#e4baeb"; - -// Backgrounds / Grape / Grape-12 -exports.grape12 = "#edd2f2"; - -// Backgrounds / Grape / Grape-13 -exports.grape13 = "#f7e9f9"; - -// Backgrounds / Grape / Grape-14 (Xxlight) -exports.grape14 = "#fcf7fd"; - -// Transparents / Grape / 00-Grape-Light (70%) -exports.grapeLight = "rgba(176,74,201,0.7)"; - -// Transparents / Grape / 01-Grape-Xlight (40%) -exports.grapeXLight = "rgba(176,74,201,0.4)"; - -// Transparents / Grape / 02-Grape-Xxlight (10%) -exports.grapeXXLight = "rgba(176,74,201,0.1)"; - -// Backgrounds / Gray / Gray-00 (Xxdark) -exports.gray00 = "#0f0f0f"; - -// Backgrounds / Gray / Gray-01 -exports.gray01 = "#181818"; - -// Backgrounds / Gray / Gray-02 
-exports.gray02 = "#242424"; - -// Backgrounds / Gray / Gray-03 (Xdark) -exports.gray03 = "#323232"; - -// Backgrounds / Gray / Gray-04 -exports.gray04 = "#424242"; - -// Backgrounds / Gray / Gray-05 (Dark) -exports.gray05 = "#535353"; - -// Backgrounds / Gray / Gray-06 -exports.gray06 = "#646464"; - -// Backgrounds / Gray / -Gray-07 (Base) -exports.gray07 = "#767676"; - -// Backgrounds / Gray / Gray-08 -exports.gray08 = "#8a8a8a"; - -// Backgrounds / Gray / Gray-09 (Light) -exports.gray09 = "#9e9e9e"; - -// Backgrounds / Gray / Gray-10 -exports.gray10 = "#b3b3b3"; - -// Backgrounds / Gray / Gray-11 (Xlight) -exports.gray11 = "#c8c8c8"; - -// Backgrounds / Gray / Gray-12 -exports.gray12 = "#dbdbdb"; - -// Backgrounds / Gray / Gray-13 -exports.gray13 = "#efefef"; - -// Backgrounds / Gray / Gray-14 (Xxlight) -exports.gray14 = "#fafafa"; - -// Transparents / Gray / 00-Gray-Light (70%) -exports.grayLight = "rgba(118,118,118,0.7)"; - -// Transparents / Gray / 01-Gray-Xlight (40%) -exports.grayXLight = "rgba(118,118,118,0.4)"; - -// Transparents / Gray / 02-Gray-Xxlight (10%) -exports.grayXXLight = "rgba(118,118,118,0.1)"; - -// Backgrounds / Green / Green-00 (Xxdark) -exports.green00 = "#121e00"; - -// Backgrounds / Green / Green-01 -exports.green01 = "#192a07"; - -// Backgrounds / Green / Green-02 -exports.green02 = "#28400f"; - -// Backgrounds / Green / Green-03 (Xdark) -exports.green03 = "#385714"; - -// Backgrounds / Green / Green-04 -exports.green04 = "#4c731a"; - -// Backgrounds / Green / Green-05 (Dark) -exports.green05 = "#61911f"; - -// Backgrounds / Green / Green-06 -exports.green06 = "#76af25"; - -// Backgrounds / Green / -Green-07 (Base) -exports.green07 = "#8ccf2a"; - -// Backgrounds / Green / Green-08 -exports.green08 = "#a1d753"; - -// Backgrounds / Green / Green-09 (Light) -exports.green09 = "#b4de74"; - -// Backgrounds / Green / Green-10 -exports.green10 = "#c6e593"; - -// Backgrounds / Green / Green-11 (Xlight) -exports.green11 = "#d7edb2"; - -// 
Backgrounds / Green / Green-12 -exports.green12 = "#e5f3cd"; - -// Backgrounds / Green / Green-13 -exports.green13 = "#f3f9e8"; - -// Backgrounds / Green / Green-14 (Xxlight) -exports.green14 = "#fbfdf7"; - -// Transparents / Green / 00-Green-Light (70%) -exports.greenLight = "rgba(140,207,42,0.7)"; - -// Transparents / Green / 01-Green-Xlight (40%) -exports.greenXLight = "rgba(140,207,42,0.4)"; - -// Transparents / Green / 02-Green-Xxlight (10%) -exports.greenXXLight = "rgba(140,207,42,0.1)"; - -// Backgrounds / Indigo / Indigo-00 (Xxdark) -exports.indigo00 = "#00071f"; - -// Backgrounds / Indigo / Indigo-01 -exports.indigo01 = "#07122c"; - -// Backgrounds / Indigo / Indigo-02 -exports.indigo02 = "#0f1e44"; - -// Backgrounds / Indigo / Indigo-03 (Xdark) -exports.indigo03 = "#192b5e"; - -// Backgrounds / Indigo / Indigo-04 -exports.indigo04 = "#24397e"; - -// Backgrounds / Indigo / Indigo-05 (Dark) -exports.indigo05 = "#30499f"; - -// Backgrounds / Indigo / Indigo-06 -exports.indigo06 = "#3c59c1"; - -// Backgrounds / Indigo / -Indigo-07 (Base) -exports.indigo07 = "#4969e4"; - -// Backgrounds / Indigo / Indigo-08 -exports.indigo08 = "#6f7ee9"; - -// Backgrounds / Indigo / Indigo-09 (Light) -exports.indigo09 = "#8e94ed"; - -// Backgrounds / Indigo / Indigo-10 -exports.indigo10 = "#a9abf1"; - -// Backgrounds / Indigo / Indigo-11 (Xlight) -exports.indigo11 = "#c2c2f5"; - -// Backgrounds / Indigo / Indigo-12 -exports.indigo12 = "#d7d7f8"; - -// Backgrounds / Indigo / Indigo-13 -exports.indigo13 = "#ebedfb"; - -// Backgrounds / Indigo / Indigo-14 (Xxlight) -exports.indigo14 = "#f7f9fd"; - -// Transparents / Indigo / 00-Indigo-Light (70%) -exports.indigoLight = "rgba(73,105,228,0.7)"; - -// Transparents / Indigo / 01-Indigo-Xlight (40%) -exports.indigoXLight = "rgba(73,105,228,0.4)"; - -// Transparents / Indigo / 02-Indigo-Xxlight (10%) -exports.indigoXXLight = "rgba(73,105,228,0.1)"; - -// Backgrounds / Lime / Lime-00 (Xxdark) -exports.lime00 = "#001f06"; - -// 
Backgrounds / Lime / Lime-01 -exports.lime01 = "#05290f"; - -// Backgrounds / Lime / Lime-02 -exports.lime02 = "#0c3c19"; - -// Backgrounds / Lime / Lime-03 (Xdark) -exports.lime03 = "#145124"; - -// Backgrounds / Lime / Lime-04 -exports.lime04 = "#1f6930"; - -// Backgrounds / Lime / Lime-05 (Dark) -exports.lime05 = "#2a833c"; - -// Backgrounds / Lime / Lime-06 -exports.lime06 = "#359e4a"; - -// Backgrounds / Lime / -Lime-07 (Base) -exports.lime07 = "#41b957"; - -// Backgrounds / Lime / Lime-08 -exports.lime08 = "#65c470"; - -// Backgrounds / Lime / Lime-09 (Light) -exports.lime09 = "#84d08a"; - -// Backgrounds / Lime / Lime-10 -exports.lime10 = "#a0dba3"; - -// Backgrounds / Lime / Lime-11 (Xlight) -exports.lime11 = "#bbe5bd"; - -// Backgrounds / Lime / Lime-12 -exports.lime12 = "#d2efd4"; - -// Backgrounds / Lime / Lime-13 -exports.lime13 = "#e9f8eb"; - -// Backgrounds / Lime / Lime-14 (Xxlight) -exports.lime14 = "#f6fdf8"; - -// Transparents / Lime / 00-Lime-Light (70%) -exports.limeLight = "rgba(65,185,87,0.7)"; - -// Transparents / Lime / 01-Lime-Xlight (40%) -exports.limeXLight = "rgba(65,185,87,0.4)"; - -// Transparents / Lime / 02-Lime-Xxlight (10%) -exports.limeXXLight = "rgba(65,185,87,0.1)"; - -// Backgrounds / Orange / Orange-00 (Xxdark) -exports.orange00 = "#1e0c00"; - -// Backgrounds / Orange / Orange-01 -exports.orange01 = "#2b1505"; - -// Backgrounds / Orange / Orange-02 -exports.orange02 = "#46210d"; - -// Backgrounds / Orange / Orange-03 (Xdark) -exports.orange03 = "#622e10"; - -// Backgrounds / Orange / Orange-04 -exports.orange04 = "#853d12"; - -// Backgrounds / Orange / Orange-05 (Dark) -exports.orange05 = "#a94d14"; - -// Backgrounds / Orange / Orange-06 -exports.orange06 = "#cf5d14"; - -// Backgrounds / Orange / -Orange-07 (Base) -exports.orange07 = "#f66e13"; - -// Backgrounds / Orange / Orange-08 -exports.orange08 = "#fd853f"; - -// Backgrounds / Orange / Orange-09 (Light) -exports.orange09 = "#ff9c62"; - -// Backgrounds / Orange / 
Orange-10 -exports.orange10 = "#ffb284"; - -// Backgrounds / Orange / Orange-11 (Xlight) -exports.orange11 = "#ffc8a7"; - -// Backgrounds / Orange / Orange-12 -exports.orange12 = "#ffdbc5"; - -// Backgrounds / Orange / Orange-13 -exports.orange13 = "#ffeee5"; - -// Backgrounds / Orange / Orange-14 (Xxlight) -exports.orange14 = "#fdf9f7"; - -// Transparents / Orange / 00-Orange-Light (70%) -exports.orangeLight = "rgba(246,110,19,0.7)"; - -// Transparents / Orange / 01-Orange-Xlight (40%) -exports.orangeXLight = "rgba(246,110,19,0.4)"; - -// Transparents / Orange / 02-Orange-Xxlight (10%) -exports.orangeXXLight = "rgba(246,110,19,0.1)"; - -// Backgrounds / Pear / Pear-00 (Xxdark) -exports.pear00 = "#1e1d00"; - -// Backgrounds / Pear / Pear-01 -exports.pear01 = "#2a2a07"; - -// Backgrounds / Pear / Pear-02 -exports.pear02 = "#42410e"; - -// Backgrounds / Pear / Pear-03 (Xdark) -exports.pear03 = "#5d5a12"; - -// Backgrounds / Pear / Pear-04 -exports.pear04 = "#7c7815"; - -// Backgrounds / Pear / Pear-05 (Dark) -exports.pear05 = "#9d9718"; - -// Backgrounds / Pear / Pear-06 -exports.pear06 = "#bfb71b"; - -// Backgrounds / Pear / -Pear-07 (Base) -exports.pear07 = "#e3d91c"; - -// Backgrounds / Pear / Pear-08 -exports.pear08 = "#eade4f"; - -// Backgrounds / Pear / Pear-09 (Light) -exports.pear09 = "#f0e472"; - -// Backgrounds / Pear / Pear-10 -exports.pear10 = "#f6e993"; - -// Backgrounds / Pear / Pear-11 (Xlight) -exports.pear11 = "#f9efb2"; - -// Backgrounds / Pear / Pear-12 -exports.pear12 = "#fcf4cd"; - -// Backgrounds / Pear / Pear-13 -exports.pear13 = "#fdf9e8"; - -// Backgrounds / Pear / Pear-14 (Xxlight) -exports.pear14 = "#fdfcf7"; - -// Transparents / Pear / 00-Pear-Light (70%) -exports.pearLight = "rgba(227,217,28,0.7)"; - -// Transparents / Pear / 01-Pear-Xlight (40%) -exports.pearXLight = "rgba(227,217,28,0.4)"; - -// Transparents / Pear / 02-Pear-Xxlight (10%) -exports.pearXXLight = "rgba(227,217,28,0.1)"; - -// Backgrounds / Pink / Pink-00 (Xxdark) 
-exports.pink00 = "#1e000a"; - -// Backgrounds / Pink / Pink-01 -exports.pink01 = "#280a14"; - -// Backgrounds / Pink / Pink-02 -exports.pink02 = "#3f1221"; - -// Backgrounds / Pink / Pink-03 (Xdark) -exports.pink03 = "#58192f"; - -// Backgrounds / Pink / Pink-04 -exports.pink04 = "#75223f"; - -// Backgrounds / Pink / Pink-05 (Dark) -exports.pink05 = "#942b50"; - -// Backgrounds / Pink / Pink-06 -exports.pink06 = "#b53461"; - -// Backgrounds / Pink / -Pink-07 (Base) -exports.pink07 = "#d63d73"; - -// Backgrounds / Pink / Pink-08 -exports.pink08 = "#e06187"; - -// Backgrounds / Pink / Pink-09 (Light) -exports.pink09 = "#e87f9c"; - -// Backgrounds / Pink / Pink-10 -exports.pink10 = "#f09cb1"; - -// Backgrounds / Pink / Pink-11 (Xlight) -exports.pink11 = "#f5b8c6"; - -// Backgrounds / Pink / Pink-12 -exports.pink12 = "#f9d1da"; - -// Backgrounds / Pink / Pink-13 -exports.pink13 = "#fce9ee"; - -// Backgrounds / Pink / Pink-14 (Xxlight) -exports.pink14 = "#fdf7f9"; - -// Transparents / Pink / 00-Pink-Light (70%) -exports.pinkLight = "rgba(214,61,115,0.7)"; - -// Transparents / Pink / 01-Pink-Xlight (40%) -exports.pinkXLight = "rgba(214,61,115,0.4)"; - -// Transparents / Pink / 02-Pink-Xxlight (10%) -exports.pinkXXLight = "rgba(214,61,115,0.1)"; - -// Backgrounds / Red / Red-00 (Xxdark) -exports.red00 = "#1e0002"; - -// Backgrounds / Red / Red-01 -exports.red01 = "#2a0805"; - -// Backgrounds / Red / Red-02 -exports.red02 = "#420e0b"; - -// Backgrounds / Red / Red-03 (Xdark) -exports.red03 = "#5d110f"; - -// Backgrounds / Red / Red-04 -exports.red04 = "#7d1311"; - -// Backgrounds / Red / Red-05 (Dark) -exports.red05 = "#9e1313"; - -// Backgrounds / Red / Red-06 -exports.red06 = "#c11014"; - -// Backgrounds / Red / -Red-07 (Base) -exports.red07 = "#e50914"; - -// Backgrounds / Red / Red-08 -exports.red08 = "#f04c38"; - -// Backgrounds / Red / Red-09 (Light) -exports.red09 = "#f9715a"; - -// Backgrounds / Red / Red-10 -exports.red10 = "#ff927d"; - -// Backgrounds / Red / 
Red-11 (Xlight) -exports.red11 = "#ffb2a2"; - -// Backgrounds / Red / Red-12 -exports.red12 = "#ffcdc3"; - -// Backgrounds / Red / Red-13 -exports.red13 = "#ffe8e4"; - -// Backgrounds / Red / Red-14 (Xxlight) -exports.red14 = "#fdf7f8"; - -// Transparents / Red / 00-Red-Light (70%) -exports.redLight = "rgba(229,9,20,0.7)"; - -// Transparents / Red / 01-Red-Xlight (40%) -exports.redXLight = "rgba(229,9,20,0.4)"; - -// Transparents / Red / 02-Red-Xxlight (10%) -exports.redXXLight = "rgba(229,9,20,0.1)"; - -// Backgrounds / Violet / Violet-00 (Xxdark) -exports.violet00 = "#08001e"; - -// Backgrounds / Violet / Violet-01 -exports.violet01 = "#110b2b"; - -// Backgrounds / Violet / Violet-02 -exports.violet02 = "#1d1643"; - -// Backgrounds / Violet / Violet-03 (Xdark) -exports.violet03 = "#2a1f5d"; - -// Backgrounds / Violet / Violet-04 -exports.violet04 = "#3b297c"; - -// Backgrounds / Violet / Violet-05 (Dark) -exports.violet05 = "#4c349d"; - -// Backgrounds / Violet / Violet-06 -exports.violet06 = "#5e3fbf"; - -// Backgrounds / Violet / -Violet-07 (Base) -exports.violet07 = "#714be2"; - -// Backgrounds / Violet / Violet-08 -exports.violet08 = "#8c66e7"; - -// Backgrounds / Violet / Violet-09 (Light) -exports.violet09 = "#a481ec"; - -// Backgrounds / Violet / Violet-10 -exports.violet10 = "#ba9cf1"; - -// Backgrounds / Violet / Violet-11 (Xlight) -exports.violet11 = "#ceb8f5"; - -// Backgrounds / Violet / Violet-12 -exports.violet12 = "#dfd0f8"; - -// Backgrounds / Violet / Violet-13 -exports.violet13 = "#f0e9fb"; - -// Backgrounds / Violet / Violet-14 (Xxlight) -exports.violet14 = "#f9f7fd"; - -// Transparents / Violet / 00-Violet-Light (70%) -exports.violetLight = "rgba(113,75,226,0.7)"; - -// Transparents / Violet / 01-Violet-Xlight (40%) -exports.violetXLight = "rgba(113,75,226,0.4)"; - -// Transparents / Violet / 02-Violet-Xxlight (10%) -exports.violetXXLight = "rgba(113,75,226,0.1)"; - -// Backgrounds / White -exports.white = "#FFFFFF"; - -// Transparents / White 
/ 00-White-Light (70%) -exports.whiteLight = "rgba(255,255,255,0.7)"; - -// Transparents / White / 01-White-Xlight (40%) -exports.whiteXLight = "rgba(255,255,255,0.4)"; - -// Transparents / White / 02-White-Xxlight (10%) -exports.whiteXXLight = "rgba(255,255,255,0.1)"; - -// Backgrounds / Yellow / Yellow-00 (Xxdark) -exports.yellow00 = "#1e1400"; - -// Backgrounds / Yellow / Yellow-01 -exports.yellow01 = "#2c1e06"; - -// Backgrounds / Yellow / Yellow-02 -exports.yellow02 = "#47300d"; - -// Backgrounds / Yellow / Yellow-03 (Xdark) -exports.yellow03 = "#64430f"; - -// Backgrounds / Yellow / Yellow-04 -exports.yellow04 = "#875a11"; - -// Backgrounds / Yellow / Yellow-05 (Dark) -exports.yellow05 = "#ac7210"; - -// Backgrounds / Yellow / Yellow-06 -exports.yellow06 = "#d38a0c"; - -// Backgrounds / Yellow / -Yellow-07 (Base) -exports.yellow07 = "#fba404"; - -// Backgrounds / Yellow / Yellow-08 -exports.yellow08 = "#ffb141"; - -// Backgrounds / Yellow / Yellow-09 (Light) -exports.yellow09 = "#ffbf66"; - -// Backgrounds / Yellow / Yellow-10 -exports.yellow10 = "#ffcd89"; - -// Backgrounds / Yellow / Yellow-11 (Xlight) -exports.yellow11 = "#ffdbaa"; - -// Backgrounds / Yellow / Yellow-12 -exports.yellow12 = "#ffe7c8"; - -// Backgrounds / Yellow / Yellow-13 -exports.yellow13 = "#fff4e6"; - -// Backgrounds / Yellow / Yellow-14 (Xxlight) -exports.yellow14 = "#fdfbf7"; - -// Transparents / Yellow / 00-Yellow-Light (70%) -exports.yellowLight = "rgba(251,164,4,0.7)"; - -// Transparents / Yellow / 01-Yellow-Xlight (40%) -exports.yellowXLight = "rgba(251,164,4,0.4)"; - -// Transparents / Yellow / 02-Yellow-Xxlight (10%) -exports.yellowXXLight = "rgba(251,164,4,0.1)"; diff --git a/ui/src/theme/index.js b/ui/src/theme/index.js deleted file mode 100644 index 813cc59b2..000000000 --- a/ui/src/theme/index.js +++ /dev/null @@ -1,2 +0,0 @@ -export { Provider as ThemeProvider } from "./provider"; -export { default as theme } from "./theme"; diff --git a/ui/src/theme/provider.jsx 
b/ui/src/theme/provider.jsx deleted file mode 100644 index 9c38f404d..000000000 --- a/ui/src/theme/provider.jsx +++ /dev/null @@ -1,12 +0,0 @@ -import React from "react"; -import { MuiThemeProvider } from "@material-ui/core/styles"; - -import { theme } from "./"; - -export const Provider = ({ children, ...rest }) => { - return ( - - {children} - - ); -}; diff --git a/ui/src/theme/theme.js b/ui/src/theme/theme.js deleted file mode 100644 index c4161f752..000000000 --- a/ui/src/theme/theme.js +++ /dev/null @@ -1,649 +0,0 @@ -import { unstable_createMuiStrictModeTheme as createMuiTheme } from "@material-ui/core/styles"; -import { - borders, - colors, - spacings, - breakpoints, - fontSizes, - lineHeights, - fontWeights, - fontFamily, -} from "./variables"; - -function toNumber(v) { - return parseFloat(v); -} - -const spacingFn = (factor) => { - const unit = toNumber(spacings.space0); - - // Support theme.spacing('space3') - if (typeof factor === "string") { - return toNumber(spacings[factor]); - } - - if (typeof factor === "number") { - // Support theme.spacing(2) - return unit * factor; - } - - return unit; -}; - -const colorFn = (color) => colors[color]; - -const baseThemeOptions = { - palette: { - type: "light", - primary: { - main: colors.brand, - light: colors.bgBrandLight, - dark: colors.bgBrandDark, - contrastText: colors.white, - }, - secondary: { - main: colors.white, - light: colors.bgBrandLight, - dark: colors.bgBrandDark, - contrastText: colors.black, - }, - text: { - primary: colors.black, - secondary: colors.blackXLight, - disabled: colors.blackXXLight, - hint: colors.blackXXLight, - }, - grey: { - 50: colors.gray14, - 100: colors.gray13, - 200: colors.gray12, - 300: colors.gray11, - 400: colors.gray10, - 500: colors.gray09, - 600: colors.gray07, - 700: colors.gray06, - 800: colors.gray04, - 900: colors.gray02, - A100: colors.gray12, - A200: colors.gray08, - A400: colors.gray03, - A700: colors.gray06, - }, - error: { - main: colors.failure, - light: 
colors.failureLight, - dark: colors.failureDark, - contrastText: colors.white, - }, - background: { - paper: colors.white, - default: colors.gray14, - }, - divider: colors.blackXXLight, - }, - typography: { - fontFamily: fontFamily.fontFamilySans, - fontSize: toNumber(fontSizes.fontSize2), - htmlFontSize: toNumber(fontSizes.fontSize2), - fontWeightLight: fontWeights.fontWeight0, - fontWeightRegular: fontWeights.fontWeight0, - fontWeightMedium: fontWeights.fontWeight1, - fontWeightBold: fontWeights.fontWeight2, - h1: { - fontSize: fontSizes.fontSize10, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - h2: { - fontSize: fontSizes.fontSize9, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - h3: { - fontSize: fontSizes.fontSize8, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - h4: { - fontSize: fontSizes.fontSize7, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - h5: { - fontSize: fontSizes.fontSize6, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - h6: { - fontSize: fontSizes.fontSize5, - lineHeight: lineHeights.lineHeight0, - fontWeight: fontWeights.fontWeight2, - }, - body1: { - fontSize: fontSizes.fontSize4, - lineHeight: lineHeights.lineHeight1, - }, - body2: { - fontSize: fontSizes.fontSize3, - lineHeight: lineHeights.lineHeight1, - }, - caption: { - fontSize: fontSizes.fontSize2, - lineHeight: lineHeights.lineHeight1, - fontWeight: fontWeights.fontWeight1, - }, - button: { - fontSize: fontSizes.fontSize2, - fontWeight: fontWeights.fontWeight1, - }, - }, - breakpoints: { - // this looks wrong, but it's not - // material's breakpoints are a range, so the below basically says - // xs is from 0 to breakpoints.large - values: { - xs: 0, - sm: toNumber(breakpoints.xsmall), - md: toNumber(breakpoints.small), - lg: toNumber(breakpoints.medium), - xl: toNumber(breakpoints.large), - }, - }, - 
shape: { - borderRadius: toNumber(borders.radiusSmall), - }, - color: colorFn, - spacing: spacingFn, - props: { - MuiButtonBase: { - disableRipple: true, - }, - MuiFormControl: { - variant: "outlined", - }, - MuiMenu: { - transitionDuration: 0, - elevation: 3, - }, - MuiTextField: { - variant: "outlined", - InputProps: { - labelWidth: 0, - }, - }, - MuiInputLabel: { - shrink: true, - disableAnimation: true, - }, - MuiOutlinedInput: { - notched: false, - }, - MuiPaper: { - elevation: 3, - }, - MuiPopover: { - elevation: 3, - }, - }, -}; - -const baseTheme = createMuiTheme(baseThemeOptions); - -// Keep overrides in separate object so we can reference attributes of baseTheme. -const overrides = { - overrides: { - MuiSvgIcon: { - root: { - fontSize: fontSizes.fontSize6, - }, - fontSizeSmall: { - fontSize: fontSizes.fontSize1, - }, - }, - MuiAvatar: { - root: { - fontSize: "2.4rem", - }, - }, - MuiButton: { - root: { - textDecoration: "none !important", - textTransform: "none", - paddingTop: baseTheme.spacing("space1"), - paddingBottom: baseTheme.spacing("space1"), - paddingLeft: baseTheme.spacing("space2"), - paddingRight: baseTheme.spacing("space2"), - border: "1px solid transparent", - transition: "none", - "&$focusVisible": { - boxShadow: "none", - position: "relative", - "&:after": { - content: '""', - display: "block", - position: "absolute", - width: "calc(100% + 6px)", - height: "calc(100% + 6px)", - borderRadius: borders.radiusSmall, - border: borders.blueRegular2px, - top: -5, - left: -5, - }, - }, - }, - text: { - paddingTop: baseTheme.spacing("space1"), - paddingBottom: baseTheme.spacing("space1"), - paddingLeft: baseTheme.spacing("space2"), - paddingRight: baseTheme.spacing("space2"), - "&:hover": { - backgroundColor: baseTheme.palette.grey.A100, - }, - }, - textSizeSmall: { - fontSize: "0.8125rem", - }, - outlined: { - paddingTop: baseTheme.spacing("space1"), - paddingBottom: baseTheme.spacing("space1"), - paddingLeft: baseTheme.spacing("space2"), - 
paddingRight: baseTheme.spacing("space2"), - }, - outlinedPrimary: { - border: borders.blackRegular1px, - }, - outlinedSecondary: { - border: borders.blackLight1px, - color: baseTheme.palette.secondary.contrastText, - "&:hover": { - border: borders.blackLight1px + " !important", - backgroundColor: baseTheme.palette.grey.A100, - }, - }, - contained: { - "&:disabled": { - backgroundColor: colors.bgBrandLight, - color: baseTheme.palette.common.white, - }, - boxShadow: "none !important", - "&:active": { - boxShadow: "none !important", - }, - }, - containedPrimary: { - color: `${colors.white} !important`, - }, - }, - MuiCheckbox: { - root: { - fontSize: fontSizes.fontSize4, - padding: baseTheme.spacing("space1"), - }, - colorSecondary: { - color: colors.blackLight, - "&$checked": { - color: baseTheme.palette.primary.main, - }, - "&$disabled": { - color: colors.blackXLight, - }, - }, - }, - MuiChip: { - root: { - borderRadius: borders.radiusSmall, - height: 24, - fontSize: fontSizes.fontSize2, - fontWeight: fontWeights.fontWeight1, - }, - label: { - paddingLeft: baseTheme.spacing("space1"), - paddingRight: baseTheme.spacing("space1"), - }, - sizeSmall: { - fontSize: fontSizes.fontSize0, - height: 20, - }, - deleteIcon: { - height: "100%", - padding: 3, - margin: 0, - backgroundColor: "rgba(5, 5, 5, 0.1)", - borderRadius: `0 ${borders.radiusSmall} ${borders.radiusSmall} 0`, - width: 24, - boxSizing: "border-box", - textAlign: "center", - fill: baseTheme.palette.common.white, - borderLeftWidth: 1, - borderLeftStyle: "solid", - borderLeftColor: "rgba(5, 5, 5, 0.1)", - }, - deleteIconColorPrimary: { - color: colors.white, - }, - colorSecondary: { - color: colors.white, - backgroundColor: colors.lime07, - }, - }, - MuiRadio: { - root: { - padding: baseTheme.spacing("space1"), - }, - }, - MuiInputBase: { - root: { - fontSize: fontSizes.fontSize2, - }, - input: { - "&[type=number]::-webkit-inner-spin-button ": { - appearance: "none", - margin: 0, - }, - }, - }, - 
MuiOutlinedInput: { - notchedOutline: { - borderColor: colors.blackXXLight, - top: 0, - "& legend": { - // force-disable notched legends - display: "none", - }, - }, - root: { - "&:hover $notchedOutline": { - borderColor: colors.blackXXLight, - }, - backgroundColor: baseTheme.palette.background.paper, - }, - input: { - padding: `${baseTheme.spacing("space2")}px ${baseTheme.spacing( - "space2" - )}px`, - }, - }, - MuiFormControl: { - root: { - display: "block", - }, - }, - MuiFormControlLabel: { - label: { - fontSize: fontSizes.fontSize3, - lineHeight: lineHeights.lineHeight1, - }, - }, - MuiInputLabel: { - root: { - display: "none", - pointerEvents: "none", - color: baseTheme.palette.text.primary, - }, - outlined: { - "&$shrink": { - display: "block", - transform: "none", - position: "relative", - fontWeight: fontWeights.fontWeight1, - fontSize: fontSizes.fontSize2, - paddingLeft: 0, - paddingBottom: 8, - }, - "&$focused": { - // focused attr under MuiInputLabel does not work - color: baseTheme.palette.text.primary, - }, - }, - }, - MuiFormHelperText: { - contained: { - margin: 0, - marginTop: baseTheme.spacing("space1"), - }, - }, - MuiSelect: { - icon: { - fontSize: fontSizes.fontSize5, - marginTop: 3, - color: baseTheme.palette.text.primary, - }, - selectMenu: {}, - }, - MuiPickersClockNumber: { - clockNumber: { - top: 6, - }, - }, - MuiMenuItem: { - root: { - color: baseTheme.palette.text.primary, - fontSize: fontSizes.fontSize1, - "&:hover": { - backgroundColor: baseTheme.palette.grey[100], - }, - "&:focus": { - backgroundColor: baseTheme.palette.grey[100], - }, - "&$selected": { - backgroundColor: baseTheme.palette.grey[200], - "&:hover": { - backgroundColor: baseTheme.palette.grey[200], - }, - "&:focus": { - backgroundColor: baseTheme.palette.grey[200], - }, - }, - }, - dense: { - paddingTop: 0, - paddingBottom: 0, - }, - }, - MuiSnackbarContent: { - root: { - backgroundColor: baseTheme.palette.primary.main, - paddingTop: 0, - paddingBottom: 0, - 
marginRight: baseTheme.spacing("space3"), - marginLeft: baseTheme.spacing("space3"), - borderRadius: baseTheme.shape.borderRadius, - boxShadow: "none", - }, - action: { - "& button": { - color: baseTheme.palette.common.white, - }, - }, - }, - MuiSwitch: { - root: { - padding: 0, - height: 20, - width: 40, - "&:hover": { - "& > $track": { - backgroundColor: colors.gray05, - }, - "& > $checked + $track": { - backgroundColor: colors.brand05, - }, - }, - }, - thumb: { - borderRadius: 8, - width: 16, - height: 16, - boxShadow: - "0px 1px 2px 0px rgba(0, 0, 0, 0.4), 0px 0px 1px 0px rgba(0, 0, 0, 0.4)", - }, - track: { - backgroundColor: colors.gray07, - borderRadius: 10, - opacity: 1, - }, - switchBase: { - padding: 2, - "&$checked": { - transform: "translateX(100%)", - "& + $track": { - opacity: 1, - }, - }, - }, - colorPrimary: { - "&$checked": { - color: baseTheme.palette.common.white, - }, - "&$checked + $track": { - backgroundColor: baseTheme.palette.primary.main, - }, - }, - }, - MuiTab: { - root: { - textTransform: "none", - "&$selected": { - color: "black", - }, - }, - }, - MuiTabs: { - indicator: { - height: 4, - }, - root: { - minHeight: 0, - }, - }, - MuiListItemText: { - secondary: { - fontSize: fontSizes.fontSize2, - }, - primary: { - fontSize: fontSizes.fontSize2, - }, - }, - MuiListSubheader: { - root: { - fontSize: fontSizes.fontSize2, - lineHeight: lineHeights.lineHeight1, - paddingTop: baseTheme.spacing("space0"), - paddingBottom: baseTheme.spacing("space0"), - }, - }, - MuiTableCell: { - root: { - fontSize: fontSizes.fontSize2, - }, - head: { - //border: 'none', - fontWeight: fontWeights.fontWeight1, - color: colors.gray05, - }, - }, - MuiTableRow: { - root: { - "&.Mui-selected:hover": { - backgroundColor: colors.gray12, - }, - "&.Mui-selected": { - backgroundColor: `${colors.gray12} !important`, - }, - }, - }, - MuiDialogTitle: { - root: { - backgroundColor: baseTheme.palette.grey[50], - padding: `${baseTheme.spacing("space5")}px ${baseTheme.spacing( 
- "space4" - )}px`, - borderBottom: `1px solid ${colors.blackXXLight}`, - }, - }, - MuiDialogContent: { - root: { - padding: baseTheme.spacing("space5"), - }, - }, - MuiDialogActions: { - root: { - backgroundColor: baseTheme.palette.grey[50], - padding: `${baseTheme.spacing("space3")}px ${baseTheme.spacing( - "space5" - )}px`, - borderTop: `1px solid ${colors.blackXXLight}`, - margin: 0, - - "button + button": { - marginLeft: baseTheme.spacing("space1"), - }, - }, - }, - MuiToolbar: { - root: { - gap: 8, - }, - }, - MuiAppBar: { - colorPrimary: { - backgroundColor: colors.white, - color: colors.gray00, - }, - root: { - zIndex: 999, - paddingLeft: 20, - paddingRight: 20, - boxShadow: "0 4px 8px 0 rgb(0 0 0 / 10%), 0 0 2px 0 rgb(0 0 0 / 10%)", - height: 80, - "& .MuiButton-label": { - color: colors.black, - }, - "& .MuiLink-underlineHover:hover": { - textDecoration: "none !important", - }, - }, - }, - MuiAutocomplete: { - input: { - padding: "12px 16px !important", - }, - paper: { - fontSize: fontSizes.fontSize2, - }, - popupIndicator: { - fontSize: fontSizes.fontSize5, - color: baseTheme.palette.text.primary, - }, - clearIndicator: { - fontSize: fontSizes.fontSize5, - }, - inputRoot: { - padding: "0px !important", - }, - listbox: { - backgroundColor: baseTheme.palette.common.white, - }, - tag: { - "&:first-child": { - marginLeft: 8, - }, - }, - }, - MuiTablePagination: { - select: { - paddingRight: "32px !important", - }, - selectRoot: { - top: 1, - }, - }, - }, -}; - -const finalTheme = createMuiTheme({ - ...baseTheme, - ...overrides, -}); - -export default finalTheme; diff --git a/ui/src/theme/variables.js b/ui/src/theme/variables.js deleted file mode 100644 index a1b98e927..000000000 --- a/ui/src/theme/variables.js +++ /dev/null @@ -1,63 +0,0 @@ -export { colorOverrides as colors } from "./colorOverrides"; - -export const fontSizes = { - fontSize0: "10px", - fontSize1: "12px", - fontSize2: "13px", - fontSize3: "14px", - fontSize4: "16px", - fontSize5: "18px", - 
fontSize6: "20px", - fontSize7: "24px", - fontSize8: "28px", - fontSize9: "32px", - fontSize10: "40px", - fontSize11: "52px", - fontSize12: "68px", - fontSize13: "88px", -}; -export const lineHeights = { - lineHeight0: 1.25, - lineHeight1: 1.5, -}; - -export const fontWeights = { - fontWeight0: 400, - fontWeight1: 600, - fontWeight2: 700, - fontWeight3: 800, -}; - -export const fontFamily = { - fontFamilySans: - '-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"', - fontFamilyMono: "monospace", -}; - -export const spacings = { - space0: "4px", - space1: "8px", - space2: "12px", - space3: "16px", - space4: "20px", - space5: "24px", - space6: "32px", - space7: "48px", - space8: "80px", - space9: "144px", -}; - -export const breakpoints = { - xsmall: "599px", - small: "1023px", - medium: "1439px", - large: "1919px", - xlarge: "3840px", -}; - -export const borders = { - radiusSmall: "4px", - blueRegular2px: "2px solid rgba(31,131,219,1)", - blackRegular1px: "1px solid rgba(5,5,5,1)", - blackLight1px: "1px solid rgba(5,5,5,0.7)", -}; diff --git a/ui/src/utils/constants.js b/ui/src/utils/constants.js deleted file mode 100644 index eb3e70ffa..000000000 --- a/ui/src/utils/constants.js +++ /dev/null @@ -1,31 +0,0 @@ -export const workflowStatuses = [ - "RUNNING", - "COMPLETED", - "FAILED", - "TIMED_OUT", - "TERMINATED", - "PAUSED", -]; - -export const taskTypes = [ - "ARCHER", - "DECISION", - "DO_WHILE", - "DYNAMIC", - "DYNIMO", - "EAAS", - "EVENT", - "EXCLUSIVE_JOIN", - "FORK_JOIN", - "FORK_JOIN_DYNAMIC", - "HTTP", - "JOIN", - "JSON_JQ_TRANSFORM", - "LAMBDA", - "SIMPLE", - "SUB_WORKFLOW", - "TERMINATE", - "TITUS", - "TITUS_TASK", - "WAIT", -]; diff --git a/ui/src/utils/helpers.js b/ui/src/utils/helpers.js deleted file mode 100644 index 5682a52de..000000000 --- a/ui/src/utils/helpers.js +++ /dev/null @@ -1,74 +0,0 @@ -import { format, intervalToDuration } from "date-fns"; -import _ 
from "lodash"; - -export function timestampRenderer(date) { - return !_.isNil(date) && format(new Date(date), "yyyy-MM-dd HH:mm:ss"); // could be string or number. -} - -export function durationRenderer(durationMs) { - const duration = intervalToDuration({ start: 0, end: durationMs }); - if (durationMs > 5000) { - return `${duration.minutes}m${duration.seconds}s`; - } else { - return `${durationMs}ms`; - } - - //return !isNaN(durationMs) && (durationMs > 0? formatDuration({seconds: durationMs/1000}): '0.0 seconds'); -} - -export function taskHasResult(task) { - const keys = Object.keys(task); - return !(keys.length === 1 && keys[0] === "workflowTask"); -} - -export function astToQuery(node) { - // leaf node - if (node.operator !== undefined) { - return node.field + node.operator + node.value; - } else if (node.combinator !== undefined) { - const clauses = node.rules - .filter((rule) => !(rule.rules && rule.rules.length === 0)) // Ignore empty groups - .map((rule) => astToQuery(rule)); - const wrapper = clauses.length > 1; - - let combinator = node.combinator.toUpperCase(); - - return `${wrapper ? "(" : ""}${clauses.join(` ${combinator} `)}${ - wrapper ? 
")" : "" - }`; - } else { - return ""; - } -} - -export function isFailedTask(status) { - return ( - status === "FAILED" || - status === "FAILED_WITH_TERMINAL_ERROR" || - status === "TIMED_OUT" || - status === "CANCELED" - ); -} - -export function defaultCompare(x, y) { - //INFO: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort - //ECMA specification: http://www.ecma-international.org/ecma-262/6.0/#sec-sortcompare - - if (x === undefined && y === undefined) return 0; - - if (x === undefined) return 1; - - if (y === undefined) return -1; - - if (x < y) return -1; - - if (x > y) return 1; - - return 0; -} - -export function immutableReplaceAt(array, index, value) { - const ret = array.slice(0); - ret[index] = value; - return ret; -} diff --git a/ui/src/utils/localstorage.ts b/ui/src/utils/localstorage.ts deleted file mode 100644 index 3d7e78559..000000000 --- a/ui/src/utils/localstorage.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { useState } from "react"; - -// If key is null/undefined, hook behaves exactly like useState -export const useLocalStorage = (key: string, initialValue: any) => { - const initialString = JSON.stringify(initialValue); - - const [storedValue, setStoredValue] = useState(() => { - if (key) { - const item = window.localStorage.getItem(key); - return item ? JSON.parse(item) : initialValue; - } else { - return initialValue; - } - }); - - const setValue = (value: any) => { - // Allow value to be a function so we have same API as useState - const valueToStore = value instanceof Function ? 
value(storedValue) : value; - - // Save state - setStoredValue(valueToStore); - - if (key) { - const stringToStore = JSON.stringify(valueToStore); - if (stringToStore === initialString) { - window.localStorage.removeItem(key); - } else { - window.localStorage.setItem(key, stringToStore); - } - } - }; - - return [storedValue, setValue] as const; -}; diff --git a/ui/test-karbon.sh b/ui/test-karbon.sh deleted file mode 100755 index e69de29bb..000000000 diff --git a/ui/tsconfig.json b/ui/tsconfig.json deleted file mode 100644 index 9d379a3c4..000000000 --- a/ui/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "noFallthroughCasesInSwitch": true, - "module": "esnext", - "moduleResolution": "node", - "resolveJsonModule": true, - "isolatedModules": true, - "noEmit": true, - "jsx": "react-jsx" - }, - "include": ["src"] -} diff --git a/ui/yarn.lock b/ui/yarn.lock deleted file mode 100644 index 3db96014c..000000000 --- a/ui/yarn.lock +++ /dev/null @@ -1,12090 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@ampproject/remapping@^2.1.0": - version "2.1.2" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.1.2.tgz#4edca94973ded9630d20101cd8559cedb8d8bd34" - integrity sha512-hoyByceqwKirw7w3Z7gnIIZC3Wx3J484Y3L/cMpXFbr7d9ZQj2mODrirNzcJa+SM3UlpWXYvKV4RlRpFXlWgXg== - dependencies: - "@jridgewell/trace-mapping" "^0.3.0" - -"@apideck/better-ajv-errors@^0.3.1": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.3.tgz#ab0b1e981e1749bf59736cf7ebe25cfc9f949c15" - integrity sha512-9o+HO2MbJhJHjDYZaDxJmSDckvDpiuItEsrIShV0DXeCshXWRHhqYyU/PKHMkuClOmFnZhRd6wzv4vpDu/dRKg== - dependencies: - json-schema "^0.4.0" - jsonpointer "^5.0.0" - leven "^3.1.0" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.8.3": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" - integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== - dependencies: - "@babel/highlight" "^7.16.7" - -"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.16.8", "@babel/compat-data@^7.17.0", "@babel/compat-data@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.17.7.tgz#078d8b833fbbcc95286613be8c716cef2b519fa2" - integrity sha512-p8pdE6j0a29TNGebNm7NzYZWB3xVZJBZ7XGs42uAKzQo8VQ3F0By/cQCtUEABwIqw5zo6WA4NbmxsfzADzMKnQ== - -"@babel/compat-data@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.17.10.tgz#711dc726a492dfc8be8220028b1b92482362baab" - integrity sha512-GZt/TCsG70Ms19gfZO1tM4CVnXsPgEPBCpJu+Qz3L0LUDsY5nZqFZglIoPC1kIYOtNBZlrnFT+klg12vFGZXrw== - -"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.16.0": - version "7.17.9" - resolved 
"https://registry.yarnpkg.com/@babel/core/-/core-7.17.9.tgz#6bae81a06d95f4d0dec5bb9d74bbc1f58babdcfe" - integrity sha512-5ug+SfZCpDAkVp9SFIZAzlW18rlzsOcJGaetCjkySnrXXDUw9AR8cDUm1iByTmdWM6yxX6/zycaV76w3YTF2gw== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.17.9" - "@babel/helper-compilation-targets" "^7.17.7" - "@babel/helper-module-transforms" "^7.17.7" - "@babel/helpers" "^7.17.9" - "@babel/parser" "^7.17.9" - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.17.9" - "@babel/types" "^7.17.0" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/core@^7.11.1", "@babel/core@^7.7.2", "@babel/core@^7.8.0": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.17.10.tgz#74ef0fbf56b7dfc3f198fc2d927f4f03e12f4b05" - integrity sha512-liKoppandF3ZcBnIYFjfSDHZLKdLHGJRkoWtG8zQyGJBQfIYobpnVGI5+pLBNtS6psFLDzyq8+h5HiVljW9PNA== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.17.10" - "@babel/helper-compilation-targets" "^7.17.10" - "@babel/helper-module-transforms" "^7.17.7" - "@babel/helpers" "^7.17.9" - "@babel/parser" "^7.17.10" - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.17.10" - "@babel/types" "^7.17.10" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/eslint-parser@^7.16.3": - version "7.17.0" - resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.17.0.tgz#eabb24ad9f0afa80e5849f8240d0e5facc2d90d6" - integrity sha512-PUEJ7ZBXbRkbq3qqM/jZ2nIuakUBqCYc7Qf52Lj7dlZ6zERnqisdHioL0l4wwQZnmskMeasqUNzLBFKs3nylXA== - dependencies: - eslint-scope "^5.1.1" - eslint-visitor-keys "^2.1.0" - semver "^6.3.0" - -"@babel/generator@^7.17.10", "@babel/generator@^7.7.2": - version "7.17.10" - resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.17.10.tgz#c281fa35b0c349bbe9d02916f4ae08fc85ed7189" - integrity sha512-46MJZZo9y3o4kmhBVc7zW7i8dtR1oIK/sdO5NcfcZRhTGYi+KKJRtHNgsU6c4VUcJmUNV/LQdebD/9Dlv4K+Tg== - dependencies: - "@babel/types" "^7.17.10" - "@jridgewell/gen-mapping" "^0.1.0" - jsesc "^2.5.1" - -"@babel/generator@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.17.9.tgz#f4af9fd38fa8de143c29fce3f71852406fc1e2fc" - integrity sha512-rAdDousTwxbIxbz5I7GEQ3lUip+xVCXooZNbsydCWs3xA7ZsYOv+CFRdzGxRX78BmQHu9B1Eso59AOZQOJDEdQ== - dependencies: - "@babel/types" "^7.17.0" - jsesc "^2.5.1" - source-map "^0.5.0" - -"@babel/helper-annotate-as-pure@^7.16.0", "@babel/helper-annotate-as-pure@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862" - integrity sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz#38d138561ea207f0f69eb1626a418e4f7e6a580b" - integrity sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.16.7" - "@babel/types" "^7.16.7" - -"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.16.7", "@babel/helper-compilation-targets@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.7.tgz#a3c2924f5e5f0379b356d4cfb313d1414dc30e46" - integrity 
sha512-UFzlz2jjd8kroj0hmCFV5zr+tQPi1dpC2cRsDV/3IEW8bJfCPrPpmcSN6ZS8RqIq4LXcmpipCQFPddyFA5Yc7w== - dependencies: - "@babel/compat-data" "^7.17.7" - "@babel/helper-validator-option" "^7.16.7" - browserslist "^4.17.5" - semver "^6.3.0" - -"@babel/helper-compilation-targets@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.10.tgz#09c63106d47af93cf31803db6bc49fef354e2ebe" - integrity sha512-gh3RxjWbauw/dFiU/7whjd0qN9K6nPJMqe6+Er7rOavFh0CQUSwhAE3IcTho2rywPJFxej6TUUHDkWcYI6gGqQ== - dependencies: - "@babel/compat-data" "^7.17.10" - "@babel/helper-validator-option" "^7.16.7" - browserslist "^4.20.2" - semver "^6.3.0" - -"@babel/helper-create-class-features-plugin@^7.16.10", "@babel/helper-create-class-features-plugin@^7.16.7", "@babel/helper-create-class-features-plugin@^7.17.6", "@babel/helper-create-class-features-plugin@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.17.9.tgz#71835d7fb9f38bd9f1378e40a4c0902fdc2ea49d" - integrity sha512-kUjip3gruz6AJKOq5i3nC6CoCEEF/oHH3cp6tOZhB+IyyyPyW0g1Gfsxn3mkk6S08pIA2y8GQh609v9G/5sHVQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-function-name" "^7.17.9" - "@babel/helper-member-expression-to-functions" "^7.17.7" - "@babel/helper-optimise-call-expression" "^7.16.7" - "@babel/helper-replace-supers" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - -"@babel/helper-create-regexp-features-plugin@^7.16.7", "@babel/helper-create-regexp-features-plugin@^7.17.0": - version "7.17.0" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.17.0.tgz#1dcc7d40ba0c6b6b25618997c5dbfd310f186fe1" - integrity 
sha512-awO2So99wG6KnlE+TPs6rn83gCz5WlEePJDTnLEqbchMVrBeAujURVphRdigsk094VhvZehFoNOihSlcBjwsXA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - regexpu-core "^5.0.1" - -"@babel/helper-define-polyfill-provider@^0.3.1": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz#52411b445bdb2e676869e5a74960d2d3826d2665" - integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA== - dependencies: - "@babel/helper-compilation-targets" "^7.13.0" - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/traverse" "^7.13.0" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - -"@babel/helper-environment-visitor@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz#ff484094a839bde9d89cd63cba017d7aae80ecd7" - integrity sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-explode-assignable-expression@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a" - integrity sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-function-name@^7.16.7", "@babel/helper-function-name@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12" - integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg== - dependencies: - "@babel/template" "^7.16.7" - "@babel/types" "^7.17.0" - 
-"@babel/helper-hoist-variables@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" - integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-member-expression-to-functions@^7.16.7", "@babel/helper-member-expression-to-functions@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.17.7.tgz#a34013b57d8542a8c4ff8ba3f747c02452a4d8c4" - integrity sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw== - dependencies: - "@babel/types" "^7.17.0" - -"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.0", "@babel/helper-module-imports@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" - integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-module-transforms@^7.16.7", "@babel/helper-module-transforms@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.17.7.tgz#3943c7f777139e7954a5355c815263741a9c1cbd" - integrity sha512-VmZD99F3gNTYB7fJRDTi+u6l/zxY0BE6OIxPSU7a50s6ZUQkHwSDmV92FfM+oCG0pZRVojGYhkR8I0OGeCVREw== - dependencies: - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-module-imports" "^7.16.7" - "@babel/helper-simple-access" "^7.17.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/helper-validator-identifier" "^7.16.7" - "@babel/template" "^7.16.7" - 
"@babel/traverse" "^7.17.3" - "@babel/types" "^7.17.0" - -"@babel/helper-optimise-call-expression@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz#a34e3560605abbd31a18546bd2aad3e6d9a174f2" - integrity sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz#aa3a8ab4c3cceff8e65eb9e73d87dc4ff320b2f5" - integrity sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA== - -"@babel/helper-remap-async-to-generator@^7.16.8": - version "7.16.8" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz#29ffaade68a367e2ed09c90901986918d25e57e3" - integrity sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-wrap-function" "^7.16.8" - "@babel/types" "^7.16.8" - -"@babel/helper-replace-supers@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz#e9f5f5f32ac90429c1a4bdec0f231ef0c2838ab1" - integrity sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw== - dependencies: - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-member-expression-to-functions" "^7.16.7" - "@babel/helper-optimise-call-expression" "^7.16.7" - 
"@babel/traverse" "^7.16.7" - "@babel/types" "^7.16.7" - -"@babel/helper-simple-access@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.17.7.tgz#aaa473de92b7987c6dfa7ce9a7d9674724823367" - integrity sha512-txyMCGroZ96i+Pxr3Je3lzEJjqwaRC9buMUgtomcrLe5Nd0+fk1h0LLA+ixUF5OW7AhHuQ7Es1WcQJZmZsz2XA== - dependencies: - "@babel/types" "^7.17.0" - -"@babel/helper-skip-transparent-expression-wrappers@^7.16.0": - version "7.16.0" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09" - integrity sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw== - dependencies: - "@babel/types" "^7.16.0" - -"@babel/helper-split-export-declaration@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" - integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw== - dependencies: - "@babel/types" "^7.16.7" - -"@babel/helper-validator-identifier@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" - integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== - -"@babel/helper-validator-option@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" - integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ== - -"@babel/helper-wrap-function@^7.16.8": - version "7.16.8" - resolved 
"https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz#58afda087c4cd235de92f7ceedebca2c41274200" - integrity sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw== - dependencies: - "@babel/helper-function-name" "^7.16.7" - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.16.8" - "@babel/types" "^7.16.8" - -"@babel/helpers@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.17.9.tgz#b2af120821bfbe44f9907b1826e168e819375a1a" - integrity sha512-cPCt915ShDWUEzEp3+UNRktO2n6v49l5RSnG9M5pS24hA+2FAc5si+Pn1i4VVbQQ+jh+bIZhPFQOJOzbrOYY1Q== - dependencies: - "@babel/template" "^7.16.7" - "@babel/traverse" "^7.17.9" - "@babel/types" "^7.17.0" - -"@babel/highlight@^7.16.7": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.9.tgz#61b2ee7f32ea0454612def4fccdae0de232b73e3" - integrity sha512-J9PfEKCbFIv2X5bjTMiZu6Vf341N05QIY+d6FvVKynkG1S7G0j3I0QoRtWIrXhZ+/Nlb5Q0MzqL7TokEJ5BNHg== - dependencies: - "@babel/helper-validator-identifier" "^7.16.7" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.7", "@babel/parser@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.9.tgz#9c94189a6062f0291418ca021077983058e171ef" - integrity sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg== - -"@babel/parser@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.10.tgz#873b16db82a8909e0fbd7f115772f4b739f6ce78" - integrity sha512-n2Q6i+fnJqzOaq2VkdXxy2TCPCWQZHiCo0XqmrCvDWcZQKRyZzYi4Z0yxlBuN0w+r2ZHmre+Q087DSrw3pbJDQ== - -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.16.7": - version "7.16.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz#4eda6d6c2a0aa79c70fa7b6da67763dfe2141050" - integrity sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz#cc001234dfc139ac45f6bcf801866198c8c72ff9" - integrity sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" - "@babel/plugin-proposal-optional-chaining" "^7.16.7" - -"@babel/plugin-proposal-async-generator-functions@^7.16.8": - version "7.16.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz#3bdd1ebbe620804ea9416706cd67d60787504bc8" - integrity sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-remap-async-to-generator" "^7.16.8" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz#925cad7b3b1a2fcea7e59ecc8eb5954f961f91b0" - integrity sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww== - dependencies: - "@babel/helper-create-class-features-plugin" 
"^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-proposal-class-static-block@^7.16.7", "@babel/plugin-proposal-class-static-block@^7.17.6": - version "7.17.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.17.6.tgz#164e8fd25f0d80fa48c5a4d1438a6629325ad83c" - integrity sha512-X/tididvL2zbs7jZCeeRJ8167U/+Ac135AM6jCAx6gYXDUviZV5Ku9UDvWS2NCuWlFjIRXklYhwo6HhAC7ETnA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.17.6" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - -"@babel/plugin-proposal-decorators@^7.16.4": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.17.9.tgz#67a1653be9c77ce5b6c318aa90c8287b87831619" - integrity sha512-EfH2LZ/vPa2wuPwJ26j+kYRkaubf89UlwxKXtxqEm57HrgSEYDB8t4swFP+p8LcI9yiP9ZRJJjo/58hS6BnaDA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.17.9" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-replace-supers" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/plugin-syntax-decorators" "^7.17.0" - charcodes "^0.2.0" - -"@babel/plugin-proposal-dynamic-import@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz#c19c897eaa46b27634a00fee9fb7d829158704b2" - integrity sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - -"@babel/plugin-proposal-export-namespace-from@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz#09de09df18445a5786a305681423ae63507a6163" - integrity 
sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz#9732cb1d17d9a2626a08c5be25186c195b6fa6e8" - integrity sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz#be23c0ba74deec1922e639832904be0bea73cdea" - integrity sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz#141fc20b6857e59459d430c850a0011e36561d99" - integrity sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.16.7": - version "7.16.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz#d6b69f4af63fb38b6ca2558442a7fb191236eba9" - integrity sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - -"@babel/plugin-proposal-object-rest-spread@^7.16.7", "@babel/plugin-proposal-object-rest-spread@^7.17.3": - version "7.17.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.17.3.tgz#d9eb649a54628a51701aef7e0ea3d17e2b9dd390" - integrity sha512-yuL5iQA/TbZn+RGAfxQXfi7CNLmKi1f8zInn4IgobuCWcAb7i+zj4TYzQ9l8cEzVyJ89PDGuqxK1xZpUDISesw== - dependencies: - "@babel/compat-data" "^7.17.0" - "@babel/helper-compilation-targets" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.16.7" - -"@babel/plugin-proposal-optional-catch-binding@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz#c623a430674ffc4ab732fd0a0ae7722b67cb74cf" - integrity sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - -"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz#7cd629564724816c0e8a969535551f943c64c39a" - integrity sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - 
"@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.16.11": - version "7.16.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz#e8df108288555ff259f4527dbe84813aac3a1c50" - integrity sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.16.10" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-proposal-private-property-in-object@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz#b0b8cef543c2c3d57e59e2c611994861d46a3fce" - integrity sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-create-class-features-plugin" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-proposal-unicode-property-regex@^7.16.7", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz#635d18eb10c6214210ffc5ff4932552de08188a2" - integrity sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-decorators@^7.17.0": - version "7.17.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.17.0.tgz#a2be3b2c9fe7d78bd4994e790896bc411e2f166d" - integrity sha512-qWe85yCXsvDEluNP0OyeQjH63DlhAR3W7K9BxxU1MvbDb48tgBG+Ao6IJJ6smPDrrVzSQZrbF6donpkFBMcs3A== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - 
resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" - -"@babel/plugin-syntax-flow@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.16.7.tgz#202b147e5892b8452bbb0bb269c7ed2539ab8832" - integrity sha512-UDo3YGQO0jH6ytzVwgSLv9i/CzMcUjbKenL67dTrAZPPv6GFAtDhe6jqnvmoKzC/7htNTohhos+onPtDMqJwaQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-jsx@^7.16.7": - version "7.16.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.16.7.tgz#50b6571d13f764266a113d77c82b4a6508bbe665" - integrity sha512-Esxmk7YjA8QysKeT3VhTXvF6y77f/a91SIs4pWb4H2eWGQkCKFgQaG6hdoEVZtGsrAcb2K5BW66XsOErD4WU3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity 
sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.16.7": - version "7.16.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.16.7.tgz#39c9b55ee153151990fb038651d58d3fd03f98f8" - integrity sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-syntax-typescript@^7.7.2": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.17.10.tgz#80031e6042cad6a95ed753f672ebd23c30933195" - integrity sha512-xJefea1DWXW09pW4Tm9bjwVlPDyYA2it3fWlmEjpYz6alPvTUjL0EOzNzI/FEOyI3r4/J7uVH5UqKgl1TQ5hqQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-arrow-functions@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz#44125e653d94b98db76369de9c396dc14bef4154" - integrity sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-async-to-generator@^7.16.8": - version "7.16.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz#b83dff4b970cf41f1b819f8b49cc0cfbaa53a808" - integrity sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg== - dependencies: - "@babel/helper-module-imports" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-remap-async-to-generator" "^7.16.8" - -"@babel/plugin-transform-block-scoped-functions@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz#4d0d57d9632ef6062cdf354bb717102ee042a620" - integrity sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg== - dependencies: - 
"@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-block-scoping@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz#f50664ab99ddeaee5bc681b8f3a6ea9d72ab4f87" - integrity sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-classes@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz#8f4b9562850cd973de3b498f1218796eb181ce00" - integrity sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-function-name" "^7.16.7" - "@babel/helper-optimise-call-expression" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-replace-supers" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - globals "^11.1.0" - -"@babel/plugin-transform-computed-properties@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz#66dee12e46f61d2aae7a73710f591eb3df616470" - integrity sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-destructuring@^7.16.7", "@babel/plugin-transform-destructuring@^7.17.7": - version "7.17.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.17.7.tgz#49dc2675a7afa9a5e4c6bdee636061136c3408d1" - integrity sha512-XVh0r5yq9sLR4vZ6eVZe8FKfIcSgaTBxVBRSYokRj2qksf6QerYnTxz9/GTuKTH/n/HwLP7t6gtlybHetJ/6hQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - 
-"@babel/plugin-transform-dotall-regex@^7.16.7", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz#6b2d67686fab15fb6a7fd4bd895d5982cfc81241" - integrity sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-duplicate-keys@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz#2207e9ca8f82a0d36a5a67b6536e7ef8b08823c9" - integrity sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-exponentiation-operator@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz#efa9862ef97e9e9e5f653f6ddc7b665e8536fe9b" - integrity sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA== - dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-flow-strip-types@^7.16.0": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.16.7.tgz#291fb140c78dabbf87f2427e7c7c332b126964b8" - integrity sha512-mzmCq3cNsDpZZu9FADYYyfZJIOrSONmHcop2XEKPdBNMa4PDC4eEvcOvzZaCNcjKu72v0XQlA5y1g58aLRXdYg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-flow" "^7.16.7" - -"@babel/plugin-transform-for-of@^7.16.7": - version "7.16.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz#649d639d4617dff502a9a158c479b3b556728d8c" - integrity sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-function-name@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz#5ab34375c64d61d083d7d2f05c38d90b97ec65cf" - integrity sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA== - dependencies: - "@babel/helper-compilation-targets" "^7.16.7" - "@babel/helper-function-name" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-literals@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz#254c9618c5ff749e87cb0c0cef1a0a050c0bdab1" - integrity sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-member-expression-literals@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz#6e5dcf906ef8a098e630149d14c867dd28f92384" - integrity sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-modules-amd@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz#b28d323016a7daaae8609781d1f8c9da42b13186" - integrity sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g== - dependencies: - 
"@babel/helper-module-transforms" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-commonjs@^7.16.8", "@babel/plugin-transform-modules-commonjs@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.17.9.tgz#274be1a2087beec0254d4abd4d86e52442e1e5b6" - integrity sha512-2TBFd/r2I6VlYn0YRTz2JdazS+FoUuQ2rIFHoAxtyP/0G3D82SBLaRq9rnUkpqlLg03Byfl/+M32mpxjO6KaPw== - dependencies: - "@babel/helper-module-transforms" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-simple-access" "^7.17.7" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-systemjs@^7.16.7", "@babel/plugin-transform-modules-systemjs@^7.17.8": - version "7.17.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.17.8.tgz#81fd834024fae14ea78fbe34168b042f38703859" - integrity sha512-39reIkMTUVagzgA5x88zDYXPCMT6lcaRKs1+S9K6NKBPErbgO/w/kP8GlNQTC87b412ZTlmNgr3k2JrWgHH+Bw== - dependencies: - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-module-transforms" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-validator-identifier" "^7.16.7" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-umd@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz#23dad479fa585283dbd22215bff12719171e7618" - integrity sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ== - dependencies: - "@babel/helper-module-transforms" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.16.8": - version "7.16.8" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz#7f860e0e40d844a02c9dcf9d84965e7dfd666252" - integrity sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.16.7" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.17.10.tgz#715dbcfafdb54ce8bccd3d12e8917296a4ba66a4" - integrity sha512-v54O6yLaJySCs6mGzaVOUw9T967GnH38T6CQSAtnzdNPwu84l2qAjssKzo/WSO8Yi7NF+7ekm5cVbF/5qiIgNA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.17.0" - -"@babel/plugin-transform-new-target@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz#9967d89a5c243818e0800fdad89db22c5f514244" - integrity sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-object-super@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz#ac359cf8d32cf4354d27a46867999490b6c32a94" - integrity sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-replace-supers" "^7.16.7" - -"@babel/plugin-transform-parameters@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz#a1721f55b99b736511cb7e0152f61f17688f331f" - integrity sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw== 
- dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-property-literals@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz#2dadac85155436f22c696c4827730e0fe1057a55" - integrity sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-react-constant-elements@^7.12.1": - version "7.17.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.17.6.tgz#6cc273c2f612a6a50cb657e63ee1303e5e68d10a" - integrity sha512-OBv9VkyyKtsHZiHLoSfCn+h6yU7YKX8nrs32xUmOa1SRSk+t03FosB6fBZ0Yz4BpD1WV7l73Nsad+2Tz7APpqw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.16.7.tgz#7b6d40d232f4c0f550ea348593db3b21e2404340" - integrity sha512-qgIg8BcZgd0G/Cz916D5+9kqX0c7nPZyXaP8R2tLNN5tkyIZdG5fEwBrxwplzSnjC1jvQmyMNVwUCZPcbGY7Pg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-react-jsx-development@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.16.7.tgz#43a00724a3ed2557ed3f276a01a929e6686ac7b8" - integrity sha512-RMvQWvpla+xy6MlBpPlrKZCMRs2AGiHOGHY3xRwl0pEeim348dDyxeH4xBsMPbIMhujeq7ihE702eM2Ew0Wo+A== - dependencies: - "@babel/plugin-transform-react-jsx" "^7.16.7" - -"@babel/plugin-transform-react-jsx@^7.16.7": - version "7.17.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.17.3.tgz#eac1565da176ccb1a715dae0b4609858808008c1" - integrity sha512-9tjBm4O07f7mzKSIlEmPdiE6ub7kfIe6Cd+w+oQebpATfTQMAgW+YOuWxogbKVTulA+MEO7byMeIUtQ1z+z+ZQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-module-imports" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-jsx" "^7.16.7" - "@babel/types" "^7.17.0" - -"@babel/plugin-transform-react-pure-annotations@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.16.7.tgz#232bfd2f12eb551d6d7d01d13fe3f86b45eb9c67" - integrity sha512-hs71ToC97k3QWxswh2ElzMFABXHvGiJ01IB1TbYQDGeWRKWz/MPUTh5jGExdHvosYKpnJW5Pm3S4+TA3FyX+GA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-regenerator@^7.16.7", "@babel/plugin-transform-regenerator@^7.17.9": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.17.9.tgz#0a33c3a61cf47f45ed3232903683a0afd2d3460c" - integrity sha512-Lc2TfbxR1HOyn/c6b4Y/b6NHoTb67n/IoWLxTu4kC7h4KQnWlhCq2S8Tx0t2SVvv5Uu87Hs+6JEJ5kt2tYGylQ== - dependencies: - regenerator-transform "^0.15.0" - -"@babel/plugin-transform-reserved-words@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz#1d798e078f7c5958eec952059c460b220a63f586" - integrity sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-runtime@^7.16.4": - version "7.17.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.17.0.tgz#0a2e08b5e2b2d95c4b1d3b3371a2180617455b70" - 
integrity sha512-fr7zPWnKXNc1xoHfrIU9mN/4XKX4VLZ45Q+oMhfsYIaHvg7mHgmhfOy/ckRWqDK7XF3QDigRpkh5DKq6+clE8A== - dependencies: - "@babel/helper-module-imports" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - babel-plugin-polyfill-corejs2 "^0.3.0" - babel-plugin-polyfill-corejs3 "^0.5.0" - babel-plugin-polyfill-regenerator "^0.3.0" - semver "^6.3.0" - -"@babel/plugin-transform-shorthand-properties@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz#e8549ae4afcf8382f711794c0c7b6b934c5fbd2a" - integrity sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-spread@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz#a303e2122f9f12e0105daeedd0f30fb197d8ff44" - integrity sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" - -"@babel/plugin-transform-sticky-regex@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz#c84741d4f4a38072b9a1e2e3fd56d359552e8660" - integrity sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-template-literals@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz#f3d1c45d28967c8e80f53666fc9c3e50618217ab" - integrity sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA== - dependencies: - 
"@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-typeof-symbol@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz#9cdbe622582c21368bd482b660ba87d5545d4f7e" - integrity sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-typescript@^7.16.7": - version "7.16.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.16.8.tgz#591ce9b6b83504903fa9dd3652c357c2ba7a1ee0" - integrity sha512-bHdQ9k7YpBDO2d0NVfkj51DpQcvwIzIusJ7mEUaMlbZq3Kt/U47j24inXZHQ5MDiYpCs+oZiwnXyKedE8+q7AQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/plugin-syntax-typescript" "^7.16.7" - -"@babel/plugin-transform-unicode-escapes@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz#da8717de7b3287a2c6d659750c964f302b31ece3" - integrity sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/plugin-transform-unicode-regex@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz#0f7aa4a501198976e25e82702574c34cfebe9ef2" - integrity sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - -"@babel/preset-env@^7.11.0": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.17.10.tgz#a81b093669e3eb6541bb81a23173c5963c5de69c" - integrity 
sha512-YNgyBHZQpeoBSRBg0xixsZzfT58Ze1iZrajvv0lJc70qDDGuGfonEnMGfWeSY0mQ3JTuCWFbMkzFRVafOyJx4g== - dependencies: - "@babel/compat-data" "^7.17.10" - "@babel/helper-compilation-targets" "^7.17.10" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-validator-option" "^7.16.7" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.16.7" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.16.7" - "@babel/plugin-proposal-async-generator-functions" "^7.16.8" - "@babel/plugin-proposal-class-properties" "^7.16.7" - "@babel/plugin-proposal-class-static-block" "^7.17.6" - "@babel/plugin-proposal-dynamic-import" "^7.16.7" - "@babel/plugin-proposal-export-namespace-from" "^7.16.7" - "@babel/plugin-proposal-json-strings" "^7.16.7" - "@babel/plugin-proposal-logical-assignment-operators" "^7.16.7" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.7" - "@babel/plugin-proposal-numeric-separator" "^7.16.7" - "@babel/plugin-proposal-object-rest-spread" "^7.17.3" - "@babel/plugin-proposal-optional-catch-binding" "^7.16.7" - "@babel/plugin-proposal-optional-chaining" "^7.16.7" - "@babel/plugin-proposal-private-methods" "^7.16.11" - "@babel/plugin-proposal-private-property-in-object" "^7.16.7" - "@babel/plugin-proposal-unicode-property-regex" "^7.16.7" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - 
"@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.16.7" - "@babel/plugin-transform-async-to-generator" "^7.16.8" - "@babel/plugin-transform-block-scoped-functions" "^7.16.7" - "@babel/plugin-transform-block-scoping" "^7.16.7" - "@babel/plugin-transform-classes" "^7.16.7" - "@babel/plugin-transform-computed-properties" "^7.16.7" - "@babel/plugin-transform-destructuring" "^7.17.7" - "@babel/plugin-transform-dotall-regex" "^7.16.7" - "@babel/plugin-transform-duplicate-keys" "^7.16.7" - "@babel/plugin-transform-exponentiation-operator" "^7.16.7" - "@babel/plugin-transform-for-of" "^7.16.7" - "@babel/plugin-transform-function-name" "^7.16.7" - "@babel/plugin-transform-literals" "^7.16.7" - "@babel/plugin-transform-member-expression-literals" "^7.16.7" - "@babel/plugin-transform-modules-amd" "^7.16.7" - "@babel/plugin-transform-modules-commonjs" "^7.17.9" - "@babel/plugin-transform-modules-systemjs" "^7.17.8" - "@babel/plugin-transform-modules-umd" "^7.16.7" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.17.10" - "@babel/plugin-transform-new-target" "^7.16.7" - "@babel/plugin-transform-object-super" "^7.16.7" - "@babel/plugin-transform-parameters" "^7.16.7" - "@babel/plugin-transform-property-literals" "^7.16.7" - "@babel/plugin-transform-regenerator" "^7.17.9" - "@babel/plugin-transform-reserved-words" "^7.16.7" - "@babel/plugin-transform-shorthand-properties" "^7.16.7" - "@babel/plugin-transform-spread" "^7.16.7" - "@babel/plugin-transform-sticky-regex" "^7.16.7" - "@babel/plugin-transform-template-literals" "^7.16.7" - "@babel/plugin-transform-typeof-symbol" "^7.16.7" - "@babel/plugin-transform-unicode-escapes" "^7.16.7" - "@babel/plugin-transform-unicode-regex" "^7.16.7" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.17.10" - babel-plugin-polyfill-corejs2 "^0.3.0" - babel-plugin-polyfill-corejs3 "^0.5.0" - 
babel-plugin-polyfill-regenerator "^0.3.0" - core-js-compat "^3.22.1" - semver "^6.3.0" - -"@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": - version "7.16.11" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.16.11.tgz#5dd88fd885fae36f88fd7c8342475c9f0abe2982" - integrity sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g== - dependencies: - "@babel/compat-data" "^7.16.8" - "@babel/helper-compilation-targets" "^7.16.7" - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-validator-option" "^7.16.7" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.16.7" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.16.7" - "@babel/plugin-proposal-async-generator-functions" "^7.16.8" - "@babel/plugin-proposal-class-properties" "^7.16.7" - "@babel/plugin-proposal-class-static-block" "^7.16.7" - "@babel/plugin-proposal-dynamic-import" "^7.16.7" - "@babel/plugin-proposal-export-namespace-from" "^7.16.7" - "@babel/plugin-proposal-json-strings" "^7.16.7" - "@babel/plugin-proposal-logical-assignment-operators" "^7.16.7" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.7" - "@babel/plugin-proposal-numeric-separator" "^7.16.7" - "@babel/plugin-proposal-object-rest-spread" "^7.16.7" - "@babel/plugin-proposal-optional-catch-binding" "^7.16.7" - "@babel/plugin-proposal-optional-chaining" "^7.16.7" - "@babel/plugin-proposal-private-methods" "^7.16.11" - "@babel/plugin-proposal-private-property-in-object" "^7.16.7" - "@babel/plugin-proposal-unicode-property-regex" "^7.16.7" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - 
"@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.16.7" - "@babel/plugin-transform-async-to-generator" "^7.16.8" - "@babel/plugin-transform-block-scoped-functions" "^7.16.7" - "@babel/plugin-transform-block-scoping" "^7.16.7" - "@babel/plugin-transform-classes" "^7.16.7" - "@babel/plugin-transform-computed-properties" "^7.16.7" - "@babel/plugin-transform-destructuring" "^7.16.7" - "@babel/plugin-transform-dotall-regex" "^7.16.7" - "@babel/plugin-transform-duplicate-keys" "^7.16.7" - "@babel/plugin-transform-exponentiation-operator" "^7.16.7" - "@babel/plugin-transform-for-of" "^7.16.7" - "@babel/plugin-transform-function-name" "^7.16.7" - "@babel/plugin-transform-literals" "^7.16.7" - "@babel/plugin-transform-member-expression-literals" "^7.16.7" - "@babel/plugin-transform-modules-amd" "^7.16.7" - "@babel/plugin-transform-modules-commonjs" "^7.16.8" - "@babel/plugin-transform-modules-systemjs" "^7.16.7" - "@babel/plugin-transform-modules-umd" "^7.16.7" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.16.8" - "@babel/plugin-transform-new-target" "^7.16.7" - "@babel/plugin-transform-object-super" "^7.16.7" - "@babel/plugin-transform-parameters" "^7.16.7" - "@babel/plugin-transform-property-literals" "^7.16.7" - "@babel/plugin-transform-regenerator" "^7.16.7" - "@babel/plugin-transform-reserved-words" "^7.16.7" - "@babel/plugin-transform-shorthand-properties" "^7.16.7" - "@babel/plugin-transform-spread" "^7.16.7" - "@babel/plugin-transform-sticky-regex" "^7.16.7" - "@babel/plugin-transform-template-literals" "^7.16.7" - "@babel/plugin-transform-typeof-symbol" "^7.16.7" - 
"@babel/plugin-transform-unicode-escapes" "^7.16.7" - "@babel/plugin-transform-unicode-regex" "^7.16.7" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.16.8" - babel-plugin-polyfill-corejs2 "^0.3.0" - babel-plugin-polyfill-corejs3 "^0.5.0" - babel-plugin-polyfill-regenerator "^0.3.0" - core-js-compat "^3.20.2" - semver "^6.3.0" - -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" - "@babel/types" "^7.4.4" - esutils "^2.0.2" - -"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.16.7.tgz#4c18150491edc69c183ff818f9f2aecbe5d93852" - integrity sha512-fWpyI8UM/HE6DfPBzD8LnhQ/OcH8AgTaqcqP2nGOXEUV+VKBR5JRN9hCk9ai+zQQ57vtm9oWeXguBCPNUjytgA== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-validator-option" "^7.16.7" - "@babel/plugin-transform-react-display-name" "^7.16.7" - "@babel/plugin-transform-react-jsx" "^7.16.7" - "@babel/plugin-transform-react-jsx-development" "^7.16.7" - "@babel/plugin-transform-react-pure-annotations" "^7.16.7" - -"@babel/preset-typescript@^7.16.0": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.16.7.tgz#ab114d68bb2020afc069cd51b37ff98a046a70b9" - integrity sha512-WbVEmgXdIyvzB77AQjGBEyYPZx+8tTsO50XtfozQrkW8QB2rLJpH2lgx0TRw5EJrBxOZQ+wCcyPVQvS8tjEHpQ== - dependencies: - "@babel/helper-plugin-utils" "^7.16.7" - "@babel/helper-validator-option" "^7.16.7" - "@babel/plugin-transform-typescript" "^7.16.7" - -"@babel/runtime-corejs3@^7.10.2": - version 
"7.17.9" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.17.9.tgz#3d02d0161f0fbf3ada8e88159375af97690f4055" - integrity sha512-WxYHHUWF2uZ7Hp1K+D1xQgbgkGUfA+5UPOegEXGt2Y5SMog/rYCVaifLZDbw8UkNXozEqqrZTy6bglL7xTaCOw== - dependencies: - core-js-pure "^3.20.2" - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.3.1", "@babel/runtime@^7.4.4", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.3", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72" - integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/template@^7.16.7", "@babel/template@^7.3.3": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" - integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/parser" "^7.16.7" - "@babel/types" "^7.16.7" - -"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", "@babel/traverse@^7.17.3", "@babel/traverse@^7.17.9", "@babel/traverse@^7.4.5": - version "7.17.9" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.9.tgz#1f9b207435d9ae4a8ed6998b2b82300d83c37a0d" - integrity sha512-PQO8sDIJ8SIwipTPiR71kJQCKQYB5NGImbOviK8K+kg5xkNSYXLBupuX9QhatFowrsvo9Hj8WgArg3W7ijNAQw== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.17.9" - "@babel/helper-environment-visitor" 
"^7.16.7" - "@babel/helper-function-name" "^7.17.9" - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/parser" "^7.17.9" - "@babel/types" "^7.17.0" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/traverse@^7.17.10", "@babel/traverse@^7.7.2": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.10.tgz#1ee1a5ac39f4eac844e6cf855b35520e5eb6f8b5" - integrity sha512-VmbrTHQteIdUUQNTb+zE12SHS/xQVIShmBPhlNP12hD5poF2pbITW1Z4172d03HegaQWhLffdkRJYtAzp0AGcw== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.17.10" - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-function-name" "^7.17.9" - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/parser" "^7.17.10" - "@babel/types" "^7.17.10" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.16.0", "@babel/types@^7.16.7", "@babel/types@^7.16.8", "@babel/types@^7.17.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": - version "7.17.0" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.17.0.tgz#a826e368bccb6b3d84acd76acad5c0d87342390b" - integrity sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw== - dependencies: - "@babel/helper-validator-identifier" "^7.16.7" - to-fast-properties "^2.0.0" - -"@babel/types@^7.17.10": - version "7.17.10" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.17.10.tgz#d35d7b4467e439fcf06d195f8100e0fea7fc82c4" - integrity sha512-9O26jG0mBYfGkUYCYZRnBwbVLd1UZOICEr2Em6InB6jVfsAv1GKgwXHmrSg+WFWDmeKTA6vyTZiN8tCSM5Oo3A== - dependencies: - "@babel/helper-validator-identifier" "^7.16.7" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved 
"https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@csstools/normalize.css@*": - version "12.0.0" - resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" - integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== - -"@csstools/postcss-color-function@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-1.1.0.tgz#229966327747f58fbe586de35daa139db3ce1e5d" - integrity sha512-5D5ND/mZWcQoSfYnSPsXtuiFxhzmhxt6pcjrFLJyldj+p0ZN2vvRpYNX+lahFTtMhAYOa2WmkdGINr0yP0CvGA== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-font-format-keywords@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.0.tgz#7e7df948a83a0dfb7eb150a96e2390ac642356a1" - integrity sha512-oO0cZt8do8FdVBX8INftvIA4lUrKUSCcWUf9IwH9IPWOgKT22oAZFXeHLoDK7nhB2SmkNycp5brxfNMRLIhd6Q== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-hwb-function@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.0.tgz#d6785c1c5ba8152d1d392c66f3a6a446c6034f6d" - integrity sha512-VSTd7hGjmde4rTj1rR30sokY3ONJph1reCBTUXqeW1fKwETPy1x4t/XIeaaqbMbC5Xg4SM/lyXZ2S8NELT2TaA== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-ic-unit@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.0.tgz#f484db59fc94f35a21b6d680d23b0ec69b286b7f" - integrity 
sha512-i4yps1mBp2ijrx7E96RXrQXQQHm6F4ym1TOD0D69/sjDjZvQ22tqiEvaNw7pFZTUO5b9vWRHzbHzP9+UKuw+bA== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-is-pseudo-class@^2.0.2": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.3.tgz#98c827ca88598e598dcd726a9d9e21e0475eb487" - integrity sha512-wMQ3GMWrJyRQfvBJsD38ndF/nwHT32xevSn8w2X+iCoWqmhhoj0K7HgdGW8XQhah6sdENBa8yS9gRosdezaQZw== - dependencies: - "@csstools/selector-specificity" "^1.0.0" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-normalize-display-values@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.0.tgz#ce698f688c28517447aedf15a9037987e3d2dc97" - integrity sha512-bX+nx5V8XTJEmGtpWTO6kywdS725t71YSLlxWt78XoHUbELWgoCXeOFymRJmL3SU1TLlKSIi7v52EWqe60vJTQ== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-oklab-function@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.0.tgz#e9a269487a292e0930760948e923e1d46b638ee6" - integrity sha512-e/Q5HopQzmnQgqimG9v3w2IG4VRABsBq3itOcn4bnm+j4enTgQZ0nWsaH/m9GV2otWGQ0nwccYL5vmLKyvP1ww== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" - integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-stepped-value-functions@^1.0.0": - version 
"1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.0.tgz#f8ffc05e163ba7bcbefc5fdcaf264ce9fd408c16" - integrity sha512-q8c4bs1GumAiRenmFjASBcWSLKrbzHzWl6C2HcaAxAXIiL2rUlUWbqQZUjwVG5tied0rld19j/Mm90K3qI26vw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-unset-value@^1.0.0": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.1.tgz#2cc020785db5ec82cc9444afe4cdae2a65445f89" - integrity sha512-f1G1WGDXEU/RN1TWAxBPQgQudtLnLQPyiWdtypkPC+mVYNKFKH/HYXSxH4MVNqwF8M0eDsoiU7HumJHCg/L/jg== - -"@csstools/selector-specificity@1.0.0", "@csstools/selector-specificity@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-1.0.0.tgz#91c560df2ed8d9700e4c7ed4ac21a3a322c9d975" - integrity sha512-RkYG5KiGNX0fJ5YoI0f4Wfq2Yo74D25Hru4fxTOioYdQvHBxcrrtTTyT5Ozzh2ejcNrhFy7IEts2WyEY7yi5yw== - -"@egjs/hammerjs@^2.0.17": - version "2.0.17" - resolved "https://registry.yarnpkg.com/@egjs/hammerjs/-/hammerjs-2.0.17.tgz#5dc02af75a6a06e4c2db0202cae38c9263895124" - integrity sha512-XQsZgjm2EcVUiZQf11UBJQfmZeEmOW8DpI1gsFeln6w0ae0ii4dMQEQ0kjl6DspdWX1aGY1/loyXnP0JS06e/A== - dependencies: - "@types/hammerjs" "^2.0.36" - -"@emotion/hash@^0.8.0": - version "0.8.0" - resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" - integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== - -"@emotion/is-prop-valid@^1.1.0": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.1.2.tgz#34ad6e98e871aa6f7a20469b602911b8b11b3a95" - integrity sha512-3QnhqeL+WW88YjYbQL5gUIkthuMw7a0NGbZ7wfFVk2kg/CK5w8w5FFa0RzWjyY1+sujN0NWbtSHH6OJmWHtJpQ== - dependencies: - "@emotion/memoize" "^0.7.4" - -"@emotion/memoize@^0.7.4": - version "0.7.5" - resolved 
"https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" - integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== - -"@emotion/stylis@^0.8.4": - version "0.8.5" - resolved "https://registry.yarnpkg.com/@emotion/stylis/-/stylis-0.8.5.tgz#deacb389bd6ee77d1e7fcaccce9e16c5c7e78e04" - integrity sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ== - -"@emotion/unitless@^0.7.4": - version "0.7.5" - resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" - integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== - -"@eslint/eslintrc@^1.2.3": - version "1.2.3" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.2.3.tgz#fcaa2bcef39e13d6e9e7f6271f4cc7cae1174886" - integrity sha512-uGo44hIwoLGNyduRpjdEpovcbMdd+Nv7amtmJxnKmI8xj6yd5LncmSwDa5NgX/41lIFJtkjD6YdVfgEzPfJ5UA== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^9.3.2" - globals "^13.9.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - -"@humanwhocodes/config-array@^0.9.2": - version "0.9.5" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" - integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== - dependencies: - "@humanwhocodes/object-schema" "^1.2.1" - debug "^4.1.1" - minimatch "^3.0.4" - -"@humanwhocodes/object-schema@^1.2.1": - version "1.2.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" - integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== - -"@istanbuljs/load-nyc-config@^1.0.0": - 
version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== - -"@jest/console@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" - integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - -"@jest/console@^28.1.0": - version "28.1.0" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.0.tgz#db78222c3d3b0c1db82f1b9de51094c2aaff2176" - integrity sha512-tscn3dlJFGay47kb4qVruQg/XWlmvU0xp3EJOjzzY+sBaI+YgwKcvAmTcyYU7xEiLLIY5HCdWRooAL8dqkFlDA== - dependencies: - "@jest/types" "^28.1.0" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^28.1.0" - jest-util "^28.1.0" - slash "^3.0.0" - -"@jest/core@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" - integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/reporters" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" 
- emittery "^0.8.1" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files "^27.5.1" - jest-config "^27.5.1" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-resolve-dependencies "^27.5.1" - jest-runner "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - jest-watcher "^27.5.1" - micromatch "^4.0.4" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/environment@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" - integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== - dependencies: - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - -"@jest/fake-timers@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" - integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== - dependencies: - "@jest/types" "^27.5.1" - "@sinonjs/fake-timers" "^8.0.1" - "@types/node" "*" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -"@jest/globals@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" - integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/types" "^27.5.1" - expect "^27.5.1" - -"@jest/reporters@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" - integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== - 
dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-haste-map "^27.5.1" - jest-resolve "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^4.0.1" - terminal-link "^2.0.0" - v8-to-istanbul "^8.1.0" - -"@jest/schemas@^28.0.2": - version "28.0.2" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.0.2.tgz#08c30df6a8d07eafea0aef9fb222c5e26d72e613" - integrity sha512-YVDJZjd4izeTDkij00vHHAymNXQ6WWsdChFRK86qck6Jpr3DCL5W3Is3vslviRlP+bLuMYRLbdp98amMvqudhA== - dependencies: - "@sinclair/typebox" "^0.23.3" - -"@jest/source-map@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" - integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.9" - source-map "^0.6.0" - -"@jest/test-result@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" - integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== - dependencies: - "@jest/console" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-result@^28.1.0": - version "28.1.0" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.0.tgz#fd149dee123510dd2fcadbbf5f0020f98ad7f12c" - integrity 
sha512-sBBFIyoPzrZho3N+80P35A5oAkSKlGfsEFfXFWuPGBsW40UAjCkGakZhn4UQK4iQlW2vgCDMRDOob9FGKV8YoQ== - dependencies: - "@jest/console" "^28.1.0" - "@jest/types" "^28.1.0" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" - integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== - dependencies: - "@jest/test-result" "^27.5.1" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-runtime "^27.5.1" - -"@jest/transform@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" - integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== - dependencies: - "@babel/core" "^7.1.0" - "@jest/types" "^27.5.1" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-regex-util "^27.5.1" - jest-util "^27.5.1" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - source-map "^0.6.1" - write-file-atomic "^3.0.0" - -"@jest/types@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" - integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^16.0.0" - chalk "^4.0.0" - -"@jest/types@^28.1.0": - version "28.1.0" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.0.tgz#508327a89976cbf9bd3e1cc74641a29fd7dfd519" - integrity 
sha512-xmEggMPr317MIOjjDoZ4ejCSr9Lpbt/u34+dvc99t7DS8YirW5rwZEhzKPC2BMUFkUhI48qs6qLUSGw5FuL0GA== - dependencies: - "@jest/schemas" "^28.0.2" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== - dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@jridgewell/resolve-uri@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" - integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== - -"@jridgewell/set-array@^1.0.0": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" - integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== - -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.11" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" - integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== - -"@jridgewell/trace-mapping@^0.3.0": - version "0.3.4" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.4.tgz#f6a0832dffd5b8a6aaa633b7d9f8e8e94c83a0c3" - integrity sha512-vFv9ttIedivx0ux3QSjhgtCVjPZd5l46ZOMDSCwnH1yUO2e964gO8LZGyv2QkqcgR6TnBU1v+1IFqmeoG+0UJQ== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@leichtgewicht/ip-codec@^2.0.1": - 
version "2.0.4" - resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" - integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== - -"@material-ui/core@^4.12.3": - version "4.12.4" - resolved "https://registry.yarnpkg.com/@material-ui/core/-/core-4.12.4.tgz#4ac17488e8fcaf55eb6a7f5efb2a131e10138a73" - integrity sha512-tr7xekNlM9LjA6pagJmL8QCgZXaubWUwkJnoYcMKd4gw/t4XiyvnTkjdGrUVicyB2BsdaAv1tvow45bPM4sSwQ== - dependencies: - "@babel/runtime" "^7.4.4" - "@material-ui/styles" "^4.11.5" - "@material-ui/system" "^4.12.2" - "@material-ui/types" "5.1.0" - "@material-ui/utils" "^4.11.3" - "@types/react-transition-group" "^4.2.0" - clsx "^1.0.4" - hoist-non-react-statics "^3.3.2" - popper.js "1.16.1-lts" - prop-types "^15.7.2" - react-is "^16.8.0 || ^17.0.0" - react-transition-group "^4.4.0" - -"@material-ui/icons@^4.11.2": - version "4.11.3" - resolved "https://registry.yarnpkg.com/@material-ui/icons/-/icons-4.11.3.tgz#b0693709f9b161ce9ccde276a770d968484ecff1" - integrity sha512-IKHlyx6LDh8n19vzwH5RtHIOHl9Tu90aAAxcbWME6kp4dmvODM3UvOHJeMIDzUbd4muuJKHmlNoBN+mDY4XkBA== - dependencies: - "@babel/runtime" "^7.4.4" - -"@material-ui/lab@^4.0.0-alpha.60": - version "4.0.0-alpha.61" - resolved "https://registry.yarnpkg.com/@material-ui/lab/-/lab-4.0.0-alpha.61.tgz#9bf8eb389c0c26c15e40933cc114d4ad85e3d978" - integrity sha512-rSzm+XKiNUjKegj8bzt5+pygZeckNLOr+IjykH8sYdVk7dE9y2ZuUSofiMV2bJk3qU+JHwexmw+q0RyNZB9ugg== - dependencies: - "@babel/runtime" "^7.4.4" - "@material-ui/utils" "^4.11.3" - clsx "^1.0.4" - prop-types "^15.7.2" - react-is "^16.8.0 || ^17.0.0" - -"@material-ui/styles@^4.11.4", "@material-ui/styles@^4.11.5": - version "4.11.5" - resolved "https://registry.yarnpkg.com/@material-ui/styles/-/styles-4.11.5.tgz#19f84457df3aafd956ac863dbe156b1d88e2bbfb" - integrity 
sha512-o/41ot5JJiUsIETME9wVLAJrmIWL3j0R0Bj2kCOLbSfqEkKf0fmaPt+5vtblUh5eXr2S+J/8J3DaCb10+CzPGA== - dependencies: - "@babel/runtime" "^7.4.4" - "@emotion/hash" "^0.8.0" - "@material-ui/types" "5.1.0" - "@material-ui/utils" "^4.11.3" - clsx "^1.0.4" - csstype "^2.5.2" - hoist-non-react-statics "^3.3.2" - jss "^10.5.1" - jss-plugin-camel-case "^10.5.1" - jss-plugin-default-unit "^10.5.1" - jss-plugin-global "^10.5.1" - jss-plugin-nested "^10.5.1" - jss-plugin-props-sort "^10.5.1" - jss-plugin-rule-value-function "^10.5.1" - jss-plugin-vendor-prefixer "^10.5.1" - prop-types "^15.7.2" - -"@material-ui/system@^4.12.2": - version "4.12.2" - resolved "https://registry.yarnpkg.com/@material-ui/system/-/system-4.12.2.tgz#f5c389adf3fce4146edd489bf4082d461d86aa8b" - integrity sha512-6CSKu2MtmiJgcCGf6nBQpM8fLkuB9F55EKfbdTC80NND5wpTmKzwdhLYLH3zL4cLlK0gVaaltW7/wMuyTnN0Lw== - dependencies: - "@babel/runtime" "^7.4.4" - "@material-ui/utils" "^4.11.3" - csstype "^2.5.2" - prop-types "^15.7.2" - -"@material-ui/types@5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@material-ui/types/-/types-5.1.0.tgz#efa1c7a0b0eaa4c7c87ac0390445f0f88b0d88f2" - integrity sha512-7cqRjrY50b8QzRSYyhSpx4WRw2YuO0KKIGQEVk5J8uoz2BanawykgZGoWEqKm7pVIbzFDN0SpPcVV4IhOFkl8A== - -"@material-ui/utils@^4.11.3": - version "4.11.3" - resolved "https://registry.yarnpkg.com/@material-ui/utils/-/utils-4.11.3.tgz#232bd86c4ea81dab714f21edad70b7fdf0253942" - integrity sha512-ZuQPV4rBK/V1j2dIkSSEcH5uT6AaHuKWFfotADHsC0wVL1NLd2WkFCm4ZZbX33iO4ydl6V0GPngKm8HZQ2oujg== - dependencies: - "@babel/runtime" "^7.4.4" - prop-types "^15.7.2" - react-is "^16.8.0 || ^17.0.0" - -"@monaco-editor/loader@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@monaco-editor/loader/-/loader-1.3.2.tgz#04effbb87052d19cd7d3c9d81c0635490f9bb6d8" - integrity sha512-BTDbpHl3e47r3AAtpfVFTlAi7WXv4UQ/xZmz8atKl4q7epQV5e7+JbigFDViWF71VBi4IIBdcWP57Hj+OWuc9g== - dependencies: - state-local "^1.0.6" - 
-"@monaco-editor/react@^4.3.1": - version "4.4.5" - resolved "https://registry.yarnpkg.com/@monaco-editor/react/-/react-4.4.5.tgz#beabe491efeb2457441a00d1c7651c653697f65b" - integrity sha512-IImtzU7sRc66OOaQVCG+5PFHkSWnnhrUWGBuH6zNmH2h0YgmAhcjHZQc/6MY9JWEbUtVF1WPBMJ9u1XuFbRrVA== - dependencies: - "@monaco-editor/loader" "^1.3.2" - prop-types "^15.7.2" - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": - version "0.5.6" - resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.6.tgz#9ced74cb23dae31ab385f775e237ce4c50422a1d" - integrity sha512-IIWxofIYt/AbMwoeBgj+O2aAXLrlCQVg+A4a2zfpXFNHgP8o8rvi3v+oe5t787Lj+KXlKOh8BAiUp9bhuELXhg== - dependencies: - ansi-html-community "^0.0.8" - common-path-prefix "^3.0.0" - core-js-pure "^3.8.1" - error-stack-parser "^2.0.6" - find-up "^5.0.0" - html-entities "^2.1.0" - loader-utils "^2.0.0" - schema-utils "^3.0.0" - source-map "^0.7.3" - -"@rollup/plugin-babel@^5.2.0": - version "5.3.1" - resolved 
"https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" - integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@rollup/pluginutils" "^3.1.0" - -"@rollup/plugin-node-resolve@^11.2.1": - version "11.2.1" - resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" - integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - "@types/resolve" "1.17.1" - builtin-modules "^3.1.0" - deepmerge "^4.2.2" - is-module "^1.0.0" - resolve "^1.19.0" - -"@rollup/plugin-replace@^2.4.1": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" - integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - magic-string "^0.25.7" - -"@rollup/pluginutils@^3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" - integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== - dependencies: - "@types/estree" "0.0.39" - estree-walker "^1.0.1" - picomatch "^2.2.2" - -"@rushstack/eslint-patch@^1.1.0": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.3.tgz#6801033be7ff87a6b7cadaf5b337c9f366a3c4b0" - integrity sha512-WiBSI6JBIhC6LRIsB2Kwh8DsGTlbBU+mLRxJmAe3LjHTdkDpwIbEOZgoXBbZilk/vlfjK8i6nKRAvIRn1XaIMw== - -"@sinclair/typebox@^0.23.3": - version "0.23.5" - resolved 
"https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.23.5.tgz#93f7b9f4e3285a7a9ade7557d9a8d36809cbc47d" - integrity sha512-AFBVi/iT4g20DHoujvMH1aEDn8fGJh4xsRGCP6d8RpLPMqsNPvW01Jcn0QysXTsg++/xj25NmJsGyH9xug/wKg== - -"@sindresorhus/is@^4.0.0": - version "4.6.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" - integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== - -"@sinonjs/commons@^1.7.0": - version "1.8.3" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^8.0.1": - version "8.1.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" - integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== - dependencies: - "@sinonjs/commons" "^1.7.0" - -"@surma/rollup-plugin-off-main-thread@^2.2.3": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" - integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== - dependencies: - ejs "^3.1.6" - json5 "^2.2.0" - magic-string "^0.25.0" - string.prototype.matchall "^4.0.6" - -"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" - integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== - -"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": - version "5.4.0" - 
resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" - integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== - -"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" - integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== - -"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" - integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== - -"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" - integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== - -"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" - integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== - -"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" - integrity 
sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== - -"@svgr/babel-plugin-transform-svg-component@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" - integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== - -"@svgr/babel-preset@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" - integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" - "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" - "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" - "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" - "@svgr/babel-plugin-transform-svg-component" "^5.5.0" - -"@svgr/core@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" - integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== - dependencies: - "@svgr/plugin-jsx" "^5.5.0" - camelcase "^6.2.0" - cosmiconfig "^7.0.0" - -"@svgr/hast-util-to-babel-ast@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" - integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== - dependencies: - "@babel/types" "^7.12.6" - -"@svgr/plugin-jsx@^5.5.0": - version "5.5.0" - resolved 
"https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" - integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== - dependencies: - "@babel/core" "^7.12.3" - "@svgr/babel-preset" "^5.5.0" - "@svgr/hast-util-to-babel-ast" "^5.5.0" - svg-parser "^2.0.2" - -"@svgr/plugin-svgo@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" - integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== - dependencies: - cosmiconfig "^7.0.0" - deepmerge "^4.2.2" - svgo "^1.2.2" - -"@svgr/webpack@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" - integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== - dependencies: - "@babel/core" "^7.12.3" - "@babel/plugin-transform-react-constant-elements" "^7.12.1" - "@babel/preset-env" "^7.12.1" - "@babel/preset-react" "^7.12.5" - "@svgr/core" "^5.5.0" - "@svgr/plugin-jsx" "^5.5.0" - "@svgr/plugin-svgo" "^5.5.0" - loader-utils "^2.0.0" - -"@szmarczak/http-timer@^4.0.5": - version "4.0.6" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" - integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== - dependencies: - defer-to-connect "^2.0.0" - -"@testing-library/dom@^8.2.0", "@testing-library/dom@^8.5.0": - version "8.13.0" - resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.13.0.tgz#bc00bdd64c7d8b40841e27a70211399ad3af46f5" - integrity sha512-9VHgfIatKNXQNaZTtLnalIy0jNZzY35a4S3oi08YAt9Hv1VsfZ/DfA45lM8D/UhtHBGJ4/lGwp0PZkVndRkoOQ== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/runtime" "^7.12.5" - 
"@types/aria-query" "^4.2.0" - aria-query "^5.0.0" - chalk "^4.1.0" - dom-accessibility-api "^0.5.9" - lz-string "^1.4.4" - pretty-format "^27.0.2" - -"@testing-library/jest-dom@^5.16.4": - version "5.16.4" - resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.4.tgz#938302d7b8b483963a3ae821f1c0808f872245cd" - integrity sha512-Gy+IoFutbMQcky0k+bqqumXZ1cTGswLsFqmNLzNdSKkU9KGV2u9oXhukCbbJ9/LRPKiqwxEE8VpV/+YZlfkPUA== - dependencies: - "@babel/runtime" "^7.9.2" - "@types/testing-library__jest-dom" "^5.9.1" - aria-query "^5.0.0" - chalk "^3.0.0" - css "^3.0.0" - css.escape "^1.5.1" - dom-accessibility-api "^0.5.6" - lodash "^4.17.15" - redent "^3.0.0" - -"@testing-library/react@^13.2.0": - version "13.2.0" - resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-13.2.0.tgz#2db00bc94d71c4e90e5c25582e90a650ae2925bf" - integrity sha512-Bprbz/SZVONCJy5f7hcihNCv313IJXdYiv0nSJklIs1SQCIHHNlnGNkosSXnGZTmesyGIcBGNppYhXcc11pb7g== - dependencies: - "@babel/runtime" "^7.12.5" - "@testing-library/dom" "^8.5.0" - "@types/react-dom" "^18.0.0" - -"@testing-library/user-event@^14.2.0": - version "14.2.0" - resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-14.2.0.tgz#8293560f8f80a00383d6c755ec3e0b918acb1683" - integrity sha512-+hIlG4nJS6ivZrKnOP7OGsDu9Fxmryj9vCl8x0ZINtTJcCHs2zLsYif5GzuRiBF2ck5GZG2aQr7Msg+EHlnYVQ== - -"@tootallnate/once@1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - -"@types/aria-query@^4.2.0": - version "4.2.2" - resolved 
"https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" - integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== - -"@types/aria-query@^5.0.0": - version "5.0.0" - resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.0.tgz#df2d64b5cc73cca0d75e2a7793d6b5c199c2f7b2" - integrity sha512-P+dkdFu0n08PDIvw+9nT9ByQnd+Udc8DaWPb9HKfaPwCvWvQpC5XaMRx2xLWECm9x1VKNps6vEAlirjA6+uNrQ== - -"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": - version "7.1.19" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" - integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - "@types/babel__generator" "*" - "@types/babel__template" "*" - "@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.4.1" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.14.2.tgz#ffcd470bbb3f8bf30481678fb5502278ca833a43" - integrity 
sha512-K2waXdXBi2302XUdcHcR1jCeU0LL4TD9HRs/gk0N2Xvrht+G/BfJa4QObBQZfhMdxiCpV3COl5Nfq4uKTeTnJA== - dependencies: - "@babel/types" "^7.3.0" - -"@types/body-parser@*": - version "1.19.2" - resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" - integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== - dependencies: - "@types/connect" "*" - "@types/node" "*" - -"@types/bonjour@^3.5.9": - version "3.5.10" - resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" - integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== - dependencies: - "@types/node" "*" - -"@types/cacheable-request@^6.0.1": - version "6.0.2" - resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.2.tgz#c324da0197de0a98a2312156536ae262429ff6b9" - integrity sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA== - dependencies: - "@types/http-cache-semantics" "*" - "@types/keyv" "*" - "@types/node" "*" - "@types/responselike" "*" - -"@types/connect-history-api-fallback@^1.3.5": - version "1.3.5" - resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" - integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== - dependencies: - "@types/express-serve-static-core" "*" - "@types/node" "*" - -"@types/connect@*": - version "3.4.35" - resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" - integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== - dependencies: - "@types/node" "*" - -"@types/diff@^5.0.0": - version "5.0.2" - resolved 
"https://registry.yarnpkg.com/@types/diff/-/diff-5.0.2.tgz#dd565e0086ccf8bc6522c6ebafd8a3125c91c12b" - integrity sha512-uw8eYMIReOwstQ0QKF0sICefSy8cNO/v7gOTiIy9SbwuHyEecJUm7qlgueOO5S1udZ5I/irVydHVwMchgzbKTg== - -"@types/easy-table@^0.0.33": - version "0.0.33" - resolved "https://registry.yarnpkg.com/@types/easy-table/-/easy-table-0.0.33.tgz#b1f7ec29014ec24906b4f28d8368e2e99b399313" - integrity sha512-/vvqcJPmZUfQwCgemL0/34G7bIQnCuvgls379ygRlcC1FqNqk3n+VZ15dAO51yl6JNDoWd8vsk+kT8zfZ1VZSw== - -"@types/ejs@^3.0.5": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@types/ejs/-/ejs-3.1.1.tgz#29c539826376a65e7f7d672d51301f37ed718f6d" - integrity sha512-RQul5wEfY7BjWm0sYY86cmUN/pcXWGyVxWX93DFFJvcrxax5zKlieLwA3T77xJGwNcZW0YW6CYG70p1m8xPFmA== - -"@types/eslint-scope@^3.7.3": - version "3.7.3" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" - integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.4.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.2.tgz#48f2ac58ab9c631cb68845c3d956b28f79fad575" - integrity sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - -"@types/eslint@^7.28.2": - version "7.29.0" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" - integrity sha512-VNcvioYDH8/FxaeTKkM4/TiTwt6pBV9E3OfGmvaw8tPl0rrHCJ4Ll15HRT+pMiFAf/MLQvAzC+6RzUMEL9Ceng== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - -"@types/estree@*", "@types/estree@^0.0.51": - version "0.0.51" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity 
sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== - -"@types/estree@0.0.39": - version "0.0.39" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" - integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== - -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": - version "4.17.28" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" - integrity sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig== - dependencies: - "@types/node" "*" - "@types/qs" "*" - "@types/range-parser" "*" - -"@types/express@*", "@types/express@^4.17.13": - version "4.17.13" - resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034" - integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== - dependencies: - "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.18" - "@types/qs" "*" - "@types/serve-static" "*" - -"@types/fibers@^3.1.0": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@types/fibers/-/fibers-3.1.1.tgz#b714d357eebf6aec0bc5d70512e573b89bc84f20" - integrity sha512-yHoUi46uika0snoTpNcVqUSvgbRndaIps4TUCotrXjtc0DHDoPQckmyXEZ2bX3e4mpJmyEW3hRhCwQa/ISCPaA== - -"@types/fs-extra@^9.0.1", "@types/fs-extra@^9.0.4": - version "9.0.13" - resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-9.0.13.tgz#7594fbae04fe7f1918ce8b3d213f74ff44ac1f45" - integrity sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA== - dependencies: - "@types/node" "*" - -"@types/graceful-fs@^4.1.2": - version "4.1.5" - resolved 
"https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== - dependencies: - "@types/node" "*" - -"@types/hammerjs@^2.0.36": - version "2.0.41" - resolved "https://registry.yarnpkg.com/@types/hammerjs/-/hammerjs-2.0.41.tgz#f6ecf57d1b12d2befcce00e928a6a097c22980aa" - integrity sha512-ewXv/ceBaJprikMcxCmWU1FKyMAQ2X7a9Gtmzw8fcg2kIePI1crERDM818W+XYrxqdBBOdlf2rm137bU+BltCA== - -"@types/html-minifier-terser@^6.0.0": - version "6.1.0" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" - integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== - -"@types/http-cache-semantics@*": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" - integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== - -"@types/http-proxy@^1.17.8": - version "1.17.9" - resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" - integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== - dependencies: - "@types/node" "*" - -"@types/inquirer@^8.1.2": - version "8.2.1" - resolved "https://registry.yarnpkg.com/@types/inquirer/-/inquirer-8.2.1.tgz#28a139be3105a1175e205537e8ac10830e38dbf4" - integrity sha512-wKW3SKIUMmltbykg4I5JzCVzUhkuD9trD6efAmYgN2MrSntY0SMRQzEnD3mkyJ/rv9NLbTC7g3hKKE86YwEDLw== - dependencies: - "@types/through" "*" - rxjs "^7.2.0" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.4" - resolved 
"https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" - integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@*": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-27.5.1.tgz#2c8b6dc6ff85c33bcd07d0b62cb3d19ddfdb3ab9" - integrity sha512-fUy7YRpT+rHXto1YlL+J9rs0uLGyiqVt3ZOTQR+4ROc47yNl8WLdVLgUloBRhOxP1PZvguHl44T3H0wAWxahYQ== - dependencies: - jest-matcher-utils "^27.0.0" - pretty-format "^27.0.0" - -"@types/json-buffer@~3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/json-buffer/-/json-buffer-3.0.0.tgz#85c1ff0f0948fc159810d4b5be35bf8c20875f64" - integrity sha512-3YP80IxxFJB4b5tYC2SUPwkg0XQLiu0nWvhRgEatgjf+29IcWO9X1k8xRv5DGssJ/lCrjYTjQPcobJr2yWIVuQ== - -"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.11" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== - 
-"@types/json-stringify-safe@^5.0.0": - version "5.0.0" - resolved "https://registry.yarnpkg.com/@types/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz#df34d054419d39323a3730966bacba02ac5e474e" - integrity sha512-UUA1sH0RSRROdInuDOA1yoRzbi5xVFD1RHCoOvNRPTNwR8zBkJ/84PZ6NhKVDtKp0FTeIccJCdQz1X2aJPr4uw== - -"@types/json5@^0.0.29": - version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" - integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= - -"@types/keyv@*": - version "3.1.4" - resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.4.tgz#3ccdb1c6751b0c7e52300bcdacd5bcbf8faa75b6" - integrity sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg== - dependencies: - "@types/node" "*" - -"@types/lodash.flattendeep@^4.4.6": - version "4.4.7" - resolved "https://registry.yarnpkg.com/@types/lodash.flattendeep/-/lodash.flattendeep-4.4.7.tgz#0ce3dccbe006826d58e9824b27df4b00ed3e90e6" - integrity sha512-1h6GW/AeZw/Wej6uxrqgmdTDZX1yFS39lRsXYkg+3kWvOWWrlGCI6H7lXxlUHOzxDT4QeYGmgPpQ3BX9XevzOg== - dependencies: - "@types/lodash" "*" - -"@types/lodash.pickby@^4.6.6": - version "4.6.7" - resolved "https://registry.yarnpkg.com/@types/lodash.pickby/-/lodash.pickby-4.6.7.tgz#fd089a5a7f8cbe7294ae5c90ea5ecd9f4cae4d2c" - integrity sha512-4ebXRusuLflfscbD0PUX4eVknDHD9Yf+uMtBIvA/hrnTqeAzbuHuDjvnYriLjUrI9YrhCPVKUf4wkRSXJQ6gig== - dependencies: - "@types/lodash" "*" - -"@types/lodash.union@^4.6.6": - version "4.6.7" - resolved "https://registry.yarnpkg.com/@types/lodash.union/-/lodash.union-4.6.7.tgz#ceace5ed9f3610652ba4a72e0e0afb2a0eec7a4d" - integrity sha512-6HXM6tsnHJzKgJE0gA/LhTGf/7AbjUk759WZ1MziVm+OBNAATHhdgj+a3KVE8g76GCLAnN4ZEQQG1EGgtBIABA== - dependencies: - "@types/lodash" "*" - -"@types/lodash@*": - version "4.14.182" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.182.tgz#05301a4d5e62963227eaafe0ce04dd77c54ea5c2" - integrity 
sha512-/THyiqyQAP9AfARo4pF+aCGcyiQ94tX/Is2I7HofNRqoYLgN1PBoOWu2/zTA5zMxzP5EFutMtWtGAFRKUe961Q== - -"@types/lodash@^4.14.175": - version "4.14.181" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.181.tgz#d1d3740c379fda17ab175165ba04e2d03389385d" - integrity sha512-n3tyKthHJbkiWhDZs3DkhkCzt2MexYHXlX0td5iMplyfwketaOeKboEVBqzceH7juqvEg3q5oUoBFxSLu7zFag== - -"@types/mime@^1": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" - integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== - -"@types/mocha@^9.0.0": - version "9.1.1" - resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-9.1.1.tgz#e7c4f1001eefa4b8afbd1eee27a237fee3bf29c4" - integrity sha512-Z61JK7DKDtdKTWwLeElSEBcWGRLY8g95ic5FoQqI9CMx0ns/Ghep3B4DfcEimiKMvtamNVULVNKEsiwV3aQmXw== - -"@types/node@*", "@types/node@^17.0.4": - version "17.0.33" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.33.tgz#3c1879b276dc63e73030bb91165e62a4509cd506" - integrity sha512-miWq2m2FiQZmaHfdZNcbpp9PuXg34W5JZ5CrJ/BaS70VuhoJENBEQybeiYSaPBRNq6KQGnjfEnc/F3PN++D+XQ== - -"@types/object-inspect@^1.8.0": - version "1.8.1" - resolved "https://registry.yarnpkg.com/@types/object-inspect/-/object-inspect-1.8.1.tgz#7c08197ad05cc0e513f529b1f3919cc99f720e1f" - integrity sha512-0JTdf3CGV0oWzE6Wa40Ayv2e2GhpP3pEJMcrlM74vBSJPuuNkVwfDnl0SZxyFCXETcB4oKA/MpTVfuYSMOelBg== - -"@types/parse-json@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== - -"@types/prettier@^2.1.5": - version "2.6.1" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.1.tgz#76e72d8a775eef7ce649c63c8acae1a0824bbaed" - integrity 
sha512-XFjFHmaLVifrAKaZ+EKghFHtHSUonyw8P2Qmy2/+osBnrKbH9UYtlK10zg8/kCt47MFilll/DEDKy3DHfJ0URw== - -"@types/prop-types@*": - version "15.7.5" - resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" - integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== - -"@types/puppeteer@^5.4.0": - version "5.4.6" - resolved "https://registry.yarnpkg.com/@types/puppeteer/-/puppeteer-5.4.6.tgz#afc438e41dcbc27ca1ba0235ea464a372db2b21c" - integrity sha512-98Kghehs7+/GD9b56qryhqdqVCXUTbetTv3PlvDnmFRTHQH0j9DIp1f7rkAW3BAj4U3yoeSEQnKgdW8bDq0Y0Q== - dependencies: - "@types/node" "*" - -"@types/q@^1.5.1": - version "1.5.5" - resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" - integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== - -"@types/qs@*": - version "6.9.7" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" - integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== - -"@types/range-parser@*": - version "1.2.4" - resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" - integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== - -"@types/react-dom@^18.0.0": - version "18.0.4" - resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.4.tgz#dcbcadb277bcf6c411ceff70069424c57797d375" - integrity sha512-FgTtbqPOCI3dzZPZoC2T/sx3L34qxy99ITWn4eoSA95qPyXDMH0ALoAqUp49ITniiJFsXUVBtalh/KffMpg21Q== - dependencies: - "@types/react" "*" - -"@types/react-transition-group@^4.2.0": - version "4.4.4" - resolved 
"https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-4.4.4.tgz#acd4cceaa2be6b757db61ed7b432e103242d163e" - integrity sha512-7gAPz7anVK5xzbeQW9wFBDg7G++aPLAFY0QaSMOou9rJZpbuI58WAuJrgu+qR92l61grlnCUe7AFX8KGahAgug== - dependencies: - "@types/react" "*" - -"@types/react@*": - version "18.0.3" - resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.3.tgz#baefa397561372015b9f8ba5bc83bc3f84ae8fcb" - integrity sha512-P8QUaMW4k+kH9aKNPl9b3XWcKMSSALYprLL8xpAMJOLUn3Pl6B+6nKC4F7dsk9oJPwkiRx+qlwhG/Zc1LxFVuQ== - dependencies: - "@types/prop-types" "*" - "@types/scheduler" "*" - csstype "^3.0.2" - -"@types/recursive-readdir@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@types/recursive-readdir/-/recursive-readdir-2.2.1.tgz#330f5ec0b73e8aeaf267a6e056884e393f3543a3" - integrity sha512-Xd+Ptc4/F2ueInqy5yK2FI5FxtwwbX2+VZpcg+9oYsFJVen8qQKGapCr+Bi5wQtHU1cTXT8s+07lo/nKPgu8Gg== - dependencies: - "@types/node" "*" - -"@types/resolve@1.17.1": - version "1.17.1" - resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" - integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== - dependencies: - "@types/node" "*" - -"@types/responselike@*", "@types/responselike@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" - integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== - dependencies: - "@types/node" "*" - -"@types/retry@0.12.0": - version "0.12.0" - resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" - integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== - -"@types/scheduler@*": - version "0.16.2" - resolved 
"https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" - integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== - -"@types/selenium-standalone@^7.0.0": - version "7.0.1" - resolved "https://registry.yarnpkg.com/@types/selenium-standalone/-/selenium-standalone-7.0.1.tgz#7d94c2f663ceb495648c2c2a300f317d3a835257" - integrity sha512-zbKenL0fAXzPyiOaaFMrvFdMNhj5BgNJQq8bxiZfwQD9ID2J8bUG5xbcS3tQtlzIX/62z9nG5Vo45oaHWTbvbw== - dependencies: - "@types/node" "*" - -"@types/serve-index@^1.9.1": - version "1.9.1" - resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" - integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== - dependencies: - "@types/express" "*" - -"@types/serve-static@*": - version "1.13.10" - resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.10.tgz#f5e0ce8797d2d7cc5ebeda48a52c96c4fa47a8d9" - integrity sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ== - dependencies: - "@types/mime" "^1" - "@types/node" "*" - -"@types/sockjs@^0.3.33": - version "0.3.33" - resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" - integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== - dependencies: - "@types/node" "*" - -"@types/stack-utils@^2.0.0": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" - integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== - -"@types/stream-buffers@^3.0.3": - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/@types/stream-buffers/-/stream-buffers-3.0.4.tgz#bf128182da7bc62722ca0ddf5458a9c65f76e648" - integrity sha512-qU/K1tb2yUdhXkLIATzsIPwbtX6BpZk0l3dPW6xqWyhfzzM1ECaQ/8faEnu3CNraLiQ9LHyQQPBGp7N9Fbs25w== - dependencies: - "@types/node" "*" - -"@types/supports-color@^8.1.0": - version "8.1.1" - resolved "https://registry.yarnpkg.com/@types/supports-color/-/supports-color-8.1.1.tgz#1b44b1b096479273adf7f93c75fc4ecc40a61ee4" - integrity sha512-dPWnWsf+kzIG140B8z2w3fr5D03TLWbOAFQl45xUpI3vcizeXriNR5VYkWZ+WTMsUHqZ9Xlt3hrxGNANFyNQfw== - -"@types/testing-library__jest-dom@^5.9.1": - version "5.14.3" - resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.3.tgz#ee6c7ffe9f8595882ee7bda8af33ae7b8789ef17" - integrity sha512-oKZe+Mf4ioWlMuzVBaXQ9WDnEm1+umLx0InILg+yvZVBBDmzV5KfZyLrCvadtWcx8+916jLmHafcmqqffl+iIw== - dependencies: - "@types/jest" "*" - -"@types/through@*": - version "0.0.30" - resolved "https://registry.yarnpkg.com/@types/through/-/through-0.0.30.tgz#e0e42ce77e897bd6aead6f6ea62aeb135b8a3895" - integrity sha512-FvnCJljyxhPM3gkRgWmxmDZyAQSiBQQWLI0A0VFL0K7W1oRUrPJSqNO0NvTnLkBcotdlp3lKvaT0JrnyRDkzOg== - dependencies: - "@types/node" "*" - -"@types/tmp@^0.2.0": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.3.tgz#908bfb113419fd6a42273674c00994d40902c165" - integrity sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA== - -"@types/trusted-types@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" - integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== - -"@types/ua-parser-js@^0.7.33": - version "0.7.36" - resolved "https://registry.yarnpkg.com/@types/ua-parser-js/-/ua-parser-js-0.7.36.tgz#9bd0b47f26b5a3151be21ba4ce9f5fa457c5f190" - integrity 
sha512-N1rW+njavs70y2cApeIw1vLMYXRwfBy+7trgavGuuTfOd7j1Yh7QTRc/yqsPl6ncokt72ZXuxEU0PiCp9bSwNQ== - -"@types/validator@^13.1.3": - version "13.7.2" - resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.7.2.tgz#a2114225d9be743fb154b06c29b8257aaca42922" - integrity sha512-KFcchQ3h0OPQgFirBRPZr5F/sVjxZsOrQHedj3zi8AH3Zv/hOLx2OLR4hxR5HcfoU+33n69ZuOfzthKVdMoTiw== - -"@types/which@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@types/which/-/which-1.3.2.tgz#9c246fc0c93ded311c8512df2891fb41f6227fdf" - integrity sha512-8oDqyLC7eD4HM307boe2QWKyuzdzWBj56xI/imSl2cpL+U3tCMaTAkMJ4ee5JBZ/FsOJlvRGeIShiZDAl1qERA== - -"@types/ws@^8.5.1": - version "8.5.3" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" - integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== - dependencies: - "@types/node" "*" - -"@types/yargs-parser@*": - version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== - -"@types/yargs@^16.0.0": - version "16.0.4" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" - integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== - dependencies: - "@types/yargs-parser" "*" - -"@types/yargs@^17.0.8": - version "17.0.10" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.10.tgz#591522fce85d8739bca7b8bb90d048e4478d186a" - integrity sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA== - dependencies: - "@types/yargs-parser" "*" - -"@types/yauzl@^2.9.1": - version "2.10.0" - resolved 
"https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.10.0.tgz#b3248295276cf8c6f153ebe6a9aba0c988cb2599" - integrity sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw== - dependencies: - "@types/node" "*" - -"@typescript-eslint/eslint-plugin@^5.5.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.23.0.tgz#bc4cbcf91fbbcc2e47e534774781b82ae25cc3d8" - integrity sha512-hEcSmG4XodSLiAp1uxv/OQSGsDY6QN3TcRU32gANp+19wGE1QQZLRS8/GV58VRUoXhnkuJ3ZxNQ3T6Z6zM59DA== - dependencies: - "@typescript-eslint/scope-manager" "5.23.0" - "@typescript-eslint/type-utils" "5.23.0" - "@typescript-eslint/utils" "5.23.0" - debug "^4.3.2" - functional-red-black-tree "^1.0.1" - ignore "^5.1.8" - regexpp "^3.2.0" - semver "^7.3.5" - tsutils "^3.21.0" - -"@typescript-eslint/experimental-utils@^5.0.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.23.0.tgz#ea03860fa612dadf272789988f2ce41f0b7bb2f7" - integrity sha512-I+3YGQztH1DM9kgWzjslpZzJCBMRz0KhYG2WP62IwpooeZ1L6Qt0mNK8zs+uP+R2HOsr+TeDW35Pitc3PfVv8Q== - dependencies: - "@typescript-eslint/utils" "5.23.0" - -"@typescript-eslint/parser@^5.5.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.23.0.tgz#443778e1afc9a8ff180f91b5e260ac3bec5e2de1" - integrity sha512-V06cYUkqcGqpFjb8ttVgzNF53tgbB/KoQT/iB++DOIExKmzI9vBJKjZKt/6FuV9c+zrDsvJKbJ2DOCYwX91cbw== - dependencies: - "@typescript-eslint/scope-manager" "5.23.0" - "@typescript-eslint/types" "5.23.0" - "@typescript-eslint/typescript-estree" "5.23.0" - debug "^4.3.2" - -"@typescript-eslint/scope-manager@5.23.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.23.0.tgz#4305e61c2c8e3cfa3787d30f54e79430cc17ce1b" - integrity sha512-EhjaFELQHCRb5wTwlGsNMvzK9b8Oco4aYNleeDlNuL6qXWDF47ch4EhVNPh8Rdhf9tmqbN4sWDk/8g+Z/J8JVw== 
- dependencies: - "@typescript-eslint/types" "5.23.0" - "@typescript-eslint/visitor-keys" "5.23.0" - -"@typescript-eslint/type-utils@5.23.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.23.0.tgz#f852252f2fc27620d5bb279d8fed2a13d2e3685e" - integrity sha512-iuI05JsJl/SUnOTXA9f4oI+/4qS/Zcgk+s2ir+lRmXI+80D8GaGwoUqs4p+X+4AxDolPpEpVUdlEH4ADxFy4gw== - dependencies: - "@typescript-eslint/utils" "5.23.0" - debug "^4.3.2" - tsutils "^3.21.0" - -"@typescript-eslint/types@5.23.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.23.0.tgz#8733de0f58ae0ed318dbdd8f09868cdbf9f9ad09" - integrity sha512-NfBsV/h4dir/8mJwdZz7JFibaKC3E/QdeMEDJhiAE3/eMkoniZ7MjbEMCGXw6MZnZDMN3G9S0mH/6WUIj91dmw== - -"@typescript-eslint/typescript-estree@5.23.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.23.0.tgz#dca5f10a0a85226db0796e8ad86addc9aee52065" - integrity sha512-xE9e0lrHhI647SlGMl+m+3E3CKPF1wzvvOEWnuE3CCjjT7UiRnDGJxmAcVKJIlFgK6DY9RB98eLr1OPigPEOGg== - dependencies: - "@typescript-eslint/types" "5.23.0" - "@typescript-eslint/visitor-keys" "5.23.0" - debug "^4.3.2" - globby "^11.0.4" - is-glob "^4.0.3" - semver "^7.3.5" - tsutils "^3.21.0" - -"@typescript-eslint/utils@5.23.0", "@typescript-eslint/utils@^5.13.0": - version "5.23.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.23.0.tgz#4691c3d1b414da2c53d8943310df36ab1c50648a" - integrity sha512-dbgaKN21drqpkbbedGMNPCtRPZo1IOUr5EI9Jrrh99r5UW5Q0dz46RKXeSBoPV+56R6dFKpbrdhgUNSJsDDRZA== - dependencies: - "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.23.0" - "@typescript-eslint/types" "5.23.0" - "@typescript-eslint/typescript-estree" "5.23.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - -"@typescript-eslint/visitor-keys@5.23.0": - version "5.23.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.23.0.tgz#057c60a7ca64667a39f991473059377a8067c87b" - integrity sha512-Vd4mFNchU62sJB8pX19ZSPog05B0Y0CE2UxAZPT5k4iqhRYjPnqyY3woMxCd0++t9OTqkgjST+1ydLBi7e2Fvg== - dependencies: - "@typescript-eslint/types" "5.23.0" - eslint-visitor-keys "^3.0.0" - -"@ungap/promise-all-settled@1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz#aa58042711d6e3275dd37dc597e5d31e8c290a44" - integrity sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q== - -"@wdio/cli@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/cli/-/cli-7.19.7.tgz#3a5c5de45839e218e4b5f326f7fe2e4c2634da42" - integrity sha512-BxAL2iHBZ/3J5hP48kRWfL6zI7T/WKtybsvQtuDSwYzpgJq6ZrCS8bbOldH6Nvb3YRaP7MmItRQFG7bQwFzJew== - dependencies: - "@types/ejs" "^3.0.5" - "@types/fs-extra" "^9.0.4" - "@types/inquirer" "^8.1.2" - "@types/lodash.flattendeep" "^4.4.6" - "@types/lodash.pickby" "^4.6.6" - "@types/lodash.union" "^4.6.6" - "@types/node" "^17.0.4" - "@types/recursive-readdir" "^2.2.0" - "@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - async-exit-hook "^2.0.1" - chalk "^4.0.0" - chokidar "^3.0.0" - cli-spinners "^2.1.0" - ejs "^3.0.1" - fs-extra "^10.0.0" - inquirer "8.2.4" - lodash.flattendeep "^4.4.0" - lodash.pickby "^4.6.0" - lodash.union "^4.6.0" - mkdirp "^1.0.4" - recursive-readdir "^2.2.2" - webdriverio "7.19.7" - yargs "^17.0.0" - yarn-install "^1.0.0" - -"@wdio/config@7.19.5": - version "7.19.5" - resolved "https://registry.yarnpkg.com/@wdio/config/-/config-7.19.5.tgz#aa8158d648e1ffb28a7e53474d5ce171066e82f7" - integrity sha512-GyG0SSUjw9RyDgEwculgwiWyQ0eEeFAgaKTAa4RHC6ZgHHTgfyxzkWqBmNLzHfiB6GSR2DyZDcDsPT7ZAHkiEg== - dependencies: - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - deepmerge "^4.0.0" - glob "^7.1.2" - 
-"@wdio/junit-reporter@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/junit-reporter/-/junit-reporter-7.19.7.tgz#1d983928ad9f3aa07c7ebfa448315052dce440ed" - integrity sha512-x3sjHybj+WpRJt7HIvW9ae3F6kaeV+/KUTUMYVMkl/USI623unuFEqEtv5gpL77g+k763nE+XUZCq2mXMMWLyw== - dependencies: - "@types/json-stringify-safe" "^5.0.0" - "@types/validator" "^13.1.3" - "@wdio/reporter" "7.19.7" - "@wdio/types" "7.19.5" - json-stringify-safe "^5.0.1" - junit-report-builder "^3.0.0" - validator "^13.0.0" - -"@wdio/local-runner@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/local-runner/-/local-runner-7.19.7.tgz#519d28d7f5f0204a996c6d441bfc1dd0aac02829" - integrity sha512-DZPaAzUwYZKO1OpBIeGppbY1vP9LJ1N/YT2/FkBy0mvSJ4NbJHj0jbKNUrGpEzOMIklbYPV/htxy3l29pjOkDg== - dependencies: - "@types/stream-buffers" "^3.0.3" - "@wdio/logger" "7.19.0" - "@wdio/repl" "7.19.7" - "@wdio/runner" "7.19.7" - "@wdio/types" "7.19.5" - async-exit-hook "^2.0.1" - split2 "^4.0.0" - stream-buffers "^3.0.2" - -"@wdio/logger@7.19.0": - version "7.19.0" - resolved "https://registry.yarnpkg.com/@wdio/logger/-/logger-7.19.0.tgz#23697a4b4aaea56c3bd477a0393af2a5c175fc85" - integrity sha512-xR7SN/kGei1QJD1aagzxs3KMuzNxdT/7LYYx+lt6BII49+fqL/SO+5X0FDCZD0Ds93AuQvvz9eGyzrBI2FFXmQ== - dependencies: - chalk "^4.0.0" - loglevel "^1.6.0" - loglevel-plugin-prefix "^0.8.4" - strip-ansi "^6.0.0" - -"@wdio/mocha-framework@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/mocha-framework/-/mocha-framework-7.19.7.tgz#58b0f397108ffc966242a45603c659b993f78c70" - integrity sha512-8QLIiveyfkHk4qulytNPNvSQ8YqANgrDjrKOryFF1EzdrjwdX0jLdfb23lKqDvFfBCWLWQ8DdY090gD6/tJOQg== - dependencies: - "@types/mocha" "^9.0.0" - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - expect-webdriverio "^3.0.0" - mocha "^10.0.0" - -"@wdio/protocols@7.19.0": - version "7.19.0" - resolved 
"https://registry.yarnpkg.com/@wdio/protocols/-/protocols-7.19.0.tgz#cd753752c64b9c1dd7ace05398c1d11c46af41ab" - integrity sha512-ji74rQag6v+INSNd0J8eAh2rpH5vOXgeiP5Qr32K6PWU6HzYWuAFH2x4srXsH0JawHCdTK2OQAOYrLmMb44hug== - -"@wdio/repl@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/repl/-/repl-7.19.7.tgz#bfcc1128785bc747e775c6baa280782f7e064eb7" - integrity sha512-6lgzZxSU2yV0YLb4byBASeC42y5rAZk7mOQ41fHTXyC9CfRJubwe47M9KJyAoOrHG2wpwUX92RLTpDrAVDV6Fg== - dependencies: - "@wdio/utils" "7.19.7" - -"@wdio/reporter@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/reporter/-/reporter-7.19.7.tgz#d595a631cedf387d015d55a51afe1549333c367b" - integrity sha512-Dum19gpfru66FnIq78/4HTuW87B7ceLDp6PJXwQM5kXyN7Gb7zhMgp6FZTM0FCYLyi6U/zXZSvpNUYl77caS6g== - dependencies: - "@types/diff" "^5.0.0" - "@types/node" "^17.0.4" - "@types/object-inspect" "^1.8.0" - "@types/supports-color" "^8.1.0" - "@types/tmp" "^0.2.0" - "@wdio/types" "7.19.5" - diff "^5.0.0" - fs-extra "^10.0.0" - object-inspect "^1.10.3" - supports-color "8.1.1" - -"@wdio/runner@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/runner/-/runner-7.19.7.tgz#d0115c52a02c2e4878e5450c3dbd9f214dff1ebc" - integrity sha512-PH4vOMwPnAU+cOTPrkJrU20CDDFGccBf4VV80GAQK9b71pfD+T7MIvUCnL0x/kMgIQV/0rFtM+6y5CDZI0R27g== - dependencies: - "@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - deepmerge "^4.0.0" - gaze "^1.1.2" - webdriver "7.19.7" - webdriverio "7.19.7" - -"@wdio/selenium-standalone-service@7.19.5": - version "7.19.5" - resolved "https://registry.yarnpkg.com/@wdio/selenium-standalone-service/-/selenium-standalone-service-7.19.5.tgz#9bb383b5d9bbddc9df9de308d86459df7332c579" - integrity sha512-Llq4zeQiWSOrAdtmAMyr3jiGAxC4mTOJUmCsj6EmzIwOx9wjagfO+ehx3OPdpjYlubSiJFJO8eT2lR34GHNARg== - dependencies: - "@types/fs-extra" "^9.0.1" - "@types/node" "^17.0.4" - "@types/selenium-standalone" "^7.0.0" - 
"@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - fs-extra "^10.0.0" - selenium-standalone "^8.0.3" - -"@wdio/spec-reporter@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/spec-reporter/-/spec-reporter-7.19.7.tgz#dd0f69f2e178e8c4a74121865a80abe2ccbd8f1e" - integrity sha512-BDBZU2EK/GuC9VxtfqPtoW43FmvKxYDsvcDVDi3F7o+9fkcuGSJiWbw1AX251ZzzVQ7YP9ImTitSpdpUKXkilQ== - dependencies: - "@types/easy-table" "^0.0.33" - "@wdio/reporter" "7.19.7" - "@wdio/types" "7.19.5" - chalk "^4.0.0" - easy-table "^1.1.1" - pretty-ms "^7.0.0" - -"@wdio/sync@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/sync/-/sync-7.19.7.tgz#19f685f1b052a154e7121fe6c19e2f227dde3457" - integrity sha512-iKQSeO3qWJKBB5Lt7Ir+AE1uZJgG0JVXnGfzTcvQAfn0i3ZdcMe4kQJrHIu8q8GxW8LeZh5MmSq2XP/VXjd1DQ== - dependencies: - "@types/fibers" "^3.1.0" - "@types/puppeteer" "^5.4.0" - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - fibers "^5.0.0" - webdriverio "7.19.7" - -"@wdio/types@7.19.5": - version "7.19.5" - resolved "https://registry.yarnpkg.com/@wdio/types/-/types-7.19.5.tgz#e05790f61dfab54ee6683ac799cb5f96615d1d0f" - integrity sha512-S1lC0pmtEO7NVH/2nM1c7NHbkgxLZH3VVG/z6ym3Bbxdtcqi2LMsEvvawMAU/fmhyiIkMsGZCO8vxG9cRw4z4A== - dependencies: - "@types/node" "^17.0.4" - got "^11.8.1" - -"@wdio/utils@7.19.7": - version "7.19.7" - resolved "https://registry.yarnpkg.com/@wdio/utils/-/utils-7.19.7.tgz#b1dd86a12a08ba4f445a70c9859e30cff6eb522f" - integrity sha512-i/fBnEmEGDQ8Sr8H8p9UZ0kUPjSQhoJE2EullSyX+YgyZDtO3JO0M0jiRpbCFr0M+7fi17g+YOzQWmCSRGhPJA== - dependencies: - "@wdio/logger" "7.19.0" - "@wdio/types" "7.19.5" - p-iteration "^1.1.8" - -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== - dependencies: - 
"@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== - -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== - -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== - -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== - dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== - -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" 
"1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@xtuc/long" "4.2.2" - -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - -abab@^2.0.3, abab@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" - integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-globals@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== - dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" - -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== - -acorn-jsx@^5.3.2: - version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn-node@^1.6.1: 
- version "1.8.2" - resolved "https://registry.yarnpkg.com/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" - integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== - dependencies: - acorn "^7.0.0" - acorn-walk "^7.0.0" - xtend "^4.0.2" - -acorn-walk@^7.0.0, acorn-walk@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn@^7.0.0, acorn@^7.1.1: - version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - -acorn@^8.2.4, acorn@^8.5.0: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== - -acorn@^8.4.1, acorn@^8.7.1: - version "8.7.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" - integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== - -address@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" - integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== - -address@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" - integrity sha512-tNEZYz5G/zYunxFm7sfhAxkXEuLj3K6BKwv6ZURlsF6yiUQ65z0Q2wZW9L5cPUl9ocofGvXOdFYbFHp0+6MOig== - -adjust-sourcemap-loader@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" - integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== - dependencies: - loader-utils "^2.0.0" - regex-parser "^2.2.11" - -agent-base@6: - version "6.0.2" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -ajv-formats@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== - dependencies: - ajv "^8.0.0" - -ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: - version "3.5.2" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" - integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" - integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== - dependencies: - fast-deep-equal "^3.1.3" - -ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: - version "8.11.0" - resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" - integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - -ansi-colors@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" - integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-html-community@^0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" - integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== - -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== - -ansi-styles@^2.2.1: - version "2.2.1" - resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -ansi-styles@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" - integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== - -anymatch@^3.0.3, anymatch@~3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -archiver-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-2.1.0.tgz#e8a460e94b693c3e3da182a098ca6285ba9249e2" - integrity sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw== - dependencies: - glob "^7.1.4" - graceful-fs "^4.2.0" - lazystream "^1.0.0" - lodash.defaults "^4.2.0" - lodash.difference "^4.5.0" - lodash.flatten "^4.4.0" - lodash.isplainobject "^4.0.6" - lodash.union "^4.6.0" - normalize-path "^3.0.0" - readable-stream "^2.0.0" - -archiver@^5.0.0: - version "5.3.1" - resolved 
"https://registry.yarnpkg.com/archiver/-/archiver-5.3.1.tgz#21e92811d6f09ecfce649fbefefe8c79e57cbbb6" - integrity sha512-8KyabkmbYrH+9ibcTScQ1xCJC/CGcugdVIwB+53f5sZziXgwUh3iXlAlANMxcZyDEfTHMe6+Z5FofV8nopXP7w== - dependencies: - archiver-utils "^2.1.0" - async "^3.2.3" - buffer-crc32 "^0.2.1" - readable-stream "^3.6.0" - readdir-glob "^1.0.0" - tar-stream "^2.2.0" - zip-stream "^4.1.0" - -arg@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.1.tgz#eb0c9a8f77786cad2af8ff2b862899842d7b6adb" - integrity sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA== - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -argparse@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -aria-query@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" - integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== - dependencies: - "@babel/runtime" "^7.10.2" - "@babel/runtime-corejs3" "^7.10.2" - -aria-query@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.0.0.tgz#210c21aaf469613ee8c9a62c7f86525e058db52c" - integrity sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg== - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity 
sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= - -array-flatten@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== - -array-includes@^3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.4.tgz#f5b493162c760f3539631f005ba2bb46acb45ba9" - integrity sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - get-intrinsic "^1.1.1" - is-string "^1.0.7" - -array-union@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== - -array.prototype.flat@^1.2.5: - version "1.3.0" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" - integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - -array.prototype.flatmap@^1.2.5: - version "1.3.0" - resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" - integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - -asap@~2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" - integrity 
sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= - -ast-types-flow@^0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" - integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= - -async-exit-hook@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/async-exit-hook/-/async-exit-hook-2.0.1.tgz#8bd8b024b0ec9b1c01cccb9af9db29bd717dfaf3" - integrity sha512-NW2cX8m1Q7KPA7a5M2ULQeZ2wR5qI5PAbw5L0UOMxdioVk9PMZ0h1TmyZEkPYrCvYjDlFICusOu1dlEKAAeXBw== - -async@^3.2.2, async@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.3.tgz#ac53dafd3f4720ee9e8a160628f18ea91df196c9" - integrity sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g== - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= - -at-least-node@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - -autoprefixer@^10.4.6: - version "10.4.7" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.7.tgz#1db8d195f41a52ca5069b7593be167618edbbedf" - integrity sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA== - dependencies: - browserslist "^4.20.3" - caniuse-lite "^1.0.30001335" - fraction.js "^4.2.0" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" - -axe-core@^4.3.5: - version "4.4.1" 
- resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.4.1.tgz#7dbdc25989298f9ad006645cd396782443757413" - integrity sha512-gd1kmb21kwNuWr6BQz8fv6GNECPBnUasepcoLbekws23NVBLODdsClRZ+bQ8+9Uomf3Sm3+Vwn0oYG9NvwnJCw== - -axobject-query@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" - integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== - -babel-jest@^27.4.2, babel-jest@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" - integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== - dependencies: - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - -babel-loader@^8.2.3: - version "8.2.5" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" - integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== - dependencies: - find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-dynamic-import-node@^2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" - integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== - dependencies: - object.assign "^4.1.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity 
sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" - integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.0.0" - "@types/babel__traverse" "^7.0.6" - -babel-plugin-macros@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" - integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== - dependencies: - "@babel/runtime" "^7.12.5" - cosmiconfig "^7.0.0" - resolve "^1.19.0" - -babel-plugin-named-asset-import@^0.3.8: - version "0.3.8" - resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" - integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== - -babel-plugin-polyfill-corejs2@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz#440f1b70ccfaabc6b676d196239b138f8a2cfba5" - integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w== - dependencies: - "@babel/compat-data" "^7.13.11" - "@babel/helper-define-polyfill-provider" "^0.3.1" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.5.0: - version "0.5.2" - resolved 
"https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz#aabe4b2fa04a6e038b688c5e55d44e78cd3a5f72" - integrity sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.1" - core-js-compat "^3.21.0" - -babel-plugin-polyfill-regenerator@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz#2c0678ea47c75c8cc2fbb1852278d8fb68233990" - integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.1" - -"babel-plugin-styled-components@>= 1.12.0": - version "2.0.7" - resolved "https://registry.yarnpkg.com/babel-plugin-styled-components/-/babel-plugin-styled-components-2.0.7.tgz#c81ef34b713f9da2b7d3f5550df0d1e19e798086" - integrity sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.16.0" - "@babel/helper-module-imports" "^7.16.0" - babel-plugin-syntax-jsx "^6.18.0" - lodash "^4.17.11" - picomatch "^2.3.0" - -babel-plugin-syntax-jsx@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" - integrity sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY= - -babel-plugin-transform-react-remove-prop-types@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" - integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" - integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== - dependencies: - babel-plugin-jest-hoist "^27.5.1" - babel-preset-current-node-syntax "^1.0.0" - -babel-preset-react-app@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" - integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== - dependencies: - "@babel/core" "^7.16.0" - "@babel/plugin-proposal-class-properties" "^7.16.0" - "@babel/plugin-proposal-decorators" "^7.16.4" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" - "@babel/plugin-proposal-numeric-separator" "^7.16.0" - "@babel/plugin-proposal-optional-chaining" "^7.16.0" - "@babel/plugin-proposal-private-methods" "^7.16.0" - 
"@babel/plugin-transform-flow-strip-types" "^7.16.0" - "@babel/plugin-transform-react-display-name" "^7.16.0" - "@babel/plugin-transform-runtime" "^7.16.4" - "@babel/preset-env" "^7.16.4" - "@babel/preset-react" "^7.16.0" - "@babel/preset-typescript" "^7.16.0" - "@babel/runtime" "^7.16.3" - babel-plugin-macros "^3.1.0" - babel-plugin-transform-react-remove-prop-types "^0.4.24" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -batch@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= - -bfj@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" - integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== - dependencies: - bluebird "^3.5.5" - check-types "^11.1.1" - hoopy "^0.1.4" - tryer "^1.0.1" - -big-integer@^1.6.16: - version "1.6.51" - resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" - integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -binary-extensions@^2.0.0: - 
version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bl@^4.0.3, bl@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - -bluebird@^3.5.5: - version "3.7.2" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -body-parser@1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -bonjour-service@^1.0.11: - version "1.0.12" - resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.0.12.tgz#28fbd4683f5f2e36feedb833e24ba661cac960c3" - integrity sha512-pMmguXYCu63Ug37DluMKEHdxc+aaIf/ay4YbF8Gxtba+9d3u+rmEWy61VK3Z3hp8Rskok3BunHYnG0dUHAsblw== - dependencies: - array-flatten "^2.1.2" - dns-equal "^1.0.0" - fast-deep-equal "^3.1.3" - multicast-dns "^7.2.4" - -boolbase@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= - -brace-expansion@^1.1.7: - version "1.1.11" - 
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -broadcast-channel@^3.4.1: - version "3.7.0" - resolved "https://registry.yarnpkg.com/broadcast-channel/-/broadcast-channel-3.7.0.tgz#2dfa5c7b4289547ac3f6705f9c00af8723889937" - integrity sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg== - dependencies: - "@babel/runtime" "^7.7.2" - detect-node "^2.1.0" - js-sha3 "0.8.0" - microseconds "0.2.0" - nano-time "1.0.0" - oblivious-set "1.0.0" - rimraf "3.0.2" - unload "2.2.0" - -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browser-stdout@1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" - integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== - -browserslist@^4.0.0, 
browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1, browserslist@^4.20.2: - version "4.20.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.2.tgz#567b41508757ecd904dab4d1c646c612cd3d4f88" - integrity sha512-CQOBCqp/9pDvDbx3xfMi+86pr4KXIf2FDkTTdeuYw8OxS9t898LA1Khq57gtufFILXpfgsSx5woNgsBgvGjpsA== - dependencies: - caniuse-lite "^1.0.30001317" - electron-to-chromium "^1.4.84" - escalade "^3.1.1" - node-releases "^2.0.2" - picocolors "^1.0.0" - -browserslist@^4.20.3: - version "4.20.3" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.3.tgz#eb7572f49ec430e054f56d52ff0ebe9be915f8bf" - integrity sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg== - dependencies: - caniuse-lite "^1.0.30001332" - electron-to-chromium "^1.4.118" - escalade "^3.1.1" - node-releases "^2.0.3" - picocolors "^1.0.0" - -bser@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3: - version "0.2.13" - resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" - integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI= - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -buffer@^5.2.1, buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity 
sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -builtin-modules@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" - integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== - -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -cac@^3.0.3: - version "3.0.4" - resolved "https://registry.yarnpkg.com/cac/-/cac-3.0.4.tgz#6d24ceec372efe5c9b798808bc7f49b47242a4ef" - integrity sha1-bSTO7Dcu/lybeYgIvH9JtHJCpO8= - dependencies: - camelcase-keys "^3.0.0" - chalk "^1.1.3" - indent-string "^3.0.0" - minimist "^1.2.0" - read-pkg-up "^1.0.1" - suffix "^0.1.0" - text-table "^0.2.0" - -cacheable-lookup@^5.0.3: - version "5.0.4" - resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" - integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== - -cacheable-request@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.2.tgz#ea0d0b889364a25854757301ca12b2da77f91d27" - integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^4.0.0" - lowercase-keys "^2.0.0" - normalize-url "^6.0.1" - responselike "^2.0.0" - -call-bind@^1.0.0, 
call-bind@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camel-case@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" - integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== - dependencies: - pascal-case "^3.1.2" - tslib "^2.0.3" - -camelcase-css@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" - integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== - -camelcase-keys@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-3.0.0.tgz#fc0c6c360363f7377e3793b9a16bccf1070c1ca4" - integrity sha1-/AxsNgNj9zd+N5O5oWvM8QcMHKQ= - dependencies: - camelcase "^3.0.0" - map-obj "^1.0.0" - -camelcase@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" - integrity sha1-MvxLn82vhF/N9+c7uXysImHwqwo= - -camelcase@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.0.0, camelcase@^6.2.0, camelcase@^6.2.1: - version "6.3.0" - 
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -camelize@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.0.tgz#164a5483e630fa4321e5af07020e531831b2609b" - integrity sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs= - -caniuse-api@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" - integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== - dependencies: - browserslist "^4.0.0" - caniuse-lite "^1.0.0" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001317: - version "1.0.30001328" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001328.tgz#0ed7a2ca65ec45872c613630201644237ba1e329" - integrity sha512-Ue55jHkR/s4r00FLNiX+hGMMuwml/QGqqzVeMQ5thUewznU2EdULFvI3JR7JJid6OrjJNfFvHY2G2dIjmRaDDQ== - -caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001335: - version "1.0.30001340" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001340.tgz#029a2f8bfc025d4820fafbfaa6259fd7778340c7" - integrity sha512-jUNz+a9blQTQVu4uFcn17uAD8IDizPzQkIKh3LCJfg9BkyIqExYYdyc/ZSlWUSKb8iYiXxKsxbv4zYSvkqjrxw== - -case-sensitive-paths-webpack-plugin@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" - integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== - -chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" 
- escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.0, chalk@^2.4.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" - integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -char-regex@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -char-regex@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" - integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== - -charcodes@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/charcodes/-/charcodes-0.2.0.tgz#5208d327e6cc05f99eb80ffc814707572d1f14e4" - integrity sha512-Y4kiDb+AM4Ecy58YkuZrrSRJBDQdQ2L+NyS1vHHFtNtUjgutcZfx3yp1dAONI/oPaPmyGfCLx5CxL+zauIMyKQ== - -chardet@^0.7.0: - version "0.7.0" - resolved 
"https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" - integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== - -check-types@^11.1.1: - version "11.1.2" - resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" - integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== - -chokidar@3.5.3, "chokidar@>=3.0.0 <4.0.0", chokidar@^3.0.0, chokidar@^3.4.2, chokidar@^3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -chrome-launcher@^0.15.0: - version "0.15.0" - resolved "https://registry.yarnpkg.com/chrome-launcher/-/chrome-launcher-0.15.0.tgz#5144a57aba0cf2f4cbe61dccefdde024fb3ca7fc" - integrity sha512-ZQqX5kb9H0+jy1OqLnWampfocrtSZaGl7Ny3F9GRha85o4odbL8x55paUzh51UC7cEmZ5obp3H2Mm70uC2PpRA== - dependencies: - "@types/node" "*" - escape-string-regexp "^4.0.0" - is-wsl "^2.2.0" - lighthouse-logger "^1.0.0" - -chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== - -ci-info@^3.2.0: - version 
"3.3.1" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.1.tgz#58331f6f472a25fe3a50a351ae3052936c2c7f32" - integrity sha512-SXgeMX9VwDe7iFFaEWkA5AstuER9YKqy4EhHqr4DVqkwmD9rpVimkMKWHdjn30Ja45txyjhSn63lVX69eVCckg== - -cjs-module-lexer@^1.0.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" - integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== - -clean-css@^5.2.2: - version "5.3.0" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.0.tgz#ad3d8238d5f3549e83d5f87205189494bc7cbb59" - integrity sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ== - dependencies: - source-map "~0.6.0" - -cli-cursor@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" - integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== - dependencies: - restore-cursor "^3.1.0" - -cli-spinners@^2.1.0, cli-spinners@^2.5.0: - version "2.6.1" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" - integrity sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g== - -cli-width@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" - integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== - -cliui@^7.0.2: - version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi 
"^7.0.0" - -clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" - -clone@^1.0.2: - version "1.0.4" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" - integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= - -clsx@^1.0.4, clsx@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188" - integrity sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA== - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= - -coa@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" - integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== - dependencies: - "@types/q" "^1.5.1" - chalk "^2.4.1" - q "^1.1.2" - -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= - -color-name@^1.1.4, color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colord@^2.9.1: - version "2.9.2" - resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.2.tgz#25e2bacbbaa65991422c07ea209e2089428effb1" - integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ== - -colorette@^2.0.10: - version "2.0.16" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.16.tgz#713b9af84fdb000139f04546bd4a93f62a5085da" - integrity sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@2, commander@^2.20.0, commander@^2.20.3: - version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity 
sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -commander@^8.3.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - -commander@^9.0.0: - version "9.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-9.2.0.tgz#6e21014b2ed90d8b7c9647230d8b7a94a4a419a9" - integrity sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w== - -common-path-prefix@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" - integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== - -common-tags@^1.8.0: - version "1.8.2" - resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" - integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== - -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= - -component-emitter@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -compress-brotli@^1.3.6: - version "1.3.6" - resolved "https://registry.yarnpkg.com/compress-brotli/-/compress-brotli-1.3.6.tgz#64bd6f21f4f3e9841dbac392f4c29218caf5e9d9" - integrity sha512-au99/GqZtUtiCBliqLFbWlhnCxn+XSYjwZ77q6mKN4La4qOXDoLVPZ50iXr0WmAyMxl8yqoq3Yq4OeQNPPkyeQ== - dependencies: - "@types/json-buffer" "~3.0.0" - 
json-buffer "~3.0.1" - -compress-commons@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.1.tgz#df2a09a7ed17447642bad10a85cc9a19e5c42a7d" - integrity sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ== - dependencies: - buffer-crc32 "^0.2.13" - crc32-stream "^4.0.2" - normalize-path "^3.0.0" - readable-stream "^3.6.0" - -compressible@~2.0.16: - version "2.0.18" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -confusing-browser-globals@^1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" - integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== - -connect-history-api-fallback@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" - integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== - 
-content-disposition@0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= - -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - -core-js-compat@^3.20.2, core-js-compat@^3.21.0: - version "3.21.1" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.21.1.tgz#cac369f67c8d134ff8f9bd1623e3bc2c42068c82" - integrity sha512-gbgX5AUvMb8gwxC7FLVWYT7Kkgu/y7+h/h1X43yJkNqhlK2fuYyQimqvKGNZFAY6CKii/GFKJ2cp/1/42TN36g== - dependencies: - browserslist "^4.19.1" - semver "7.0.0" - -core-js-compat@^3.22.1: - version "3.22.5" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.22.5.tgz#7fffa1d20cb18405bd22756ca1353c6f1a0e8614" - integrity 
sha512-rEF75n3QtInrYICvJjrAgV03HwKiYvtKHdPtaba1KucG+cNZ4NJnH9isqt979e67KZlhpbCOTwnsvnIr+CVeOg== - dependencies: - browserslist "^4.20.3" - semver "7.0.0" - -core-js-pure@^3.20.2: - version "3.21.1" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.21.1.tgz#8c4d1e78839f5f46208de7230cebfb72bc3bdb51" - integrity sha512-12VZfFIu+wyVbBebyHmRTuEE/tZrB4tJToWcwAMcsp3h4+sHR+fMJWbKpYiCRWlhFBq+KNyO8rIV9rTkeVmznQ== - -core-js-pure@^3.8.1: - version "3.22.5" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.22.5.tgz#bdee0ed2f9b78f2862cda4338a07b13a49b6c9a9" - integrity sha512-8xo9R00iYD7TcV7OrC98GwxiUEAabVWO3dix+uyWjnYrx9fyASLlIX+f/3p5dW5qByaP2bcZ8X/T47s55et/tA== - -core-js@^3.19.2: - version "3.22.5" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.22.5.tgz#a5f5a58e663d5c0ebb4e680cd7be37536fb2a9cf" - integrity sha512-VP/xYuvJ0MJWRAobcmQ8F2H6Bsn+s7zqAAjFaHGBMc5AQm7zaelhD1LGduFn2EehEcQcU+br6t+fwbpQ5d1ZWA== - -core-util-is@~1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -cosmiconfig@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" - integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.1.0" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.7.2" - -cosmiconfig@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" - integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - 
path-type "^4.0.0" - yaml "^1.10.0" - -crc-32@^1.2.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.2.tgz#3cad35a934b8bf71f25ca524b6da51fb7eace2ff" - integrity sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ== - -crc32-stream@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-4.0.2.tgz#c922ad22b38395abe9d3870f02fa8134ed709007" - integrity sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w== - dependencies: - crc-32 "^1.2.0" - readable-stream "^3.4.0" - -cronstrue@1.92.0: - version "1.92.0" - resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.92.0.tgz#3547fd0950cca488f3edd321d14376dcd1db59f0" - integrity sha512-AKptYmOJVXlxkzmHH6HTYJyuOr6t8FHHl4GJ+19yVLN7fMXRyNg3g67CzqdVYSWxaR2A4iOqvfxM/dsuyJpH9A== - -cronstrue@^1.72.0: - version "1.125.0" - resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.125.0.tgz#8030816d033d00caade9b2a9f9b71e69175bcf42" - integrity sha512-qkC5mVbVGuuyBVXmam5anaRtbLcgfBUKajoyZqCdf/XBdgF43PsLSEm8eEi2dsI3YbqDPbLSH2mWNzM1dVqHgQ== - -cross-fetch@3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" - integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== - dependencies: - node-fetch "2.6.7" - -cross-spawn@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-4.0.2.tgz#7b9247621c23adfdd3856004a823cbe397424d41" - integrity sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE= - dependencies: - lru-cache "^4.0.1" - which "^1.2.9" - -cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - -css-blank-pseudo@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" - integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== - dependencies: - postcss-selector-parser "^6.0.9" - -css-color-keywords@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/css-color-keywords/-/css-color-keywords-1.0.0.tgz#fea2616dc676b2962686b3af8dbdbe180b244e05" - integrity sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU= - -css-declaration-sorter@^6.2.2: - version "6.2.2" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.2.2.tgz#bfd2f6f50002d6a3ae779a87d3a0c5d5b10e0f02" - integrity sha512-Ufadglr88ZLsrvS11gjeu/40Lw74D9Am/Jpr3LlYm5Q4ZP5KdlUhG+6u2EjyXeZcxmZ2h1ebCKngDjolpeLHpg== - -css-has-pseudo@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" - integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== - dependencies: - postcss-selector-parser "^6.0.9" - -css-loader@^6.5.1: - version "6.7.1" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" - integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== - dependencies: - icss-utils "^5.1.0" - postcss "^8.4.7" - 
postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" - postcss-modules-scope "^3.0.0" - postcss-modules-values "^4.0.0" - postcss-value-parser "^4.2.0" - semver "^7.3.5" - -css-minimizer-webpack-plugin@^3.2.0: - version "3.4.1" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" - integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== - dependencies: - cssnano "^5.0.6" - jest-worker "^27.0.2" - postcss "^8.3.5" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - -css-prefers-color-scheme@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" - integrity sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== - -css-select-base-adapter@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" - integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== - -css-select@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" - integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== - dependencies: - boolbase "^1.0.0" - css-what "^3.2.1" - domutils "^1.7.0" - nth-check "^1.0.2" - -css-select@^4.1.3: - version "4.3.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" - integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== - dependencies: - boolbase "^1.0.0" - css-what "^6.0.1" - domhandler "^4.3.1" - 
domutils "^2.8.0" - nth-check "^2.0.1" - -css-shorthand-properties@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/css-shorthand-properties/-/css-shorthand-properties-1.1.1.tgz#1c808e63553c283f289f2dd56fcee8f3337bd935" - integrity sha512-Md+Juc7M3uOdbAFwOYlTrccIZ7oCFuzrhKYQjdeUEW/sE1hv17Jp/Bws+ReOPpGVBTYCBoYo+G17V5Qo8QQ75A== - -css-to-react-native@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/css-to-react-native/-/css-to-react-native-3.0.0.tgz#62dbe678072a824a689bcfee011fc96e02a7d756" - integrity sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ== - dependencies: - camelize "^1.0.0" - css-color-keywords "^1.0.0" - postcss-value-parser "^4.0.2" - -css-tree@1.0.0-alpha.37: - version "1.0.0-alpha.37" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" - integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== - dependencies: - mdn-data "2.0.4" - source-map "^0.6.1" - -css-tree@^1.1.2, css-tree@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== - dependencies: - mdn-data "2.0.14" - source-map "^0.6.1" - -css-value@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/css-value/-/css-value-0.0.1.tgz#5efd6c2eea5ea1fd6b6ac57ec0427b18452424ea" - integrity sha1-Xv1sLupeof1rasV+wEJ7GEUkJOo= - -css-vendor@^2.0.8: - version "2.0.8" - resolved "https://registry.yarnpkg.com/css-vendor/-/css-vendor-2.0.8.tgz#e47f91d3bd3117d49180a3c935e62e3d9f7f449d" - integrity sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ== - dependencies: - "@babel/runtime" "^7.8.3" - is-in-browser "^1.0.2" - -css-what@^3.2.1: - version "3.4.2" - resolved 
"https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" - integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== - -css-what@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" - integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== - -css.escape@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" - integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= - -css@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" - integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== - dependencies: - inherits "^2.0.4" - source-map "^0.6.1" - source-map-resolve "^0.6.0" - -cssdb@^6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-6.6.1.tgz#2637fdc57eab452849488de7e8d961ec06f2fe8f" - integrity sha512-0/nZEYfp8SFEzJkMud8NxZJsGfD7RHDJti6GRBLZptIwAzco6RTx1KgwFl4mGWsYS0ZNbCrsY9QryhQ4ldF3Mg== - -cssesc@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" - integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== - -cssfilter@0.0.10: - version "0.0.10" - resolved "https://registry.yarnpkg.com/cssfilter/-/cssfilter-0.0.10.tgz#c6d2672632a2e5c83e013e6864a42ce8defd20ae" - integrity sha1-xtJnJjKi5cg+AT5oZKQs6N79IK4= - -cssnano-preset-default@^5.2.7: - version "5.2.7" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.7.tgz#791e3603fb8f1b46717ac53b47e3c418e950f5f3" - integrity 
sha512-JiKP38ymZQK+zVKevphPzNSGHSlTI+AOwlasoSRtSVMUU285O7/6uZyd5NbW92ZHp41m0sSHe6JoZosakj63uA== - dependencies: - css-declaration-sorter "^6.2.2" - cssnano-utils "^3.1.0" - postcss-calc "^8.2.3" - postcss-colormin "^5.3.0" - postcss-convert-values "^5.1.0" - postcss-discard-comments "^5.1.1" - postcss-discard-duplicates "^5.1.0" - postcss-discard-empty "^5.1.1" - postcss-discard-overridden "^5.1.0" - postcss-merge-longhand "^5.1.4" - postcss-merge-rules "^5.1.1" - postcss-minify-font-values "^5.1.0" - postcss-minify-gradients "^5.1.1" - postcss-minify-params "^5.1.2" - postcss-minify-selectors "^5.2.0" - postcss-normalize-charset "^5.1.0" - postcss-normalize-display-values "^5.1.0" - postcss-normalize-positions "^5.1.0" - postcss-normalize-repeat-style "^5.1.0" - postcss-normalize-string "^5.1.0" - postcss-normalize-timing-functions "^5.1.0" - postcss-normalize-unicode "^5.1.0" - postcss-normalize-url "^5.1.0" - postcss-normalize-whitespace "^5.1.1" - postcss-ordered-values "^5.1.1" - postcss-reduce-initial "^5.1.0" - postcss-reduce-transforms "^5.1.0" - postcss-svgo "^5.1.0" - postcss-unique-selectors "^5.1.1" - -cssnano-utils@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" - integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== - -cssnano@^5.0.6: - version "5.1.7" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.7.tgz#99858bef6c76c9240f0cdc9239570bc7db8368be" - integrity sha512-pVsUV6LcTXif7lvKKW9ZrmX+rGRzxkEdJuVJcp5ftUjWITgwam5LMZOgaTvUrWPkcORBey6he7JKb4XAJvrpKg== - dependencies: - cssnano-preset-default "^5.2.7" - lilconfig "^2.0.3" - yaml "^1.10.2" - -csso@^4.0.2, csso@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" - integrity 
sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== - dependencies: - css-tree "^1.1.2" - -cssom@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -csstype@^2.5.2: - version "2.6.20" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.20.tgz#9229c65ea0b260cf4d3d997cb06288e36a8d6dda" - integrity sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA== - -csstype@^3.0.2: - version "3.0.11" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.11.tgz#d66700c5eacfac1940deb4e3ee5642792d85cd33" - integrity sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw== - -d3-array@1, d3-array@^1.1.1, d3-array@^1.2.0: - version "1.2.4" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.4.tgz#635ce4d5eea759f6f605863dbcfc30edc737f71f" - integrity sha512-KHW6M86R+FUPYGb3R5XiYjXPq7VzwxZ22buHhAEVG5ztoEcZZMLov530mmccaqA1GghZArjQV46fuc8kUqhhHw== - -d3-array@2, d3-array@^2.3.0, d3-array@^2.5.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81" - integrity 
sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ== - dependencies: - internmap "^1.0.0" - -d3-axis@1: - version "1.0.12" - resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-1.0.12.tgz#cdf20ba210cfbb43795af33756886fb3638daac9" - integrity sha512-ejINPfPSNdGFKEOAtnBtdkpr24c4d4jsei6Lg98mxf424ivoDP2956/5HDpIAtmHo85lqT4pruy+zEgvRUBqaQ== - -d3-axis@2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-2.1.0.tgz#978db534092711117d032fad5d733d206307f6a0" - integrity sha512-z/G2TQMyuf0X3qP+Mh+2PimoJD41VOCjViJzT0BHeL/+JQAofkiWZbWxlwFGb1N8EN+Cl/CW+MUKbVzr1689Cw== - -d3-brush@1: - version "1.1.6" - resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-1.1.6.tgz#b0a22c7372cabec128bdddf9bddc058592f89e9b" - integrity sha512-7RW+w7HfMCPyZLifTz/UnJmI5kdkXtpCbombUSs8xniAyo0vIbrDzDwUJB6eJOgl9u5DQOt2TQlYumxzD1SvYA== - dependencies: - d3-dispatch "1" - d3-drag "1" - d3-interpolate "1" - d3-selection "1" - d3-transition "1" - -d3-brush@2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-2.1.0.tgz#adadfbb104e8937af142e9a6e2028326f0471065" - integrity sha512-cHLLAFatBATyIKqZOkk/mDHUbzne2B3ZwxkzMHvFTCZCmLaXDpZRihQSn8UNXTkGD/3lb/W2sQz0etAftmHMJQ== - dependencies: - d3-dispatch "1 - 2" - d3-drag "2" - d3-interpolate "1 - 2" - d3-selection "2" - d3-transition "2" - -d3-chord@1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-1.0.6.tgz#309157e3f2db2c752f0280fedd35f2067ccbb15f" - integrity sha512-JXA2Dro1Fxw9rJe33Uv+Ckr5IrAa74TlfDEhE/jfLOaXegMQFQTAgAw9WnZL8+HxVBRXaRGCkrNU7pJeylRIuA== - dependencies: - d3-array "1" - d3-path "1" - -d3-chord@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-2.0.0.tgz#32491b5665391180560f738e5c1ccd1e3c47ebae" - integrity sha512-D5PZb7EDsRNdGU4SsjQyKhja8Zgu+SHZfUSO5Ls8Wsn+jsAKUUGkcshLxMg9HDFxG3KqavGWaWkJ8EpU8ojuig== - dependencies: - d3-path "1 - 2" - -d3-collection@1: - version "1.0.7" - 
resolved "https://registry.yarnpkg.com/d3-collection/-/d3-collection-1.0.7.tgz#349bd2aa9977db071091c13144d5e4f16b5b310e" - integrity sha512-ii0/r5f4sjKNTfh84Di+DpztYwqKhEyUlKoPrzUFfeSkWxjW49xU2QzO9qrPrNkpdI0XJkfzvmTu8V2Zylln6A== - -d3-color@1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-1.4.1.tgz#c52002bf8846ada4424d55d97982fef26eb3bc8a" - integrity sha512-p2sTHSLCJI2QKunbGb7ocOh7DgTAn8IrLx21QRc/BSnodXM4sv6aLQlnfpvehFMLZEfBc6g9pH9SWQccFYfJ9Q== - -"d3-color@1 - 2", d3-color@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-2.0.0.tgz#8d625cab42ed9b8f601a1760a389f7ea9189d62e" - integrity sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ== - -d3-contour@1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-1.3.2.tgz#652aacd500d2264cb3423cee10db69f6f59bead3" - integrity sha512-hoPp4K/rJCu0ladiH6zmJUEz6+u3lgR+GSm/QdM2BBvDraU39Vr7YdDCicJcxP1z8i9B/2dJLgDC1NcvlF8WCg== - dependencies: - d3-array "^1.1.1" - -d3-contour@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-2.0.0.tgz#80ee834988563e3bea9d99ddde72c0f8c089ea40" - integrity sha512-9unAtvIaNk06UwqBmvsdHX7CZ+NPDZnn8TtNH1myW93pWJkhsV25JcgnYAu0Ck5Veb1DHiCv++Ic5uvJ+h50JA== - dependencies: - d3-array "2" - -d3-delaunay@5: - version "5.3.0" - resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-5.3.0.tgz#b47f05c38f854a4e7b3cea80e0bb12e57398772d" - integrity sha512-amALSrOllWVLaHTnDLHwMIiz0d1bBu9gZXd1FiLfXf8sHcX9jrcj81TVZOqD4UX7MgBZZ07c8GxzEgBpJqc74w== - dependencies: - delaunator "4" - -d3-dispatch@1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-1.0.6.tgz#00d37bcee4dd8cd97729dd893a0ac29caaba5d58" - integrity sha512-fVjoElzjhCEy+Hbn8KygnmMS7Or0a9sI2UzGwoB7cCtvI1XpVN9GpoYlnb3xt2YV66oXYb1fLJ8GMvP4hdU1RA== - -"d3-dispatch@1 - 2", d3-dispatch@2: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-2.0.0.tgz#8a18e16f76dd3fcaef42163c97b926aa9b55e7cf" - integrity sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA== - -d3-drag@1: - version "1.2.5" - resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-1.2.5.tgz#2537f451acd39d31406677b7dc77c82f7d988f70" - integrity sha512-rD1ohlkKQwMZYkQlYVCrSFxsWPzI97+W+PaEIBNTMxRuxz9RF0Hi5nJWHGVJ3Om9d2fRTe1yOBINJyy/ahV95w== - dependencies: - d3-dispatch "1" - d3-selection "1" - -d3-drag@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-2.0.0.tgz#9eaf046ce9ed1c25c88661911c1d5a4d8eb7ea6d" - integrity sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w== - dependencies: - d3-dispatch "1 - 2" - d3-selection "2" - -d3-dsv@1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-1.2.0.tgz#9d5f75c3a5f8abd611f74d3f5847b0d4338b885c" - integrity sha512-9yVlqvZcSOMhCYzniHE7EVUws7Fa1zgw+/EAV2BxJoG3ME19V6BQFBwI855XQDsxyOuG7NibqRMTtiF/Qup46g== - dependencies: - commander "2" - iconv-lite "0.4" - rw "1" - -"d3-dsv@1 - 2", d3-dsv@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-2.0.0.tgz#b37b194b6df42da513a120d913ad1be22b5fe7c5" - integrity sha512-E+Pn8UJYx9mViuIUkoc93gJGGYut6mSDKy2+XaPwccwkRGlR+LO97L2VCCRjQivTwLHkSnAJG7yo00BWY6QM+w== - dependencies: - commander "2" - iconv-lite "0.4" - rw "1" - -d3-ease@1: - version "1.0.7" - resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-1.0.7.tgz#9a834890ef8b8ae8c558b2fe55bd57f5993b85e2" - integrity sha512-lx14ZPYkhNx0s/2HX5sLFUI3mbasHjSSpwO/KaaNACweVwxUruKyWVcb293wMv1RqTPZyZ8kSZ2NogUZNcLOFQ== - -"d3-ease@1 - 2", d3-ease@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-2.0.0.tgz#fd1762bfca00dae4bacea504b1d628ff290ac563" - integrity sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ== - -d3-fetch@1: - 
version "1.2.0" - resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-1.2.0.tgz#15ce2ecfc41b092b1db50abd2c552c2316cf7fc7" - integrity sha512-yC78NBVcd2zFAyR/HnUiBS7Lf6inSCoWcSxFfw8FYL7ydiqe80SazNwoffcqOfs95XaLo7yebsmQqDKSsXUtvA== - dependencies: - d3-dsv "1" - -d3-fetch@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-2.0.0.tgz#ecd7ef2128d9847a3b41b548fec80918d645c064" - integrity sha512-TkYv/hjXgCryBeNKiclrwqZH7Nb+GaOwo3Neg24ZVWA3MKB+Rd+BY84Nh6tmNEMcjUik1CSUWjXYndmeO6F7sw== - dependencies: - d3-dsv "1 - 2" - -d3-force@1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-1.2.1.tgz#fd29a5d1ff181c9e7f0669e4bd72bdb0e914ec0b" - integrity sha512-HHvehyaiUlVo5CxBJ0yF/xny4xoaxFxDnBXNvNcfW9adORGZfyNF1dj6DGLKyk4Yh3brP/1h3rnDzdIAwL08zg== - dependencies: - d3-collection "1" - d3-dispatch "1" - d3-quadtree "1" - d3-timer "1" - -d3-force@2: - version "2.1.1" - resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-2.1.1.tgz#f20ccbf1e6c9e80add1926f09b51f686a8bc0937" - integrity sha512-nAuHEzBqMvpFVMf9OX75d00OxvOXdxY+xECIXjW6Gv8BRrXu6gAWbv/9XKrvfJ5i5DCokDW7RYE50LRoK092ew== - dependencies: - d3-dispatch "1 - 2" - d3-quadtree "1 - 2" - d3-timer "1 - 2" - -d3-format@1: - version "1.4.5" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-1.4.5.tgz#374f2ba1320e3717eb74a9356c67daee17a7edb4" - integrity sha512-J0piedu6Z8iB6TbIGfZgDzfXxUFN3qQRMofy2oPdXzQibYGqPB/9iMcxr/TGalU+2RsyDO+U4f33id8tbnSRMQ== - -"d3-format@1 - 2", d3-format@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-2.0.0.tgz#a10bcc0f986c372b729ba447382413aabf5b0767" - integrity sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA== - -d3-geo@1: - version "1.12.1" - resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-1.12.1.tgz#7fc2ab7414b72e59fbcbd603e80d9adc029b035f" - integrity 
sha512-XG4d1c/UJSEX9NfU02KwBL6BYPj8YKHxgBEw5om2ZnTRSbIcego6dhHwcxuSR3clxh0EpE38os1DVPOmnYtTPg== - dependencies: - d3-array "1" - -d3-geo@2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-2.0.2.tgz#c065c1b71fe8c5f1be657e5f43d9bdd010383c40" - integrity sha512-8pM1WGMLGFuhq9S+FpPURxic+gKzjluCD/CHTuUF3mXMeiCo0i6R0tO1s4+GArRFde96SLcW/kOFRjoAosPsFA== - dependencies: - d3-array "^2.5.0" - -d3-hierarchy@1: - version "1.1.9" - resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-1.1.9.tgz#2f6bee24caaea43f8dc37545fa01628559647a83" - integrity sha512-j8tPxlqh1srJHAtxfvOUwKNYJkQuBFdM1+JAUfq6xqH5eAqf93L7oG1NVqDa4CpFZNvnNKtCYEUC8KY9yEn9lQ== - -d3-hierarchy@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-2.0.0.tgz#dab88a58ca3e7a1bc6cab390e89667fcc6d20218" - integrity sha512-SwIdqM3HxQX2214EG9GTjgmCc/mbSx4mQBn+DuEETubhOw6/U3fmnji4uCVrmzOydMHSO1nZle5gh6HB/wdOzw== - -d3-interpolate@1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-1.4.0.tgz#526e79e2d80daa383f9e0c1c1c7dcc0f0583e987" - integrity sha512-V9znK0zc3jOPV4VD2zZn0sDhZU3WAE2bmlxdIwwQPPzPjvyLkd8B3JUVdS1IDUFDkWZ72c9qnv1GK2ZagTZ8EA== - dependencies: - d3-color "1" - -"d3-interpolate@1 - 2", "d3-interpolate@1.2.0 - 2", d3-interpolate@2: - version "2.0.1" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-2.0.1.tgz#98be499cfb8a3b94d4ff616900501a64abc91163" - integrity sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ== - dependencies: - d3-color "1 - 2" - -d3-path@1: - version "1.0.9" - resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf" - integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== - -"d3-path@1 - 2", d3-path@2: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/d3-path/-/d3-path-2.0.0.tgz#55d86ac131a0548adae241eebfb56b4582dd09d8" - integrity sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA== - -d3-polygon@1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-1.0.6.tgz#0bf8cb8180a6dc107f518ddf7975e12abbfbd38e" - integrity sha512-k+RF7WvI08PC8reEoXa/w2nSg5AUMTi+peBD9cmFc+0ixHfbs4QmxxkarVal1IkVkgxVuk9JSHhJURHiyHKAuQ== - -d3-polygon@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-2.0.0.tgz#13608ef042fbec625ba1598327564f03c0396d8e" - integrity sha512-MsexrCK38cTGermELs0cO1d79DcTsQRN7IWMJKczD/2kBjzNXxLUWP33qRF6VDpiLV/4EI4r6Gs0DAWQkE8pSQ== - -d3-quadtree@1: - version "1.0.7" - resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-1.0.7.tgz#ca8b84df7bb53763fe3c2f24bd435137f4e53135" - integrity sha512-RKPAeXnkC59IDGD0Wu5mANy0Q2V28L+fNe65pOCXVdVuTJS3WPKaJlFHer32Rbh9gIo9qMuJXio8ra4+YmIymA== - -"d3-quadtree@1 - 2", d3-quadtree@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-2.0.0.tgz#edbad045cef88701f6fee3aee8e93fb332d30f9d" - integrity sha512-b0Ed2t1UUalJpc3qXzKi+cPGxeXRr4KU9YSlocN74aTzp6R/Ud43t79yLLqxHRWZfsvWXmbDWPpoENK1K539xw== - -d3-random@1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-1.1.2.tgz#2833be7c124360bf9e2d3fd4f33847cfe6cab291" - integrity sha512-6AK5BNpIFqP+cx/sreKzNjWbwZQCSUatxq+pPRmFIQaWuoD+NrbVWw7YWpHiXpCQ/NanKdtGDuB+VQcZDaEmYQ== - -d3-random@2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-2.2.2.tgz#5eebd209ef4e45a2b362b019c1fb21c2c98cbb6e" - integrity sha512-0D9P8TRj6qDAtHhRQn6EfdOtHMfsUWanl3yb/84C4DqpZ+VsgfI5iTVRNRbELCfNvRfpMr8OrqqUTQ6ANGCijw== - -d3-scale-chromatic@1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.5.0.tgz#54e333fc78212f439b14641fb55801dd81135a98" - integrity 
sha512-ACcL46DYImpRFMBcpk9HhtIyC7bTBR4fNOPxwVSl0LfulDAwyiHyPOTqcDG1+t5d4P9W7t/2NAuWu59aKko/cg== - dependencies: - d3-color "1" - d3-interpolate "1" - -d3-scale-chromatic@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-2.0.0.tgz#c13f3af86685ff91323dc2f0ebd2dabbd72d8bab" - integrity sha512-LLqy7dJSL8yDy7NRmf6xSlsFZ6zYvJ4BcWFE4zBrOPnQERv9zj24ohnXKRbyi9YHnYV+HN1oEO3iFK971/gkzA== - dependencies: - d3-color "1 - 2" - d3-interpolate "1 - 2" - -d3-scale@2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-2.2.2.tgz#4e880e0b2745acaaddd3ede26a9e908a9e17b81f" - integrity sha512-LbeEvGgIb8UMcAa0EATLNX0lelKWGYDQiPdHj+gLblGVhGLyNbaCn3EvrJf0A3Y/uOOU5aD6MTh5ZFCdEwGiCw== - dependencies: - d3-array "^1.2.0" - d3-collection "1" - d3-format "1" - d3-interpolate "1" - d3-time "1" - d3-time-format "2" - -d3-scale@3: - version "3.3.0" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-3.3.0.tgz#28c600b29f47e5b9cd2df9749c206727966203f3" - integrity sha512-1JGp44NQCt5d1g+Yy+GeOnZP7xHo0ii8zsQp6PGzd+C1/dl0KGsp9A7Mxwp+1D1o4unbTTxVdU/ZOIEBoeZPbQ== - dependencies: - d3-array "^2.3.0" - d3-format "1 - 2" - d3-interpolate "1.2.0 - 2" - d3-time "^2.1.1" - d3-time-format "2 - 3" - -d3-selection@1, d3-selection@^1.1.0: - version "1.4.2" - resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-1.4.2.tgz#dcaa49522c0dbf32d6c1858afc26b6094555bc5c" - integrity sha512-SJ0BqYihzOjDnnlfyeHT0e30k0K1+5sR3d5fNueCNeuhZTnGw4M4o8mqJchSwgKMXCNFo+e2VTChiSJ0vYtXkg== - -d3-selection@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-2.0.0.tgz#94a11638ea2141b7565f883780dabc7ef6a61066" - integrity sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA== - -d3-shape@1: - version "1.3.7" - resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.3.7.tgz#df63801be07bc986bc54f63789b4fe502992b5d7" - integrity 
sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw== - dependencies: - d3-path "1" - -d3-shape@2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-2.1.0.tgz#3b6a82ccafbc45de55b57fcf956c584ded3b666f" - integrity sha512-PnjUqfM2PpskbSLTJvAzp2Wv4CZsnAgTfcVRTwW03QR3MkXF8Uo7B1y/lWkAsmbKwuecto++4NlsYcvYpXpTHA== - dependencies: - d3-path "1 - 2" - -d3-time-format@2: - version "2.3.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-2.3.0.tgz#107bdc028667788a8924ba040faf1fbccd5a7850" - integrity sha512-guv6b2H37s2Uq/GefleCDtbe0XZAuy7Wa49VGkPVPMfLL9qObgBST3lEHJBMUp8S7NdLQAGIvr2KXk8Hc98iKQ== - dependencies: - d3-time "1" - -"d3-time-format@2 - 3", d3-time-format@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-3.0.0.tgz#df8056c83659e01f20ac5da5fdeae7c08d5f1bb6" - integrity sha512-UXJh6EKsHBTjopVqZBhFysQcoXSv/5yLONZvkQ5Kk3qbwiUYkdX17Xa1PT6U1ZWXGGfB1ey5L8dKMlFq2DO0Ag== - dependencies: - d3-time "1 - 2" - -d3-time@1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.1.0.tgz#b1e19d307dae9c900b7e5b25ffc5dcc249a8a0f1" - integrity sha512-Xh0isrZ5rPYYdqhAVk8VLnMEidhz5aP7htAADH6MfzgmmicPkTo8LhkLxci61/lCB7n7UmE3bN0leRt+qvkLxA== - -"d3-time@1 - 2", d3-time@2, d3-time@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-2.1.1.tgz#e9d8a8a88691f4548e68ca085e5ff956724a6682" - integrity sha512-/eIQe/eR4kCQwq7yxi7z4c6qEXf2IYGcjoWB5OOQy4Tq9Uv39/947qlDcN2TLkiTzQWzvnsuYPB9TrWaNfipKQ== - dependencies: - d3-array "2" - -d3-timer@1: - version "1.0.10" - resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-1.0.10.tgz#dfe76b8a91748831b13b6d9c793ffbd508dd9de5" - integrity sha512-B1JDm0XDaQC+uvo4DT79H0XmBskgS3l6Ve+1SBCfxgmtIb1AVrPIoqd+nPSv+loMX8szQ0sVUhGngL7D5QPiXw== - -"d3-timer@1 - 2", d3-timer@2: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/d3-timer/-/d3-timer-2.0.0.tgz#055edb1d170cfe31ab2da8968deee940b56623e6" - integrity sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA== - -d3-transition@1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-1.3.2.tgz#a98ef2151be8d8600543434c1ca80140ae23b398" - integrity sha512-sc0gRU4PFqZ47lPVHloMn9tlPcv8jxgOQg+0zjhfZXMQuvppjG6YuwdMBE0TuqCZjeJkLecku/l9R0JPcRhaDA== - dependencies: - d3-color "1" - d3-dispatch "1" - d3-ease "1" - d3-interpolate "1" - d3-selection "^1.1.0" - d3-timer "1" - -d3-transition@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-2.0.0.tgz#366ef70c22ef88d1e34105f507516991a291c94c" - integrity sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog== - dependencies: - d3-color "1 - 2" - d3-dispatch "1 - 2" - d3-ease "1 - 2" - d3-interpolate "1 - 2" - d3-timer "1 - 2" - -d3-voronoi@1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/d3-voronoi/-/d3-voronoi-1.1.4.tgz#dd3c78d7653d2bb359284ae478645d95944c8297" - integrity sha512-dArJ32hchFsrQ8uMiTBLq256MpnZjeuBtdHpaDlYuQyjU0CVzCJl/BVW+SkszaAeH95D/8gxqAhgx0ouAWAfRg== - -d3-zoom@1: - version "1.8.3" - resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-1.8.3.tgz#b6a3dbe738c7763121cd05b8a7795ffe17f4fc0a" - integrity sha512-VoLXTK4wvy1a0JpH2Il+F2CiOhVu7VRXWF5M/LroMIh3/zBAC3WAt7QoIvPibOavVo20hN6/37vwAsdBejLyKQ== - dependencies: - d3-dispatch "1" - d3-drag "1" - d3-interpolate "1" - d3-selection "1" - d3-transition "1" - -d3-zoom@2: - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-2.0.0.tgz#f04d0afd05518becce879d04709c47ecd93fba54" - integrity sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw== - dependencies: - d3-dispatch "1 - 2" - d3-drag "2" - d3-interpolate "1 - 2" - d3-selection "2" - d3-transition "2" - -d3@^5.14: - version 
"5.16.0" - resolved "https://registry.yarnpkg.com/d3/-/d3-5.16.0.tgz#9c5e8d3b56403c79d4ed42fbd62f6113f199c877" - integrity sha512-4PL5hHaHwX4m7Zr1UapXW23apo6pexCgdetdJ5kTmADpG/7T9Gkxw0M0tf/pjoB63ezCCm0u5UaFYy2aMt0Mcw== - dependencies: - d3-array "1" - d3-axis "1" - d3-brush "1" - d3-chord "1" - d3-collection "1" - d3-color "1" - d3-contour "1" - d3-dispatch "1" - d3-drag "1" - d3-dsv "1" - d3-ease "1" - d3-fetch "1" - d3-force "1" - d3-format "1" - d3-geo "1" - d3-hierarchy "1" - d3-interpolate "1" - d3-path "1" - d3-polygon "1" - d3-quadtree "1" - d3-random "1" - d3-scale "2" - d3-scale-chromatic "1" - d3-selection "1" - d3-shape "1" - d3-time "1" - d3-time-format "2" - d3-timer "1" - d3-transition "1" - d3-voronoi "1" - d3-zoom "1" - -d3@^6.2.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/d3/-/d3-6.7.0.tgz#adac458597b4a2cafe8e08cf30948af0c95cd61f" - integrity sha512-hNHRhe+yCDLUG6Q2LwvR/WdNFPOJQ5VWqsJcwIYVeI401+d2/rrCjxSXkiAdIlpx7/73eApFB4Olsmh3YN7a6g== - dependencies: - d3-array "2" - d3-axis "2" - d3-brush "2" - d3-chord "2" - d3-color "2" - d3-contour "2" - d3-delaunay "5" - d3-dispatch "2" - d3-drag "2" - d3-dsv "2" - d3-ease "2" - d3-fetch "2" - d3-force "2" - d3-format "2" - d3-geo "2" - d3-hierarchy "2" - d3-interpolate "2" - d3-path "2" - d3-polygon "2" - d3-quadtree "2" - d3-random "2" - d3-scale "3" - d3-scale-chromatic "2" - d3-selection "2" - d3-shape "2" - d3-time "2" - d3-time-format "3" - d3-timer "2" - d3-transition "2" - d3-zoom "2" - -dagre-d3@^0.6.4: - version "0.6.4" - resolved "https://registry.yarnpkg.com/dagre-d3/-/dagre-d3-0.6.4.tgz#0728d5ce7f177ca2337df141ceb60fbe6eeb7b29" - integrity sha512-e/6jXeCP7/ptlAM48clmX4xTZc5Ek6T6kagS7Oz2HrYSdqcLZFLqpAfh7ldbZRFfxCZVyh61NEPR08UQRVxJzQ== - dependencies: - d3 "^5.14" - dagre "^0.8.5" - graphlib "^2.1.8" - lodash "^4.17.15" - -dagre@^0.8.5: - version "0.8.5" - resolved "https://registry.yarnpkg.com/dagre/-/dagre-0.8.5.tgz#ba30b0055dac12b6c1fcc247817442777d06afee" - integrity 
sha512-/aTqmnRta7x7MCCpExk7HQL2O4owCT2h8NT//9I1OQ9vt29Pa0BzSAkR5lwFUcQ7491yVi/3CXU9jQ5o0Mn2Sw== - dependencies: - graphlib "^2.1.8" - lodash "^4.17.15" - -damerau-levenshtein@^1.0.7: - version "1.0.8" - resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" - integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== - -data-urls@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== - dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" - -date-fns@^2.16.1: - version "2.28.0" - resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.28.0.tgz#9570d656f5fc13143e50c975a3b6bbeb46cd08b2" - integrity sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw== - -date-format@0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/date-format/-/date-format-0.0.2.tgz#fafd448f72115ef1e2b739155ae92f2be6c28dd1" - integrity sha1-+v1Ej3IRXvHitzkVWukvK+bCjdE= - -debug@2.6.9, debug@^2.6.0, debug@^2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^3.2.7: - version "3.2.7" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - -decamelize@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" - integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== - -decimal.js@^10.2.1: - version "10.3.1" - resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" - integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - -decompress-response@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" - integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== - dependencies: - mimic-response "^3.1.0" - -dedent@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= - -deep-is@^0.1.3, deep-is@~0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^2.1.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-2.2.1.tgz#5d3ff22a01c00f645405a2fbc17d0778a1801170" - integrity 
sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA== - -deepmerge@^4.0.0, deepmerge@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== - -default-gateway@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" - integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== - dependencies: - execa "^5.0.0" - -defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" - integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= - dependencies: - clone "^1.0.2" - -defer-to-connect@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" - integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== - -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== - -define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" - -defined@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - integrity 
sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= - -delaunator@4: - version "4.0.1" - resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-4.0.1.tgz#3d779687f57919a7a418f8ab947d3bddb6846957" - integrity sha512-WNPWi1IRKZfCt/qIDMfERkDp93+iZEmOxN2yy4Jg+Xhv8SLk2UTqqbe1sfiipn0and9QrE914/ihdx82Y/Giag== - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= - -depd@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= - -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -detect-libc@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" - integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= - -detect-newline@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -detect-node@^2.0.4, detect-node@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== - -detect-port-alt@^1.1.6: - 
version "1.1.6" - resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" - integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== - dependencies: - address "^1.0.1" - debug "^2.6.0" - -detective@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/detective/-/detective-5.2.0.tgz#feb2a77e85b904ecdea459ad897cc90a99bd2a7b" - integrity sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg== - dependencies: - acorn-node "^1.6.1" - defined "^1.0.0" - minimist "^1.1.1" - -devtools-protocol@0.0.981744: - version "0.0.981744" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.981744.tgz#9960da0370284577d46c28979a0b32651022bacf" - integrity sha512-0cuGS8+jhR67Fy7qG3i3Pc7Aw494sb9yG9QgpG97SFVWwolgYjlhJg7n+UaHxOQT30d1TYu/EYe9k01ivLErIg== - -devtools-protocol@^0.0.998712: - version "0.0.998712" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.998712.tgz#35788d4e9e91c55288330f3ae8cc4584b21e07bc" - integrity sha512-KCl+wJ9RsnDyGSsW7nbkgLFYxcKxZ7nzr6/r/hMOjkS02q2x1p8PbUenzKRMfL0jALhYxkCHrYnPnV3GHVD9EQ== - -devtools@7.19.7: - version "7.19.7" - resolved "https://registry.yarnpkg.com/devtools/-/devtools-7.19.7.tgz#b9091006f698303021dfb6593c35fe2792ca2601" - integrity sha512-XkIuojwTD0LeCzEeoTozKfAYWZUAw5Sj6CNPUuift3eDD9wnY2AcKLmFw8e9ihVUevJriyV8tMNbDsl97HSYWg== - dependencies: - "@types/node" "^17.0.4" - "@types/ua-parser-js" "^0.7.33" - "@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/protocols" "7.19.0" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - chrome-launcher "^0.15.0" - edge-paths "^2.1.0" - puppeteer-core "^13.1.3" - query-selector-shadow-dom "^1.0.0" - ua-parser-js "^1.0.1" - uuid "^8.0.0" - -didyoumean@^1.2.2: - version "1.2.2" - resolved 
"https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" - integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== - -diff-sequences@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" - integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== - -diff-sequences@^28.0.2: - version "28.0.2" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-28.0.2.tgz#40f8d4ffa081acbd8902ba35c798458d0ff1af41" - integrity sha512-YtEoNynLDFCRznv/XDalsKGSZDoj0U5kLnXvY0JSq3nBboRrZXjD81+eSiwi+nzcZDwedMmcowcxNwwgFW23mQ== - -diff@5.0.0, diff@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -dlv@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" - integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== - -dns-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= - -dns-packet@^5.2.2: - version "5.3.1" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.3.1.tgz#eb94413789daec0f0ebe2fcc230bdc9d7c91b43d" - integrity 
sha512-spBwIj0TK0Ey3666GwIdWVfUpLyubpU53BTCu8iPn4r4oXd9O14Hjg3EHw3ts2oed77/SeckunUYCyRlSngqHw== - dependencies: - "@leichtgewicht/ip-codec" "^2.0.1" - -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -dom-accessibility-api@^0.5.6: - version "0.5.14" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" - integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== - -dom-accessibility-api@^0.5.9: - version "0.5.13" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.13.tgz#102ee5f25eacce09bdf1cfa5a298f86da473be4b" - integrity sha512-R305kwb5CcMDIpSHUnLyIAp7SrSPBx6F0VfQFB3M75xVMHhXJJIdePYgbPPh1o57vCHNu5QztokWUPsLjWzFqw== - -dom-converter@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-helpers@^5.0.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" - integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA== - dependencies: - "@babel/runtime" "^7.8.7" - csstype "^3.0.2" - -dom-serializer@0: - 
version "0.2.2" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - -dom-serializer@^1.0.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" - integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.2.0" - entities "^2.0.0" - -domelementtype@1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domelementtype@^2.0.1, domelementtype@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" - integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== - -domexception@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== - dependencies: - webidl-conversions "^5.0.0" - -domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" - integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== - dependencies: - domelementtype "^2.2.0" - -domutils@^1.7.0: - version "1.7.0" - resolved 
"https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^2.5.2, domutils@^2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" - integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== - dependencies: - dom-serializer "^1.0.1" - domelementtype "^2.2.0" - domhandler "^4.2.0" - -dot-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" - integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -dotenv-expand@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" - integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== - -dotenv@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" - integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== - -duplexer@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" - integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== - -easy-table@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/easy-table/-/easy-table-1.2.0.tgz#ba9225d7138fee307bfd4f0b5bc3c04bdc7c54eb" - integrity sha512-OFzVOv03YpvtcWGe5AayU5G2hgybsg3iqA6drU8UaoZyB9jLGMTrz9+asnLp/E+6qPh88yEI1gvyZFZ41dmgww== 
- dependencies: - ansi-regex "^5.0.1" - optionalDependencies: - wcwidth "^1.0.1" - -edge-paths@^2.1.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/edge-paths/-/edge-paths-2.2.1.tgz#d2d91513225c06514aeac9843bfce546abbf4391" - integrity sha512-AI5fC7dfDmCdKo3m5y7PkYE8m6bMqR6pvVpgtrZkkhcJXFLelUgkjrhk3kXXx8Kbw2cRaTT4LkOR7hqf39KJdw== - dependencies: - "@types/which" "^1.3.2" - which "^2.0.2" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= - -ejs@^3.0.1, ejs@^3.1.6, ejs@^3.1.7: - version "3.1.8" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" - integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== - dependencies: - jake "^10.8.5" - -electron-to-chromium@^1.4.118: - version "1.4.137" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.137.tgz#186180a45617283f1c012284458510cd99d6787f" - integrity sha512-0Rcpald12O11BUogJagX3HsCN3FE83DSqWjgXoHo5a72KUKMSfI39XBgJpgNNxS9fuGzytaFjE06kZkiVFy2qA== - -electron-to-chromium@^1.4.84: - version "1.4.107" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.107.tgz#564257014ab14033b4403a309c813123c58a3fb9" - integrity sha512-Huen6taaVrUrSy8o7mGStByba8PfOWWluHNxSHGBrCgEdFVLtvdQDBr9LBCF9Uci8SYxh28QNNMO0oC17wbGAg== - -emittery@^0.10.2: - version "0.10.2" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" - integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== - -emittery@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" - integrity 
sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -emoji-regex@^9.2.2: - version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - -emojis-list@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= - -end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -enhanced-resolve@^5.9.3: - version "5.9.3" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz#44a342c012cbc473254af5cc6ae20ebd0aae5d88" - integrity sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow== - dependencies: - graceful-fs "^4.2.4" - tapable "^2.2.0" - -entities@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" - integrity 
sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== - -error-ex@^1.2.0, error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -error-stack-parser@^2.0.6: - version "2.0.7" - resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.7.tgz#b0c6e2ce27d0495cf78ad98715e0cad1219abb57" - integrity sha512-chLOW0ZGRf4s8raLrDxa5sdkvPec5YdvwbFnqJme4rk0rFajP8mPtrDL1+I+CwrQDCjswDA5sREX7jYQDQs9vA== - dependencies: - stackframe "^1.1.1" - -es-abstract@^1.17.2, es-abstract@^1.19.1, es-abstract@^1.19.2: - version "1.19.4" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.19.4.tgz#79a95527af382eb276075627e53762393ce8b57a" - integrity sha512-flV8e5g9/xulChMG48Fygk1ptpo4lQRJ0eJYtxJFgi7pklLx7EFcOJ34jnvr8pbWlaFN/AT1cZpe0hiFel9Hqg== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - get-intrinsic "^1.1.1" - get-symbol-description "^1.0.0" - has "^1.0.3" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - is-callable "^1.2.4" - is-negative-zero "^2.0.2" - is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" - is-string "^1.0.7" - is-weakref "^1.0.2" - object-inspect "^1.12.0" - object-keys "^1.1.1" - object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart "^1.0.4" - unbox-primitive "^1.0.1" - -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== - -es-shim-unscopables@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" - integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== - dependencies: - has "^1.0.3" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= - -escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= - -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escodegen@^2.0.0: - 
version "2.0.0" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" - integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - dependencies: - esprima "^4.0.1" - estraverse "^5.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - -eslint-config-react-app@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" - integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== - dependencies: - "@babel/core" "^7.16.0" - "@babel/eslint-parser" "^7.16.3" - "@rushstack/eslint-patch" "^1.1.0" - "@typescript-eslint/eslint-plugin" "^5.5.0" - "@typescript-eslint/parser" "^5.5.0" - babel-preset-react-app "^10.0.1" - confusing-browser-globals "^1.0.11" - eslint-plugin-flowtype "^8.0.3" - eslint-plugin-import "^2.25.3" - eslint-plugin-jest "^25.3.0" - eslint-plugin-jsx-a11y "^6.5.1" - eslint-plugin-react "^7.27.1" - eslint-plugin-react-hooks "^4.3.0" - eslint-plugin-testing-library "^5.0.1" - -eslint-import-resolver-node@^0.3.6: - version "0.3.6" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" - integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== - dependencies: - debug "^3.2.7" - resolve "^1.20.0" - -eslint-module-utils@^2.7.3: - version "2.7.3" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" - integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== - dependencies: - debug "^3.2.7" - find-up "^2.1.0" - -eslint-plugin-flowtype@^8.0.3: - version "8.0.3" - resolved 
"https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" - integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== - dependencies: - lodash "^4.17.21" - string-natural-compare "^3.0.1" - -eslint-plugin-import@^2.25.3: - version "2.26.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" - integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== - dependencies: - array-includes "^3.1.4" - array.prototype.flat "^1.2.5" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.6" - eslint-module-utils "^2.7.3" - has "^1.0.3" - is-core-module "^2.8.1" - is-glob "^4.0.3" - minimatch "^3.1.2" - object.values "^1.1.5" - resolve "^1.22.0" - tsconfig-paths "^3.14.1" - -eslint-plugin-jest@^25.3.0: - version "25.7.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" - integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== - dependencies: - "@typescript-eslint/experimental-utils" "^5.0.0" - -eslint-plugin-jsx-a11y@^6.5.1: - version "6.5.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8" - integrity sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g== - dependencies: - "@babel/runtime" "^7.16.3" - aria-query "^4.2.2" - array-includes "^3.1.4" - ast-types-flow "^0.0.7" - axe-core "^4.3.5" - axobject-query "^2.2.0" - damerau-levenshtein "^1.0.7" - emoji-regex "^9.2.2" - has "^1.0.3" - jsx-ast-utils "^3.2.1" - language-tags "^1.0.5" - minimatch "^3.0.4" - -eslint-plugin-react-hooks@^4.3.0: - version "4.5.0" - resolved 
"https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.5.0.tgz#5f762dfedf8b2cf431c689f533c9d3fa5dcf25ad" - integrity sha512-8k1gRt7D7h03kd+SAAlzXkQwWK22BnK6GKZG+FJA6BAGy22CFvl8kCIXKpVux0cCxMWDQUPqSok0LKaZ0aOcCw== - -eslint-plugin-react@^7.27.1: - version "7.29.4" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.29.4.tgz#4717de5227f55f3801a5fd51a16a4fa22b5914d2" - integrity sha512-CVCXajliVh509PcZYRFyu/BoUEz452+jtQJq2b3Bae4v3xBUWPLCmtmBM+ZinG4MzwmxJgJ2M5rMqhqLVn7MtQ== - dependencies: - array-includes "^3.1.4" - array.prototype.flatmap "^1.2.5" - doctrine "^2.1.0" - estraverse "^5.3.0" - jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.1.2" - object.entries "^1.1.5" - object.fromentries "^2.0.5" - object.hasown "^1.1.0" - object.values "^1.1.5" - prop-types "^15.8.1" - resolve "^2.0.0-next.3" - semver "^6.3.0" - string.prototype.matchall "^4.0.6" - -eslint-plugin-testing-library@^5.0.1: - version "5.5.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.5.0.tgz#ce43113dac5a5d93e8b0a8d9937983cdbf63f049" - integrity sha512-eWQ19l6uWL7LW8oeMyQVSGjVYFnBqk7DMHjadm0yOHBvX3Xi9OBrsNuxoAMdX4r7wlQ5WWpW46d+CB6FWFL/PQ== - dependencies: - "@typescript-eslint/utils" "^5.13.0" - -eslint-scope@5.1.1, eslint-scope@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-scope@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" - integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - 
-eslint-utils@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" - integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== - dependencies: - eslint-visitor-keys "^2.0.0" - -eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== - -eslint-visitor-keys@^3.0.0, eslint-visitor-keys@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" - integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== - -eslint-webpack-plugin@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.1.1.tgz#83dad2395e5f572d6f4d919eedaa9cf902890fcb" - integrity sha512-xSucskTN9tOkfW7so4EaiFIkulWLXwCB/15H917lR6pTv0Zot6/fetFucmENRb7J5whVSFKIvwnrnsa78SG2yg== - dependencies: - "@types/eslint" "^7.28.2" - jest-worker "^27.3.1" - micromatch "^4.0.4" - normalize-path "^3.0.0" - schema-utils "^3.1.1" - -eslint@^8.3.0: - version "8.15.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.15.0.tgz#fea1d55a7062da48d82600d2e0974c55612a11e9" - integrity sha512-GG5USZ1jhCu8HJkzGgeK8/+RGnHaNYZGrGDzUtigK3BsGESW/rs2az23XqE0WVwDxy1VRvvjSSGu5nB0Bu+6SA== - dependencies: - "@eslint/eslintrc" "^1.2.3" - "@humanwhocodes/config-array" "^0.9.2" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.1.1" - eslint-utils "^3.0.0" - eslint-visitor-keys "^3.3.0" - espree "^9.3.2" - esquery "^1.4.0" - esutils "^2.0.2" - 
fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^6.0.1" - globals "^13.6.0" - ignore "^5.2.0" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.1" - regexpp "^3.2.0" - strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" - text-table "^0.2.0" - v8-compile-cache "^2.0.3" - -espree@^9.3.2: - version "9.3.2" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" - integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== - dependencies: - acorn "^8.7.1" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.3.0" - -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - 
-estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -estree-walker@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" - integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== - -execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - 
strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= - -expect-webdriverio@^3.0.0: - version "3.3.2" - resolved "https://registry.yarnpkg.com/expect-webdriverio/-/expect-webdriverio-3.3.2.tgz#7ba7de33d6605fddcddd64d4b9752c0dccb2212e" - integrity sha512-hYJvcewauSJsHS3dpo3s/SvV+5Chm9f8MBM7kBzAcmOLGjQ8OI0XPeknvG1bt57OzlBB9GihYmp3NmESpwpXZg== - dependencies: - expect "^27.0.2" - jest-matcher-utils "^28.0.2" - -expect@^27.0.2, expect@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" - integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== - dependencies: - "@jest/types" "^27.5.1" - jest-get-type "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - -express@^4.17.3: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.0" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.10.3" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -external-editor@^3.0.3: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" - integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== - dependencies: - chardet "^0.7.0" - iconv-lite "^0.4.24" - tmp "^0.0.33" - -extract-zip@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" - integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== - dependencies: - debug "^4.1.1" - get-stream "^5.1.0" - yauzl "^2.10.0" - optionalDependencies: - "@types/yauzl" "^2.9.1" - -fast-deep-equal@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" - integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - 
-fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= - -fastq@^1.6.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" - integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== - dependencies: - reusify "^1.0.4" - -faye-websocket@^0.11.3: - version "0.11.4" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" - integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== - dependencies: - websocket-driver ">=0.5.1" - -fb-watchman@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" - integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== - dependencies: - bser "2.1.1" - -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" - integrity sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4= - dependencies: - pend "~1.2.0" - -fibers@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/fibers/-/fibers-5.0.1.tgz#bb9b02aa022685185d21aed227363e456d87660d" - integrity sha512-VMC7Frt87Oo0AOJ6EcPFbi+tZmkQ4tD85aatwyWL6I9cYMJmm2e+pXUJsfGZ36U7MffXtjou2XIiWJMtHriErw== - dependencies: - detect-libc "^1.0.3" - -figures@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" - integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== - dependencies: - escape-string-regexp "^1.0.5" - 
-file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -file-loader@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" - integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - -filelist@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.2.tgz#80202f21462d4d1c2e214119b1807c1bc0380e5b" - integrity sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ== - dependencies: - minimatch "^3.0.4" - -filesize@^8.0.6: - version "8.0.7" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" - integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -find-cache-dir@^3.3.1: - version "3.3.2" - resolved 
"https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - -find-up@5.0.0, find-up@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= - dependencies: - locate-path "^2.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity 
sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flat@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" - integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== - -flatted@^3.1.0: - version "3.2.5" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" - integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== - -follow-redirects@^1.0.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" - integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== - -fork-ts-checker-webpack-plugin@^6.5.0: - version "6.5.2" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" - integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== - dependencies: - "@babel/code-frame" "^7.8.3" - "@types/json-schema" "^7.0.5" - chalk "^4.1.0" - chokidar "^3.4.2" - cosmiconfig "^6.0.0" - deepmerge "^4.2.2" - fs-extra "^9.0.0" - glob "^7.1.6" - memfs "^3.1.2" - minimatch "^3.0.4" - schema-utils "2.7.0" - semver "^7.3.2" - tapable "^1.0.0" - -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -formik@^2.2.9: - version "2.2.9" - resolved 
"https://registry.yarnpkg.com/formik/-/formik-2.2.9.tgz#8594ba9c5e2e5cf1f42c5704128e119fc46232d0" - integrity sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA== - dependencies: - deepmerge "^2.1.1" - hoist-non-react-statics "^3.3.0" - lodash "^4.17.21" - lodash-es "^4.17.21" - react-fast-compare "^2.0.1" - tiny-warning "^1.0.2" - tslib "^1.10.0" - -forwarded@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fraction.js@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-extra@^10.0.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-extra@^9.0.0, fs-extra@^9.0.1: - version "9.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity 
sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== - dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-monkey@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" - integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= - -gaze@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/gaze/-/gaze-1.1.3.tgz#c441733e13b927ac8c0ff0b4c3b033f28812924a" - integrity sha512-BRdNm8hbWzFzWHERTrejLqwHDfS4GibPoq5wjTPIoJHoBtKGPg3xAFfxmM+9ztbXelxcf2hwQcaz1PtmFeue8g== - dependencies: - globule "^1.0.0" - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity 
sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" - integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - -get-own-enumerable-property-symbols@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" - integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - -get-stream@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -get-symbol-description@^1.0.0: - 
version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -glob-parent@^5.1.2, glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.1, glob-parent@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob-to-regexp@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" - integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== - -glob@7.2.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.2.0" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" - integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@~7.1.1: - version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" - integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== - dependencies: - fs.realpath "^1.0.0" - inflight 
"^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-modules@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - -global-prefix@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" - integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== - dependencies: - ini "^1.3.5" - kind-of "^6.0.2" - which "^1.3.1" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.6.0, globals@^13.9.0: - version "13.13.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.13.0.tgz#ac32261060d8070e2719dd6998406e27d2b5727b" - integrity sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A== - dependencies: - type-fest "^0.20.2" - -globby@^11.0.4: - version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -globule@^1.0.0: - version "1.3.3" - resolved "https://registry.yarnpkg.com/globule/-/globule-1.3.3.tgz#811919eeac1ab7344e905f2e3be80a13447973c2" - integrity sha512-mb1aYtDbIjTu4ShMB85m3UzjX9BVKe9WCzsnfMSZk+K5GpIbBOexgg4PPCt5eHDEG5/ZQAUX2Kct02zfiPLsKg== - dependencies: - glob 
"~7.1.1" - lodash "~4.17.10" - minimatch "~3.0.2" - -got@^11.0.2, got@^11.8.1, got@^11.8.2: - version "11.8.3" - resolved "https://registry.yarnpkg.com/got/-/got-11.8.3.tgz#f496c8fdda5d729a90b4905d2b07dbd148170770" - integrity sha512-7gtQ5KiPh1RtGS9/Jbv1ofDpBFuq42gyfEib+ejaRBJuj/3tQFeR5+gw57e4ipaU8c/rCjvX6fkQz2lyDlGAOg== - dependencies: - "@sindresorhus/is" "^4.0.0" - "@szmarczak/http-timer" "^4.0.5" - "@types/cacheable-request" "^6.0.1" - "@types/responselike" "^1.0.0" - cacheable-lookup "^5.0.3" - cacheable-request "^7.0.2" - decompress-response "^6.0.0" - http2-wrapper "^1.0.0-beta.5.2" - lowercase-keys "^2.0.0" - p-cancelable "^2.0.0" - responselike "^2.0.0" - -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - -grapheme-splitter@^1.0.2: - version "1.0.4" - resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" - integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== - -graphlib@^2.1.8: - version "2.1.8" - resolved "https://registry.yarnpkg.com/graphlib/-/graphlib-2.1.8.tgz#5761d414737870084c92ec7b5dbcb0592c9d35da" - integrity sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A== - dependencies: - lodash "^4.17.15" - -gzip-size@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" - integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== - dependencies: - duplexer "^0.1.2" - -hammerjs@^2.0.8: - version "2.0.8" - resolved 
"https://registry.yarnpkg.com/hammerjs/-/hammerjs-2.0.8.tgz#04ef77862cff2bb79d30f7692095930222bf60f1" - integrity sha1-BO93hiz/K7edMPdpIJWTAiK/YPE= - -handle-thing@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" - integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== - -harmony-reflect@^1.4.6: - version "1.6.2" - resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" - integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - -has-bigints@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" - integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-tostringtag@^1.0.0: - 
version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -he@1.2.0, he@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -history@^4.9.0: - version "4.10.1" - resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3" - integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== - dependencies: - "@babel/runtime" "^7.1.2" - loose-envify "^1.2.0" - resolve-pathname "^3.0.0" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - value-equal "^1.0.1" - -hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" - integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== - dependencies: - react-is "^16.7.0" - -hoopy@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" - integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== - -hosted-git-info@^2.1.4: - version "2.8.9" - resolved 
"https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" - integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== - -hpack.js@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - -html-entities@^2.1.0, html-entities@^2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" - integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== - -html-escaper@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -html-minifier-terser@^6.0.2: - version "6.1.0" - resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" - integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== - dependencies: - camel-case "^4.1.2" - clean-css "^5.2.2" - commander "^8.3.0" - he "^1.2.0" - param-case "^3.0.4" - relateurl "^0.2.7" - terser "^5.10.0" - -html-webpack-plugin@^5.5.0: - version "5.5.0" - resolved 
"https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" - integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== - dependencies: - "@types/html-minifier-terser" "^6.0.0" - html-minifier-terser "^6.0.2" - lodash "^4.17.21" - pretty-error "^4.0.0" - tapable "^2.0.0" - -htmlparser2@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" - integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.0.0" - domutils "^2.5.2" - entities "^2.0.0" - -http-cache-semantics@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" - integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.5.1: - version "0.5.6" 
- resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.6.tgz#2e02406ab2df8af8a7abfba62e0da01c62b95afd" - integrity sha512-vDlkRPDJn93swjcjqMSaGSPABbIarsr1TLAui/gLDXzV5VsJNdXNzMYDyNBLQkjWQCJ1uizu8T2oDMhmGt0PRA== - -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== - dependencies: - "@tootallnate/once" "1" - agent-base "6" - debug "4" - -http-proxy-middleware@^2.0.1, http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== - dependencies: - "@types/http-proxy" "^1.17.8" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -http2-wrapper@^1.0.0-beta.5.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" - integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== - dependencies: - quick-lru "^5.1.1" - resolve-alpn "^1.0.0" - -https-proxy-agent@5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity 
sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== - dependencies: - agent-base "6" - debug "4" - -https-proxy-agent@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== - dependencies: - agent-base "6" - debug "4" - -human-signals@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -hyphenate-style-name@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz#691879af8e220aea5750e8827db4ef62a54e361d" - integrity sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ== - -iconv-lite@0.4, iconv-lite@0.4.24, iconv-lite@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -iconv-lite@^0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - -icss-utils@^5.0.0, icss-utils@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" - integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== - 
-idb@^6.1.4: - version "6.1.5" - resolved "https://registry.yarnpkg.com/idb/-/idb-6.1.5.tgz#dbc53e7adf1ac7c59f9b2bf56e00b4ea4fce8c7b" - integrity sha512-IJtugpKkiVXQn5Y+LteyBCNk1N8xpGV3wWZk9EVtZWH8DYkjBn0bX1XnGP9RkyZF0sAcywa6unHqSWKe7q4LGw== - -identity-obj-proxy@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" - integrity sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ= - dependencies: - harmony-reflect "^1.4.6" - -ieee754@^1.1.13: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -ignore@^5.1.8, ignore@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== - -immer@^9.0.7: - version "9.0.12" - resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.12.tgz#2d33ddf3ee1d247deab9d707ca472c8c942a0f20" - integrity sha512-lk7UNmSbAukB5B6dh9fnh5D0bJTOFKxVg2cyJWTYrWRfhLrLMBquONcUs3aFq507hNoIZEDDh8lb8UtOizSMhA== - -immutability-helper@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/immutability-helper/-/immutability-helper-3.1.1.tgz#2b86b2286ed3b1241c9e23b7b21e0444f52f77b7" - integrity sha512-Q0QaXjPjwIju/28TsugCHNEASwoCcJSyJV3uO1sOIQGI0jKgm9f41Lvz0DZj3n46cNCyAZTsEYoY4C2bVRUzyQ== - -immutable@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.0.0.tgz#b86f78de6adef3608395efb269a91462797e2c23" - integrity sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw== - -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: - version "3.3.0" - resolved 
"https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= - -indent-string@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" - integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= - -indent-string@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.3: - version "2.0.3" - resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -ini@^1.3.5: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -inquirer@8.2.4: - version "8.2.4" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.4.tgz#ddbfe86ca2f67649a67daa6f1051c128f684f0b4" - integrity sha512-nn4F01dxU8VeKfq192IjLsxu0/OmMZ4Lg3xKAns148rCaXP6ntAoEkVYZThWjwON8AlzdZZi6oqnhNbxUG9hVg== - dependencies: - ansi-escapes "^4.2.1" - chalk "^4.1.1" - cli-cursor "^3.1.0" - cli-width "^3.0.0" - external-editor "^3.0.3" - figures "^3.0.0" - lodash "^4.17.21" - mute-stream "0.0.8" - ora "^5.4.1" - run-async "^2.4.0" - rxjs "^7.5.5" - string-width "^4.1.0" - strip-ansi "^6.0.0" - through "^2.3.6" - wrap-ansi "^7.0.0" - -internal-slot@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" - integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== - dependencies: - get-intrinsic "^1.1.0" - has "^1.0.3" - side-channel "^1.0.4" - -internmap@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95" - integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -ipaddr.js@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" - integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= - -is-bigint@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" - integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== - dependencies: - has-bigints "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-boolean-object@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" - integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-callable@^1.1.4, is-callable@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" - integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== - -is-core-module@^2.2.0, is-core-module@^2.8.1: - version "2.8.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.1.tgz#f59fdfca701d5879d0a6b100a40aa1560ce27211" - integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== - dependencies: - 
has "^1.0.3" - -is-date-object@^1.0.1: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" - integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== - dependencies: - has-tostringtag "^1.0.0" - -is-docker@^2.0.0, is-docker@^2.1.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-in-browser@^1.0.2, is-in-browser@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-in-browser/-/is-in-browser-1.1.3.tgz#56ff4db683a078c6082eb95dad7dc62e1d04f835" - integrity sha1-Vv9NtoOgeMYILrldrX3GLh0E+DU= - 
-is-interactive@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" - integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== - -is-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" - integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= - -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== - -is-number-object@^1.0.4: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" - integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== - dependencies: - has-tostringtag "^1.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-obj@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= - -is-plain-obj@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" - integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== - -is-plain-obj@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity 
sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - -is-port-reachable@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-port-reachable/-/is-port-reachable-3.1.0.tgz#f6668d3bca9c36b07f737c48a8f875ab0653cd2b" - integrity sha512-vjc0SSRNZ32s9SbZBzGaiP6YVB+xglLShhgZD/FHMZUXBvQWaV9CtzgeVhjccFJrI6RAMV+LX7NYxueW/A8W5A== - -is-potential-custom-element-name@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" - integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== - -is-regex@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" - integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-regexp@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" - integrity sha1-/S2INUXEa6xaYz57mgnof6LLUGk= - -is-root@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" - integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== - -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" - integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== - dependencies: - call-bind "^1.0.2" - -is-stream@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity 
sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -is-string@^1.0.5, is-string@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" - integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== - dependencies: - has-tostringtag "^1.0.0" - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== - dependencies: - has-symbols "^1.0.2" - -is-typedarray@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= - -is-unicode-supported@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" - integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== - -is-utf8@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" - integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= - -is-weakref@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" - integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== - dependencies: - call-bind "^1.0.2" - -is-wsl@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - 
dependencies: - is-docker "^2.0.0" - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= - -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - -istanbul-lib-instrument@^5.0.4: - version "5.1.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz#7b49198b657b27a730b8e9cb601f1e1bff24c59a" - integrity sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-instrument@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" - integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.4" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.4.tgz#1b6f068ecbc6c331040aab5741991273e609e40c" - integrity sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jake@^10.8.5: - version "10.8.5" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" - integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== - dependencies: - async "^3.2.3" - chalk "^4.0.2" - filelist "^1.0.1" - minimatch "^3.0.4" - -jest-changed-files@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" - integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== - dependencies: - "@jest/types" "^27.5.1" - execa "^5.0.0" - throat "^6.0.1" - -jest-circus@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" - integrity 
sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" - -jest-cli@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" - integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== - dependencies: - "@jest/core" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - import-local "^3.0.2" - jest-config "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - prompts "^2.0.1" - yargs "^16.2.0" - -jest-config@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" - integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== - dependencies: - "@babel/core" "^7.8.0" - "@jest/test-sequencer" "^27.5.1" - "@jest/types" "^27.5.1" - babel-jest "^27.5.1" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.9" - jest-circus "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-get-type "^27.5.1" - jest-jasmine2 "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runner "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^27.5.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" - 
-jest-diff@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" - integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== - dependencies: - chalk "^4.0.0" - diff-sequences "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-diff@^28.1.0: - version "28.1.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-28.1.0.tgz#77686fef899ec1873dbfbf9330e37dd429703269" - integrity sha512-8eFd3U3OkIKRtlasXfiAQfbovgFgRDb0Ngcs2E+FMeBZ4rUezqIaGjuyggJBp+llosQXNEWofk/Sz4Hr5gMUhA== - dependencies: - chalk "^4.0.0" - diff-sequences "^28.0.2" - jest-get-type "^28.0.2" - pretty-format "^28.1.0" - -jest-docblock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" - integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== - dependencies: - detect-newline "^3.0.0" - -jest-each@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" - integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - jest-get-type "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - -jest-environment-jsdom@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" - integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - jsdom "^16.6.0" - -jest-environment-node@^27.5.1: - 
version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" - integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -jest-get-type@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" - integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== - -jest-get-type@^28.0.2: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-28.0.2.tgz#34622e628e4fdcd793d46db8a242227901fcf203" - integrity sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA== - -jest-haste-map@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" - integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== - dependencies: - "@jest/types" "^27.5.1" - "@types/graceful-fs" "^4.1.2" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^27.5.1" - jest-serializer "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - micromatch "^4.0.4" - walker "^1.0.7" - optionalDependencies: - fsevents "^2.3.2" - -jest-jasmine2@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" - integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" 
"^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - throat "^6.0.1" - -jest-leak-detector@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" - integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== - dependencies: - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^27.0.0, jest-matcher-utils@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" - integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== - dependencies: - chalk "^4.0.0" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^28.0.2: - version "28.1.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-28.1.0.tgz#2ae398806668eeabd293c61712227cb94b250ccf" - integrity sha512-onnax0n2uTLRQFKAjC7TuaxibrPSvZgKTcSCnNUz/tOjJ9UhxNm7ZmPpoQavmTDUjXvUQ8KesWk2/VdrxIFzTQ== - dependencies: - chalk "^4.0.0" - jest-diff "^28.1.0" - jest-get-type "^28.0.2" - pretty-format "^28.1.0" - -jest-message-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" - integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^27.5.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format 
"^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^28.1.0: - version "28.1.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.0.tgz#7e8f0b9049e948e7b94c2a52731166774ba7d0af" - integrity sha512-RpA8mpaJ/B2HphDMiDlrAZdDytkmwFqgjDZovM21F35lHGeUeCvYmm6W+sbQ0ydaLpg5bFAUuWG1cjqOl8vqrw== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^28.1.0" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^28.1.0" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" - integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - -jest-pnp-resolver@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" - integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== - -jest-regex-util@^28.0.0: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" - integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== - -jest-resolve-dependencies@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" - integrity 
sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== - dependencies: - "@jest/types" "^27.5.1" - jest-regex-util "^27.5.1" - jest-snapshot "^27.5.1" - -jest-resolve@^27.4.2, jest-resolve@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" - integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-pnp-resolver "^1.2.2" - jest-util "^27.5.1" - jest-validate "^27.5.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" - -jest-runner@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" - integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.8.1" - graceful-fs "^4.2.9" - jest-docblock "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-haste-map "^27.5.1" - jest-leak-detector "^27.5.1" - jest-message-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runtime "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - source-map-support "^0.5.6" - throat "^6.0.1" - -jest-runtime@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" - integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/globals" "^27.5.1" - "@jest/source-map" "^27.5.1" - 
"@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-serializer@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" - integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== - dependencies: - "@types/node" "*" - graceful-fs "^4.2.9" - -jest-snapshot@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" - integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== - dependencies: - "@babel/core" "^7.7.2" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.0.0" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^27.5.1" - graceful-fs "^4.2.9" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - jest-haste-map "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - natural-compare "^1.4.0" - pretty-format "^27.5.1" - semver "^7.3.2" - -jest-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" - integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== - dependencies: - "@jest/types" "^27.5.1" - 
"@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^28.1.0: - version "28.1.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.0.tgz#d54eb83ad77e1dd441408738c5a5043642823be5" - integrity sha512-qYdCKD77k4Hwkose2YBEqQk7PzUf/NSE+rutzceduFveQREeH6b+89Dc9+wjX9dAwHcgdx4yedGA3FQlU/qCTA== - dependencies: - "@jest/types" "^28.1.0" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" - integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== - dependencies: - "@jest/types" "^27.5.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^27.5.1" - leven "^3.1.0" - pretty-format "^27.5.1" - -jest-watch-typeahead@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" - integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== - dependencies: - ansi-escapes "^4.3.1" - chalk "^4.0.0" - jest-regex-util "^28.0.0" - jest-watcher "^28.0.0" - slash "^4.0.0" - string-length "^5.0.1" - strip-ansi "^7.0.1" - -jest-watcher@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" - integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== - dependencies: - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - jest-util "^27.5.1" - string-length "^4.0.1" - -jest-watcher@^28.0.0: - version "28.1.0" - resolved 
"https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.0.tgz#aaa7b4164a4e77eeb5f7d7b25ede5e7b4e9c9aaf" - integrity sha512-tNHMtfLE8Njcr2IRS+5rXYA4BhU90gAOwI9frTGOqd+jX0P/Au/JfRSNqsf5nUTcWdbVYuLxS1KjnzILSoR5hA== - dependencies: - "@jest/test-result" "^28.1.0" - "@jest/types" "^28.1.0" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.10.2" - jest-util "^28.1.0" - string-length "^4.0.1" - -jest-worker@^26.2.1: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" - integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^7.0.0" - -jest-worker@^27.0.2, jest-worker@^27.3.1, jest-worker@^27.4.5, jest-worker@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" - integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^27.4.3: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" - integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== - dependencies: - "@jest/core" "^27.5.1" - import-local "^3.0.2" - jest-cli "^27.5.1" - -js-sha3@0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" - integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity 
sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@4.1.0, js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsdom@^16.6.0: - version "16.7.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== - dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" - cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" - escodegen "^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" - symbol-tree "^3.2.4" - tough-cookie "^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved 
"https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= - -json-bigint-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/json-bigint-string/-/json-bigint-string-1.0.0.tgz#3399784d778bb78440bd72c7094af0e075bbe683" - integrity sha1-M5l4TXeLt4RAvXLHCUrw4HW75oM= - -json-buffer@3.0.1, json-buffer@~3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" - integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== - -json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-schema@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= - -json-stringify-safe@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= - -json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== - dependencies: - minimist "^1.2.0" - -json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" - integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== - -jsonfile@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" - integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== - dependencies: - universalify "^2.0.0" - optionalDependencies: - graceful-fs "^4.1.6" - -jsonpointer@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.0.tgz#f802669a524ec4805fa7389eadbc9921d5dc8072" - integrity sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg== - -jss-plugin-camel-case@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-camel-case/-/jss-plugin-camel-case-10.9.0.tgz#4921b568b38d893f39736ee8c4c5f1c64670aaf7" - integrity sha512-UH6uPpnDk413/r/2Olmw4+y54yEF2lRIV8XIZyuYpgPYTITLlPOsq6XB9qeqv+75SQSg3KLocq5jUBXW8qWWww== - dependencies: - "@babel/runtime" "^7.3.1" - hyphenate-style-name "^1.0.3" - jss 
"10.9.0" - -jss-plugin-default-unit@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-default-unit/-/jss-plugin-default-unit-10.9.0.tgz#bb23a48f075bc0ce852b4b4d3f7582bc002df991" - integrity sha512-7Ju4Q9wJ/MZPsxfu4T84mzdn7pLHWeqoGd/D8O3eDNNJ93Xc8PxnLmV8s8ZPNRYkLdxZqKtm1nPQ0BM4JRlq2w== - dependencies: - "@babel/runtime" "^7.3.1" - jss "10.9.0" - -jss-plugin-global@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-global/-/jss-plugin-global-10.9.0.tgz#fc07a0086ac97aca174e37edb480b69277f3931f" - integrity sha512-4G8PHNJ0x6nwAFsEzcuVDiBlyMsj2y3VjmFAx/uHk/R/gzJV+yRHICjT4MKGGu1cJq2hfowFWCyrr/Gg37FbgQ== - dependencies: - "@babel/runtime" "^7.3.1" - jss "10.9.0" - -jss-plugin-nested@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-nested/-/jss-plugin-nested-10.9.0.tgz#cc1c7d63ad542c3ccc6e2c66c8328c6b6b00f4b3" - integrity sha512-2UJnDrfCZpMYcpPYR16oZB7VAC6b/1QLsRiAutOt7wJaaqwCBvNsosLEu/fUyKNQNGdvg2PPJFDO5AX7dwxtoA== - dependencies: - "@babel/runtime" "^7.3.1" - jss "10.9.0" - tiny-warning "^1.0.2" - -jss-plugin-props-sort@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-props-sort/-/jss-plugin-props-sort-10.9.0.tgz#30e9567ef9479043feb6e5e59db09b4de687c47d" - integrity sha512-7A76HI8bzwqrsMOJTWKx/uD5v+U8piLnp5bvru7g/3ZEQOu1+PjHvv7bFdNO3DwNPC9oM0a//KwIJsIcDCjDzw== - dependencies: - "@babel/runtime" "^7.3.1" - jss "10.9.0" - -jss-plugin-rule-value-function@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss-plugin-rule-value-function/-/jss-plugin-rule-value-function-10.9.0.tgz#379fd2732c0746fe45168011fe25544c1a295d67" - integrity sha512-IHJv6YrEf8pRzkY207cPmdbBstBaE+z8pazhPShfz0tZSDtRdQua5jjg6NMz3IbTasVx9FdnmptxPqSWL5tyJg== - dependencies: - "@babel/runtime" "^7.3.1" - jss "10.9.0" - tiny-warning "^1.0.2" - -jss-plugin-vendor-prefixer@^10.5.1: - version "10.9.0" - resolved 
"https://registry.yarnpkg.com/jss-plugin-vendor-prefixer/-/jss-plugin-vendor-prefixer-10.9.0.tgz#aa9df98abfb3f75f7ed59a3ec50a5452461a206a" - integrity sha512-MbvsaXP7iiVdYVSEoi+blrW+AYnTDvHTW6I6zqi7JcwXdc6I9Kbm234nEblayhF38EftoenbM+5218pidmC5gA== - dependencies: - "@babel/runtime" "^7.3.1" - css-vendor "^2.0.8" - jss "10.9.0" - -jss@10.9.0, jss@^10.5.1: - version "10.9.0" - resolved "https://registry.yarnpkg.com/jss/-/jss-10.9.0.tgz#7583ee2cdc904a83c872ba695d1baab4b59c141b" - integrity sha512-YpzpreB6kUunQBbrlArlsMpXYyndt9JATbt95tajx0t4MTJJcCJdd4hdNpHmOIDiUJrF/oX5wtVFrS3uofWfGw== - dependencies: - "@babel/runtime" "^7.3.1" - csstype "^3.0.2" - is-in-browser "^1.1.3" - tiny-warning "^1.0.2" - -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.2.1: - version "3.2.2" - resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.2.tgz#6ab1e52c71dfc0c0707008a91729a9491fe9f76c" - integrity sha512-HDAyJ4MNQBboGpUnHAVUNJs6X0lh058s6FuixsFGP7MgJYpD6Vasd6nzSG5iIfXu1zAYlHJ/zsOKNlrenTUBnw== - dependencies: - array-includes "^3.1.4" - object.assign "^4.1.2" - -junit-report-builder@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/junit-report-builder/-/junit-report-builder-3.0.0.tgz#829680cfb20d99ce70e26cbb426162afe199fedc" - integrity sha512-aW7DnfLddUb51T+V08bJyecexaLomy5ID/0FXvhwsRXs9E0abvDaDT024U99J2agU3dt4q0ppzfKxSwrIIgXWg== - dependencies: - date-format "0.0.2" - lodash "^4.17.15" - make-dir "^1.3.0" - xmlbuilder "^15.1.1" - -keycharm@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/keycharm/-/keycharm-0.3.1.tgz#1de258425454752b95c4d8a6cab9ec83218670de" - integrity sha512-zn47Ti4FJT9zdF+YBBLWJsfKF/fYQHkrYlBeB5Ez5e2PjW7SoIxr43yehAne2HruulIoid4NKZZxO0dHBygCtQ== - -keyv@^4.0.0: - version "4.2.2" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.2.2.tgz#4b6f602c0228ef4d8214c03c520bef469ed6b768" - integrity sha512-uYS0vKTlBIjNCAUqrjlxmruxOEiZxZIHXyp32sdcGmP+ukFrmWUnE//RcPXJH3Vxrni1H2gsQbjHE0bH7MtMQQ== - dependencies: - 
compress-brotli "^1.3.6" - json-buffer "3.0.1" - -kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -klona@^2.0.4, klona@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" - integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== - -ky@^0.30.0: - version "0.30.0" - resolved "https://registry.yarnpkg.com/ky/-/ky-0.30.0.tgz#a3d293e4f6c4604a9a4694eceb6ce30e73d27d64" - integrity sha512-X/u76z4JtDVq10u1JA5UQfatPxgPaVDMYTrgHyiTpGN2z4TMEJkIHsoSBBSg9SWZEIXTKsi9kHgiQ9o3Y/4yog== - -language-subtag-registry@~0.3.2: - version "0.3.21" - resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" - integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== - -language-tags@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" - integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= - dependencies: - language-subtag-registry "~0.3.2" - -lazystream@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638" - integrity sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw== - dependencies: - readable-stream "^2.0.5" - -leven@^3.1.0: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -lighthouse-logger@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/lighthouse-logger/-/lighthouse-logger-1.3.0.tgz#ba6303e739307c4eee18f08249524e7dafd510db" - integrity sha512-BbqAKApLb9ywUli+0a+PcV04SyJ/N1q/8qgCNe6U97KbPCS1BTksEuHFLYdvc8DltuhfxIUBqDZsC0bBGtl3lA== - dependencies: - debug "^2.6.9" - marky "^1.2.2" - -lilconfig@^2.0.3, lilconfig@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.5.tgz#19e57fd06ccc3848fd1891655b5a447092225b25" - integrity sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg== - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -load-json-file@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" - integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - pinkie-promise "^2.0.0" - strip-bom "^2.0.0" - 
-loader-runner@^4.2.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" - integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== - -loader-utils@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" - integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -loader-utils@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" - integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== - -locate-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity 
sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash-es@^4.17.21: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" - integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== - -lodash.clonedeep@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= - -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= - -lodash.defaults@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" - integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw= - -lodash.difference@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.difference/-/lodash.difference-4.5.0.tgz#9ccb4e505d486b91651345772885a2df27fd017c" - integrity sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw= - -lodash.flatten@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= - -lodash.flattendeep@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" - integrity sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI= - -lodash.isobject@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-3.0.2.tgz#3c8fb8d5b5bf4bf90ae06e14f2a530a4ed935e1d" - integrity sha1-PI+41bW/S/kK4G4U8qUwpO2TXh0= - -lodash.isplainobject@^4.0.6: - version "4.0.6" - 
resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" - integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= - -lodash.mapvalues@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.mapvalues/-/lodash.mapvalues-4.6.0.tgz#1bafa5005de9dd6f4f26668c30ca37230cc9689c" - integrity sha1-G6+lAF3p3W9PJmaMMMo3IwzJaJw= - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= - -lodash.merge@^4.6.1, lodash.merge@^4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lodash.orderby@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.orderby/-/lodash.orderby-4.6.0.tgz#e697f04ce5d78522f54d9338b32b81a3393e4eb3" - integrity sha1-5pfwTOXXhSL1TZM4syuBozk+TrM= - -lodash.pickby@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.pickby/-/lodash.pickby-4.6.0.tgz#7dea21d8c18d7703a27c704c15d3b84a67e33aff" - integrity sha1-feoh2MGNdwOifHBMFdO4SmfjOv8= - -lodash.sortby@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" - integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= - -lodash.union@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88" - integrity sha1-SLtQiECfFvGCFmZkHETdGqrjzYg= - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= - -lodash.zip@^4.2.0: - version 
"4.2.0" - resolved "https://registry.yarnpkg.com/lodash.zip/-/lodash.zip-4.2.0.tgz#ec6662e4896408ed4ab6c542a3990b72cc080020" - integrity sha1-7GZi5IlkCO1KtsVCo5kLcswIACA= - -lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.10: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -log-symbols@4.1.0, log-symbols@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" - integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== - dependencies: - chalk "^4.1.0" - is-unicode-supported "^0.1.0" - -loglevel-plugin-prefix@^0.8.4: - version "0.8.4" - resolved "https://registry.yarnpkg.com/loglevel-plugin-prefix/-/loglevel-plugin-prefix-0.8.4.tgz#2fe0e05f1a820317d98d8c123e634c1bd84ff644" - integrity sha512-WpG9CcFAOjz/FtNht+QJeGpvVl/cdR6P0z6OcXSkr8wFJOsV2GRj2j10JLfjuA4aYkcKCNIEqRGCyTife9R8/g== - -loglevel@^1.6.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.8.0.tgz#e7ec73a57e1e7b419cb6c6ac06bf050b67356114" - integrity sha512-G6A/nJLRgWOuuwdNuA6koovfEV1YpqqAG4pRUlFaz3jj2QNZ8M4vBqnVA+HBTmU/AMNUtlOsMmSpF6NyOjztbA== - -loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" - integrity 
sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== - dependencies: - tslib "^2.0.3" - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - -lru-cache@^4.0.1: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -lz-string@^1.4.4: - version "1.4.4" - resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" - integrity sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= - -magic-string@^0.25.0, magic-string@^0.25.7: - version "0.25.9" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" - integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== - dependencies: - sourcemap-codec "^1.4.8" - -make-dir@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== - dependencies: - pify "^3.0.0" - -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -makeerror@1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -map-obj@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" - integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= - -marky@^1.2.2: - version "1.2.4" - resolved "https://registry.yarnpkg.com/marky/-/marky-1.2.4.tgz#d02bb4c08be2366687c778ecd2a328971ce23d7f" - integrity sha512-zd2/GiSn6U3/jeFVZ0J9CA1LzQ8RfIVvXkb/U0swFHF/zT+dVohTAWjmo2DcIuofmIIIROlwTbd+shSeXmxr0w== - -match-sorter@^6.0.2: - version "6.3.1" - resolved "https://registry.yarnpkg.com/match-sorter/-/match-sorter-6.3.1.tgz#98cc37fda756093424ddf3cbc62bfe9c75b92bda" - integrity sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw== - dependencies: - "@babel/runtime" "^7.12.5" - remove-accents "0.4.2" - -mdn-data@2.0.14: - version "2.0.14" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" - integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== - -mdn-data@2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" - integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== - -media-typer@0.3.0: - version "0.3.0" - resolved 
"https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= - -memfs@^3.1.2, memfs@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.1.tgz#b78092f466a0dce054d63d39275b24c71d3f1305" - integrity sha512-1c9VPVvW5P7I85c35zAdEr1TD5+F11IToIHIlrVIcflfnzPkJa0ZoYEoEdYDP8KgPFoSZ/opDrUsAoZWym3mtw== - dependencies: - fs-monkey "1.0.3" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= - -micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -microseconds@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/microseconds/-/microseconds-0.2.0.tgz#233b25f50c62a65d861f978a4a4f8ec18797dc39" - integrity 
sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA== - -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -mimic-response@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - -mimic-response@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" - integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== - -min-indent@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" - 
integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== - -mini-create-react-context@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e" - integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== - dependencies: - "@babel/runtime" "^7.12.1" - tiny-warning "^1.0.3" - -mini-css-extract-plugin@^2.4.5: - version "2.6.0" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.0.tgz#578aebc7fc14d32c0ad304c2c34f08af44673f5e" - integrity sha512-ndG8nxCEnAemsg4FSgS+yNyHKgkTB4nPKqCOgh65j3/30qqC5RaSQQXMm++Y6sb6E1zRSxPkztj9fqxhS1Eo6w== - dependencies: - schema-utils "^4.0.0" - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimatch@3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@5.0.1, minimatch@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.0.1.tgz#fb9022f7528125187c92bd9e9b6366be1cf3415b" - integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== - dependencies: - brace-expansion "^2.0.1" - -minimatch@^3.0.4, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity 
sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@~3.0.2: - version "3.0.8" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" - integrity sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - -mkdirp-classic@^0.5.2: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - -mkdirp@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - -mkdirp@~0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -mocha@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.0.0.tgz#205447d8993ec755335c4b13deba3d3a13c4def9" - integrity sha512-0Wl+elVUD43Y0BqPZBzZt8Tnkw9CMUdNYnUsTfOM1vuhJVZL+kiesFYsqwBkEEuEixaiPe5ZQdqDgX2jddhmoA== - dependencies: - "@ungap/promise-all-settled" "1.1.2" - ansi-colors "4.1.1" - browser-stdout "1.3.1" - chokidar "3.5.3" - debug "4.3.4" - diff "5.0.0" - 
escape-string-regexp "4.0.0" - find-up "5.0.0" - glob "7.2.0" - he "1.2.0" - js-yaml "4.1.0" - log-symbols "4.1.0" - minimatch "5.0.1" - ms "2.1.3" - nanoid "3.3.3" - serialize-javascript "6.0.0" - strip-json-comments "3.1.1" - supports-color "8.1.1" - workerpool "6.2.1" - yargs "16.2.0" - yargs-parser "20.2.4" - yargs-unparser "2.0.0" - -moment@^2.29.2: - version "2.29.3" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" - integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3, ms@^2.1.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -multicast-dns@^7.2.4: - version "7.2.4" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.4.tgz#cf0b115c31e922aeb20b64e6556cbeb34cf0dd19" - integrity sha512-XkCYOU+rr2Ft3LI6w4ye51M3VK31qJXFIxu0XLw169PtKG0Zx47OrXeVW/GCYOfpC9s1yyyf1S+L8/4LY0J9Zw== - dependencies: - dns-packet "^5.2.2" - thunky "^1.0.2" - -mute-stream@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" - integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== - -nano-time@1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/nano-time/-/nano-time-1.0.0.tgz#b0554f69ad89e22d0907f7a12b0993a5d96137ef" - integrity sha1-sFVPaa2J4i0JB/ehKwmTpdlhN+8= - dependencies: - big-integer "^1.6.16" - -nanoclone@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/nanoclone/-/nanoclone-0.2.1.tgz#dd4090f8f1a110d26bb32c49ed2f5b9235209ed4" - integrity sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA== - -nanoid@3.3.3: - version "3.3.3" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.3.tgz#fd8e8b7aa761fe807dba2d1b98fb7241bb724a25" - integrity sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w== - -nanoid@^2.1.0: - version "2.1.11" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-2.1.11.tgz#ec24b8a758d591561531b4176a01e3ab4f0f0280" - integrity sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA== - -nanoid@^3.3.3: - version "3.3.4" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" - integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= - -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -no-case@^3.0.4: - version "3.0.4" - 
resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" - integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== - dependencies: - lower-case "^2.0.2" - tslib "^2.0.3" - -node-fetch@2.6.7: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - -node-forge@^1, node-forge@^1.3.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= - -node-releases@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.3.tgz#225ee7488e4a5e636da8da52854844f9d716ca96" - integrity sha512-maHFz6OLqYxz+VQyCAtA3PTX4UP/53pa05fyDNc9CwjvJ0yEh6+xBwKsgCxMNhS8taUKBFYxfuiaD9U/55iFaw== - -node-releases@^2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476" - integrity sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ== - -normalize-package-data@^2.3.2: - version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - 
validate-npm-package-license "^3.0.1" - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-range@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= - -normalize-url@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" - integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nth-check@^1.0.2, nth-check@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.1.tgz#2efe162f5c3da06a28959fbd3db75dbeea9f0fc2" - integrity sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w== - dependencies: - boolbase "^1.0.0" - -nwsapi@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" - integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== - -object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= - -object-hash@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - -object-inspect@^1.10.3, object-inspect@^1.12.0, object-inspect@^1.9.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" - integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== - -object-keys@^1.0.12, object-keys@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.0, object.assign@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" - integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== - dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - has-symbols "^1.0.1" - object-keys "^1.1.1" - -object.entries@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" - integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.fromentries@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" - integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - 
-object.getownpropertydescriptors@^2.1.0: - version "2.1.3" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz#b223cf38e17fefb97a63c10c91df72ccb386df9e" - integrity sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.hasown@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.0.tgz#7232ed266f34d197d15cac5880232f7a4790afe5" - integrity sha512-MhjYRfj3GBlhSkDHo6QmvgjRLXQ2zndabdf3nX0yTyZK9rPfxb6uRpAac8HXNLy1GpqWtZ81Qh4v3uOls2sRAg== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.values@^1.1.0, object.values@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" - integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -oblivious-set@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/oblivious-set/-/oblivious-set-1.0.0.tgz#c8316f2c2fb6ff7b11b6158db3234c49f733c566" - integrity sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw== - -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - 
-on-headers@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -onetime@^5.1.0, onetime@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -open@^8.0.9, open@^8.4.0: - version "8.4.0" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" - integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== - dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - -optionator@^0.8.1: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -optionator@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.3" - -ora@^5.4.1: - version 
"5.4.1" - resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" - integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== - dependencies: - bl "^4.1.0" - chalk "^4.1.0" - cli-cursor "^3.1.0" - cli-spinners "^2.5.0" - is-interactive "^1.0.0" - is-unicode-supported "^0.1.0" - log-symbols "^4.1.0" - strip-ansi "^6.0.0" - wcwidth "^1.0.1" - -os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= - -p-cancelable@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" - integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== - -p-iteration@^1.1.8: - version "1.1.8" - resolved "https://registry.yarnpkg.com/p-iteration/-/p-iteration-1.1.8.tgz#14df726d55af368beba81bcc92a26bb1b48e714a" - integrity sha512-IMFBSDIYcPNnW7uWYGrBqmvTiq7W0uB0fJn6shQZs7dlF3OvrHOre+JT9ikSZ7gZS3vWqclVgoQSvToJrns7uQ== - -p-limit@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" - integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== - dependencies: - p-try "^1.0.0" - -p-limit@^2.0.0, p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity 
sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= - dependencies: - p-limit "^1.1.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-retry@^4.5.0: - version "4.6.2" - resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" - integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== - dependencies: - "@types/retry" "0.12.0" - retry "^0.13.1" - -p-try@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" - integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity 
sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -param-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" - integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== - dependencies: - dot-case "^3.0.4" - tslib "^2.0.3" - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" - integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= - dependencies: - error-ex "^1.2.0" - -parse-json@^5.0.0, parse-json@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -parse-ms@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d" - integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA== - -parse-svg-path@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/parse-svg-path/-/parse-svg-path-0.1.2.tgz#7a7ec0d1eb06fa5325c7d3e009b859a09b5d49eb" - integrity sha1-en7A0esG+lMlx9PgCbhZoJtdSes= - -parse5@6.0.1: - version "6.0.1" - resolved 
"https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascal-case@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" - integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -path-exists@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= - dependencies: - pinkie-promise "^2.0.0" - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.6, path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= - -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - -path-type@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" - integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= - dependencies: - graceful-fs "^4.1.2" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" - integrity sha1-elfrVQpng/kRUzH89GY9XI4AelA= - -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= - -picocolors@^0.2.1: - version "0.2.1" - resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.0, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pify@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= - -pirates@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== - -pkg-dir@4.2.0, pkg-dir@^4.1.0, pkg-dir@^4.2.0: - version "4.2.0" - resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pkg-up@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -popper.js@1.16.1-lts: - version "1.16.1-lts" - resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.16.1-lts.tgz#cf6847b807da3799d80ee3d6d2f90df8a3f50b05" - integrity sha512-Kjw8nKRl1m+VrSFCoVGPph93W/qrSO7ZkqPpTf7F4bk/sqcfWK019dWBUpE/fBOsOQY1dks/Bmcbfn1heM/IsA== - -postcss-attribute-case-insensitive@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.0.tgz#39cbf6babf3ded1e4abf37d09d6eda21c644105c" - integrity sha512-b4g9eagFGq9T5SWX4+USfVyjIb3liPnjhHHRMP7FMB2kFVpYyfEscV0wP3eaXhKlcHKUut8lt5BGoeylWA/dBQ== - dependencies: - postcss-selector-parser "^6.0.2" - -postcss-browser-comments@^4: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" - integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== - -postcss-calc@^8.2.3: - version "8.2.4" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" - integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== - dependencies: - postcss-selector-parser "^6.0.9" - postcss-value-parser "^4.2.0" - -postcss-clamp@^4.1.0: - version "4.1.0" - resolved 
"https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" - integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-functional-notation@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.2.tgz#f59ccaeb4ee78f1b32987d43df146109cc743073" - integrity sha512-DXVtwUhIk4f49KK5EGuEdgx4Gnyj6+t2jBSEmxvpIK9QI40tWrpS2Pua8Q7iIZWBrki2QOaeUdEaLPPa91K0RQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-hex-alpha@^8.0.3: - version "8.0.3" - resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.3.tgz#61a0fd151d28b128aa6a8a21a2dad24eebb34d52" - integrity sha512-fESawWJCrBV035DcbKRPAVmy21LpoyiXdPTuHUfWJ14ZRjY7Y7PA6P4g8z6LQGYhU1WAxkTxjIjurXzoe68Glw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-rebeccapurple@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.0.2.tgz#5d397039424a58a9ca628762eb0b88a61a66e079" - integrity sha512-SFc3MaocHaQ6k3oZaFwH8io6MdypkUtEy/eXzXEB1vEQlO3S3oDc/FSZA8AsS04Z25RirQhlDlHLh3dn7XewWw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-colormin@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" - integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - colord "^2.9.1" - postcss-value-parser "^4.2.0" - -postcss-convert-values@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.0.tgz#f8d3abe40b4ce4b1470702a0706343eac17e7c10" - integrity 
sha512-GkyPbZEYJiWtQB0KZ0X6qusqFHUepguBCNFi9t5JJc7I2OTXG7C0twbTLvCfaKOLl3rSXmpAwV7W5txd91V84g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-media@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.0.tgz#1be6aff8be7dc9bf1fe014bde3b71b92bb4552f1" - integrity sha512-FvO2GzMUaTN0t1fBULDeIvxr5IvbDXcIatt6pnJghc736nqNgsGao5NT+5+WVLAQiTt6Cb3YUms0jiPaXhL//g== - -postcss-custom-properties@^12.1.7: - version "12.1.7" - resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.7.tgz#ca470fd4bbac5a87fd868636dafc084bc2a78b41" - integrity sha512-N/hYP5gSoFhaqxi2DPCmvto/ZcRDVjE3T1LiAMzc/bg53hvhcHOLpXOHb526LzBBp5ZlAUhkuot/bfpmpgStJg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-selectors@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.0.tgz#022839e41fbf71c47ae6e316cb0e6213012df5ef" - integrity sha512-/1iyBhz/W8jUepjGyu7V1OPcGbc636snN1yXEQCinb6Bwt7KxsiU7/bLQlp8GwAXzCh7cobBU5odNn/2zQWR8Q== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-dir-pseudo-class@^6.0.4: - version "6.0.4" - resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.4.tgz#9afe49ea631f0cb36fa0076e7c2feb4e7e3f049c" - integrity sha512-I8epwGy5ftdzNWEYok9VjW9whC4xnelAtbajGv4adql4FIF09rnrxnA9Y8xSHN47y7gqFIv10C5+ImsLeJpKBw== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-discard-comments@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.1.tgz#e90019e1a0e5b99de05f63516ce640bd0df3d369" - integrity sha512-5JscyFmvkUxz/5/+TB3QTTT9Gi9jHkcn8dcmmuN68JQcv3aQg4y88yEHHhwFB52l/NkaJ43O0dbksGMAo49nfQ== - -postcss-discard-duplicates@^5.1.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" - integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== - -postcss-discard-empty@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" - integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== - -postcss-discard-overridden@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" - integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== - -postcss-double-position-gradients@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.1.tgz#a12cfdb7d11fa1a99ccecc747f0c19718fb37152" - integrity sha512-jM+CGkTs4FcG53sMPjrrGE0rIvLDdCrqMzgDC5fLI7JHDO7o6QG8C5TQBtExb13hdBdoH9C2QVbG4jo2y9lErQ== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-env-function@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" - integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-flexbugs-fixes@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" - integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== - -postcss-focus-visible@^6.0.4: - version "6.0.4" - 
resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" - integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-focus-within@^5.0.4: - version "5.0.4" - resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" - integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-font-variant@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" - integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== - -postcss-gap-properties@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.3.tgz#6401bb2f67d9cf255d677042928a70a915e6ba60" - integrity sha512-rPPZRLPmEKgLk/KlXMqRaNkYTUpE7YC+bOIQFN5xcu1Vp11Y4faIXv6/Jpft6FMnl6YRxZqDZG0qQOW80stzxQ== - -postcss-image-set-function@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.6.tgz#bcff2794efae778c09441498f40e0c77374870a9" - integrity sha512-KfdC6vg53GC+vPd2+HYzsZ6obmPqOk6HY09kttU19+Gj1nC3S3XBVEXDHxkhxTohgZqzbUb94bKXvKDnYWBm/A== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-initial@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" - integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== - -postcss-js@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" - integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== - dependencies: - camelcase-css "^2.0.1" - -postcss-lab-function@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.2.0.tgz#e054e662c6480202f5760887ec1ae0d153357123" - integrity sha512-Zb1EO9DGYfa3CP8LhINHCcTTCTLI+R3t7AX2mKsDzdgVQ/GkCpHOTgOr6HBHslP7XDdVbqgHW5vvRPMdVANQ8w== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-load-config@^3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" - integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== - dependencies: - lilconfig "^2.0.5" - yaml "^1.10.2" - -postcss-loader@^6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" - integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== - dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.5" - -postcss-logical@^5.0.4: - version "5.0.4" - resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" - integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== - -postcss-media-minmax@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" - integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== - -postcss-merge-longhand@^5.1.4: - version "5.1.4" - resolved 
"https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.4.tgz#0f46f8753989a33260efc47de9a0cdc571f2ec5c" - integrity sha512-hbqRRqYfmXoGpzYKeW0/NCZhvNyQIlQeWVSao5iKWdyx7skLvCfQFGIUsP9NUs3dSbPac2IC4Go85/zG+7MlmA== - dependencies: - postcss-value-parser "^4.2.0" - stylehacks "^5.1.0" - -postcss-merge-rules@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.1.tgz#d327b221cd07540bcc8d9ff84446d8b404d00162" - integrity sha512-8wv8q2cXjEuCcgpIB1Xx1pIy8/rhMPIQqYKNzEdyx37m6gpq83mQQdCxgIkFgliyEnKvdwJf/C61vN4tQDq4Ww== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - cssnano-utils "^3.1.0" - postcss-selector-parser "^6.0.5" - -postcss-minify-font-values@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" - integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-minify-gradients@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" - integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== - dependencies: - colord "^2.9.1" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-params@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.2.tgz#77e250780c64198289c954884ebe3ee4481c3b1c" - integrity sha512-aEP+p71S/urY48HWaRHasyx4WHQJyOYaKpQ6eXl8k0kxg66Wt/30VR6/woh8THgcpRbonJD5IeD+CzNhPi1L8g== - dependencies: - browserslist "^4.16.6" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-selectors@^5.2.0: - version "5.2.0" - resolved 
"https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.0.tgz#17c2be233e12b28ffa8a421a02fc8b839825536c" - integrity sha512-vYxvHkW+iULstA+ctVNx0VoRAR4THQQRkG77o0oa4/mBS0OzGvvzLIvHDv/nNEM0crzN2WIyFU5X7wZhaUK3RA== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== - -postcss-modules-local-by-default@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" - integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== - dependencies: - icss-utils "^5.0.0" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.1.0" - -postcss-modules-scope@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" - integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-modules-values@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" - integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== - dependencies: - icss-utils "^5.0.0" - -postcss-nested@5.0.6: - version "5.0.6" - resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" - integrity 
sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== - dependencies: - postcss-selector-parser "^6.0.6" - -postcss-nesting@^10.1.4: - version "10.1.5" - resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.1.5.tgz#0711260e394cd0e117ff1f879eb1ee9a94550352" - integrity sha512-+NyBBE/wUcJ+NJgVd2FyKIZ414lul6ExqkOt1qXXw7oRzpQ0iT68cVpx+QfHh42QUMHXNoVLlN9InFY9XXK8ng== - dependencies: - "@csstools/selector-specificity" "1.0.0" - postcss-selector-parser "^6.0.10" - -postcss-normalize-charset@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" - integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== - -postcss-normalize-display-values@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" - integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-positions@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.0.tgz#902a7cb97cf0b9e8b1b654d4a43d451e48966458" - integrity sha512-8gmItgA4H5xiUxgN/3TVvXRoJxkAWLW6f/KKhdsH03atg0cB8ilXnrB5PpSshwVu/dD2ZsRFQcR1OEmSBDAgcQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-repeat-style@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.0.tgz#f6d6fd5a54f51a741cc84a37f7459e60ef7a6398" - integrity sha512-IR3uBjc+7mcWGL6CtniKNQ4Rr5fTxwkaDHwMBDGGs1x9IVRkYIT/M4NelZWkAOBdV6v3Z9S46zqaKGlyzHSchw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-string@^5.1.0: - version 
"5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" - integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-timing-functions@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" - integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-unicode@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" - integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== - dependencies: - browserslist "^4.16.6" - postcss-value-parser "^4.2.0" - -postcss-normalize-url@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" - integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== - dependencies: - normalize-url "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-normalize-whitespace@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" - integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" - 
integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== - dependencies: - "@csstools/normalize.css" "*" - postcss-browser-comments "^4" - sanitize.css "*" - -postcss-opacity-percentage@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" - integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== - -postcss-ordered-values@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.1.tgz#0b41b610ba02906a3341e92cab01ff8ebc598adb" - integrity sha512-7lxgXF0NaoMIgyihL/2boNAEZKiW0+HkMhdKMTD93CjW8TdCy2hSdj8lsAo+uwm7EDG16Da2Jdmtqpedl0cMfw== - dependencies: - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-overflow-shorthand@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.3.tgz#ebcfc0483a15bbf1b27fdd9b3c10125372f4cbc2" - integrity sha512-CxZwoWup9KXzQeeIxtgOciQ00tDtnylYIlJBBODqkgS/PU2jISuWOL/mYLHmZb9ZhZiCaNKsCRiLp22dZUtNsg== - -postcss-page-break@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" - integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== - -postcss-place@^7.0.4: - version "7.0.4" - resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.4.tgz#eb026650b7f769ae57ca4f938c1addd6be2f62c9" - integrity sha512-MrgKeiiu5OC/TETQO45kV3npRjOFxEHthsqGtkh3I1rPbZSbXGD/lZVi9j13cYh+NA8PIAPyk6sGjT9QbRyvSg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-preset-env@^7.0.1: - version "7.5.0" - resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.5.0.tgz#0c1f23933597d55dab4a90f61eda30b76e710658" - 
integrity sha512-0BJzWEfCdTtK2R3EiKKSdkE51/DI/BwnhlnicSW482Ym6/DGHud8K0wGLcdjip1epVX0HKo4c8zzTeV/SkiejQ== - dependencies: - "@csstools/postcss-color-function" "^1.1.0" - "@csstools/postcss-font-format-keywords" "^1.0.0" - "@csstools/postcss-hwb-function" "^1.0.0" - "@csstools/postcss-ic-unit" "^1.0.0" - "@csstools/postcss-is-pseudo-class" "^2.0.2" - "@csstools/postcss-normalize-display-values" "^1.0.0" - "@csstools/postcss-oklab-function" "^1.1.0" - "@csstools/postcss-progressive-custom-properties" "^1.3.0" - "@csstools/postcss-stepped-value-functions" "^1.0.0" - "@csstools/postcss-unset-value" "^1.0.0" - autoprefixer "^10.4.6" - browserslist "^4.20.3" - css-blank-pseudo "^3.0.3" - css-has-pseudo "^3.0.4" - css-prefers-color-scheme "^6.0.3" - cssdb "^6.6.1" - postcss-attribute-case-insensitive "^5.0.0" - postcss-clamp "^4.1.0" - postcss-color-functional-notation "^4.2.2" - postcss-color-hex-alpha "^8.0.3" - postcss-color-rebeccapurple "^7.0.2" - postcss-custom-media "^8.0.0" - postcss-custom-properties "^12.1.7" - postcss-custom-selectors "^6.0.0" - postcss-dir-pseudo-class "^6.0.4" - postcss-double-position-gradients "^3.1.1" - postcss-env-function "^4.0.6" - postcss-focus-visible "^6.0.4" - postcss-focus-within "^5.0.4" - postcss-font-variant "^5.0.0" - postcss-gap-properties "^3.0.3" - postcss-image-set-function "^4.0.6" - postcss-initial "^4.0.1" - postcss-lab-function "^4.2.0" - postcss-logical "^5.0.4" - postcss-media-minmax "^5.0.0" - postcss-nesting "^10.1.4" - postcss-opacity-percentage "^1.1.2" - postcss-overflow-shorthand "^3.0.3" - postcss-page-break "^3.0.4" - postcss-place "^7.0.4" - postcss-pseudo-class-any-link "^7.1.2" - postcss-replace-overflow-wrap "^4.0.0" - postcss-selector-not "^5.0.0" - postcss-value-parser "^4.2.0" - -postcss-pseudo-class-any-link@^7.1.2: - version "7.1.3" - resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.3.tgz#0e4753518b9f6caa8b649c75b56e69e391d0c12f" - integrity 
sha512-I9Yp1VV2r8xFwg/JrnAlPCcKmutv6f6Ig6/CHFPqGJiDgYXM9C+0kgLfK4KOXbKNw+63QYl4agRUB0Wi9ftUIg== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-reduce-initial@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" - integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - -postcss-reduce-transforms@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" - integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-replace-overflow-wrap@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" - integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== - -postcss-selector-not@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-5.0.0.tgz#ac5fc506f7565dd872f82f5314c0f81a05630dc7" - integrity sha512-/2K3A4TCP9orP4TNS7u3tGdRFVKqz/E6pX3aGnriPG0jU78of8wsUcqE4QAhWEU0d+WnMSF93Ah3F//vUtK+iQ== - dependencies: - balanced-match "^1.0.0" - -postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: - version "6.0.10" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" - integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== - 
dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-svgo@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" - integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== - dependencies: - postcss-value-parser "^4.2.0" - svgo "^2.7.0" - -postcss-unique-selectors@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" - integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" - integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== - -postcss@^7.0.35: - version "7.0.39" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.3.5, postcss@^8.4.12, postcss@^8.4.4, postcss@^8.4.7: - version "8.4.13" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.13.tgz#7c87bc268e79f7f86524235821dfdf9f73e5d575" - integrity sha512-jtL6eTBrza5MPzy8oJLFuUscHDXTV5KcLlqAWHl5q5WYRfnNRGSmOZmOZ1T6Gy7A99mOZfqungmZMpMmCVJ8ZA== - dependencies: - nanoid "^3.3.3" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity 
sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= - -prettier@^2.2.1: - version "2.6.2" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.6.2.tgz#e26d71a18a74c3d0f0597f55f01fb6c06c206032" - integrity sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew== - -pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: - version "5.6.0" - resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" - integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== - -pretty-error@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" - integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== - dependencies: - lodash "^4.17.20" - renderkid "^3.0.0" - -pretty-format@^27.0.0, pretty-format@^27.0.2, pretty-format@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" - integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== - dependencies: - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^17.0.1" - -pretty-format@^28.1.0: - version "28.1.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.0.tgz#8f5836c6a0dfdb834730577ec18029052191af55" - integrity sha512-79Z4wWOYCdvQkEoEuSlBhHJqWeZ8D8YRPiPctJFCtvuaClGpiwiQYSCUOE6IEKUbbFukKOTFIUAXE8N4EQTo1Q== - dependencies: - "@jest/schemas" "^28.0.2" - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -pretty-ms@^7.0.0: - 
version "7.0.1" - resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-7.0.1.tgz#7d903eaab281f7d8e03c66f867e239dc32fb73e8" - integrity sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q== - dependencies: - parse-ms "^2.1.0" - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -progress@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -promise@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e" - integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== - dependencies: - asap "~2.0.6" - -prompts@^2.0.1, prompts@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: - version "15.8.1" - resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" - integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.13.1" - -propagating-hammerjs@^1.4.7: - version "1.5.0" - resolved 
"https://registry.yarnpkg.com/propagating-hammerjs/-/propagating-hammerjs-1.5.0.tgz#223d58465489b64879fb0cef2c99ba92b294c239" - integrity sha512-3PUXWmomwutoZfydC+lJwK1bKCh6sK6jZGB31RUX6+4EXzsbkDZrK4/sVR7gBrvJaEIwpTVyxQUAd29FKkmVdw== - dependencies: - hammerjs "^2.0.8" - -property-expr@^2.0.4: - version "2.0.5" - resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.5.tgz#278bdb15308ae16af3e3b9640024524f4dc02cb4" - integrity sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA== - -proxy-addr@~2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -proxy-from-env@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= - -psl@^1.1.33: - version "1.8.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -puppeteer-core@^13.1.3: - version "13.7.0" - resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-13.7.0.tgz#3344bee3994163f49120a55ddcd144a40575ba5b" - integrity sha512-rXja4vcnAzFAP1OVLq/5dWNfwBGuzcOARJ6qGV7oAZhnLmVRU8G5MsdeQEAOy332ZhkIOnn9jp15R89LKHyp2Q== - dependencies: - cross-fetch "3.1.5" - debug "4.3.4" - devtools-protocol "0.0.981744" - extract-zip "2.0.1" - https-proxy-agent "5.0.1" - pkg-dir "4.2.0" - progress "2.0.3" - proxy-from-env "1.1.0" - rimraf "3.0.2" - tar-fs "2.1.1" - unbzip2-stream "1.4.3" - ws "8.5.0" - -q@^1.1.2: - version "1.5.1" - resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" - integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= - -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" - -query-selector-shadow-dom@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/query-selector-shadow-dom/-/query-selector-shadow-dom-1.0.0.tgz#8fa7459a4620f094457640e74e953a9dbe61a38e" - integrity sha512-bK0/0cCI+R8ZmOF1QjT7HupDUYCxbf/9TJgAmSXQxZpftXmTAeil9DRoCnTDkWbvOyZzhcMBwKpptWcdkGFIMg== - -query-state-core@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/query-state-core/-/query-state-core-2.5.0.tgz#7cac3fdc1f79c58c22f35efe8a5f5880f55728d3" - integrity sha512-XVo7I/K+gKXqu+HlxtGXfjUtQ+LPjs5bTHB4RC4vDs6yCYLmchc4IxcZWt5EdZZLqIg/CuY+PUxN141t3J17fQ== - -querystringify@^2.1.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" - integrity 
sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -quick-lru@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" - integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== - -raf-schd@^4.0.2: - version "4.0.3" - resolved "https://registry.yarnpkg.com/raf-schd/-/raf-schd-4.0.3.tgz#5d6c34ef46f8b2a0e880a8fcdb743efc5bfdbc1a" - integrity sha512-tQkJl2GRWh83ui2DiPTJz9wEiMN20syf+5oKfB03yYP7ioZcJwsIK8FjrtLwH1m7C7e+Tt2yYBlrOpdT+dyeIQ== - -raf@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" - integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== - dependencies: - performance-now "^2.1.0" - -randombytes@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity 
sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -react-app-polyfill@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" - integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== - dependencies: - core-js "^3.19.2" - object-assign "^4.1.1" - promise "^8.1.0" - raf "^3.4.1" - regenerator-runtime "^0.13.9" - whatwg-fetch "^3.6.2" - -react-cron-generator@^1.3.5: - version "1.3.5" - resolved "https://registry.yarnpkg.com/react-cron-generator/-/react-cron-generator-1.3.5.tgz#3fc819f166f0403d6e93f96b548e4d7934ab90af" - integrity sha512-gasLva6PV9d/8WrpAf2SHyMse71XXS8LiLWgKKCCty/rbzmpNNotNM9VCOVpzyuLiWikkZM9DQ5K5cY9cngTow== - dependencies: - cronstrue "1.92.0" - -react-data-table-component@^6.11.8: - version "6.11.8" - resolved "https://registry.yarnpkg.com/react-data-table-component/-/react-data-table-component-6.11.8.tgz#483677d22385c0e4458cda299954b2dc4ff080fc" - integrity sha512-ukKJKaKNDU5+jEEZFo16+4zwQPRvw1Z13S7FOj4dr73JWRf/lKkE108jciK2tj1JPMub3qXG2h0zXDn5y2WUfQ== - dependencies: - deepmerge "^4.2.2" - lodash.orderby "^4.6.0" - shortid "^2.2.16" - -react-dev-utils@^12.0.1: - version "12.0.1" - resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" - integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== - dependencies: - "@babel/code-frame" "^7.16.0" - address "^1.1.2" - browserslist "^4.18.1" - chalk "^4.1.2" - cross-spawn "^7.0.3" - detect-port-alt "^1.1.6" - escape-string-regexp "^4.0.0" - filesize "^8.0.6" - find-up "^5.0.0" - fork-ts-checker-webpack-plugin "^6.5.0" - global-modules "^2.0.0" - globby "^11.0.4" - gzip-size "^6.0.0" - immer "^9.0.7" - 
is-root "^2.1.0" - loader-utils "^3.2.0" - open "^8.4.0" - pkg-up "^3.1.0" - prompts "^2.4.2" - react-error-overlay "^6.0.11" - recursive-readdir "^2.2.2" - shell-quote "^1.7.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - -react-dom@^16.8.0: - version "16.14.0" - resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.14.0.tgz#7ad838ec29a777fb3c75c3a190f661cf92ab8b89" - integrity sha512-1gCeQXDLoIqMgqD3IO2Ah9bnf0w9kzhwN5q4FGnHZ67hBm9yePzB5JJAIQCc8x3pFnNlwFq4RidZggNAAkzWWw== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - prop-types "^15.6.2" - scheduler "^0.19.1" - -react-error-overlay@^6.0.11: - version "6.0.11" - resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" - integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== - -react-fast-compare@^2.0.1: - version "2.0.4" - resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-2.0.4.tgz#e84b4d455b0fec113e0402c329352715196f81f9" - integrity sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw== - -react-fast-compare@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.0.tgz#641a9da81b6a6320f270e89724fb45a0b39e43bb" - integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA== - -react-helmet@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/react-helmet/-/react-helmet-6.1.0.tgz#a750d5165cb13cf213e44747502652e794468726" - integrity sha512-4uMzEY9nlDlgxr61NL3XbKRy1hEkXmKNXhjbAIOVw5vcFrsdYbH2FEwcNyWvWinl103nXgzYNlns9ca+8kFiWw== - dependencies: - object-assign "^4.1.1" - prop-types "^15.7.2" - react-fast-compare "^3.1.1" - react-side-effect "^2.1.0" - -react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: - version "16.13.1" - resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -"react-is@^16.8.0 || ^17.0.0", react-is@^17.0.1, react-is@^17.0.2: - version "17.0.2" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" - integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== - -react-is@^18.0.0: - version "18.1.0" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.1.0.tgz#61aaed3096d30eacf2a2127118b5b41387d32a67" - integrity sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg== - -react-query@^3.19.4: - version "3.39.0" - resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.39.0.tgz#0caca7b0da98e65008bbcd4df0d25618c2100050" - integrity sha512-Od0IkSuS79WJOhzWBx/ys0x13+7wFqgnn64vBqqAAnZ9whocVhl/y1padD5uuZ6EIkXbFbInax0qvY7zGM0thA== - dependencies: - "@babel/runtime" "^7.5.5" - broadcast-channel "^3.4.1" - match-sorter "^6.0.2" - -react-refresh@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" - integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== - -react-resize-detector@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-5.2.0.tgz#992083834432308c551a8251a2c52306d9d16718" - integrity sha512-PQAc03J2eyhvaiWgEdQ8+bKbbyGJzLEr70KuivBd1IEmP/iewNakLUMkxm6MWnDqsRPty85pioyg8MvGb0qC8A== - dependencies: - lodash "^4.17.20" - prop-types "^15.7.2" - raf-schd "^4.0.2" - resize-observer-polyfill "^1.5.1" - -react-router-dom@^5.2.0: - version "5.3.1" - resolved 
"https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.1.tgz#0151baf2365c5fcd8493f6ec9b9b31f34d0f8ae1" - integrity sha512-f0pj/gMAbv9e8gahTmCEY20oFhxhrmHwYeIwH5EO5xu0qme+wXtsdB8YfUOAZzUz4VaXmb58m3ceiLtjMhqYmQ== - dependencies: - "@babel/runtime" "^7.12.13" - history "^4.9.0" - loose-envify "^1.3.1" - prop-types "^15.6.2" - react-router "5.3.1" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - -react-router-use-location-state@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/react-router-use-location-state/-/react-router-use-location-state-2.5.0.tgz#4fe1cb6aa3cd5f8f997cfb77e7a6d5d8a55ea00f" - integrity sha512-p0duQtatgL8SZzIITI3He2MP/4d9x9GQHJs93spPYAckVrvCRLAlQxS7k04RHYZb0e4yxwW76Rp/PBvezyez6g== - dependencies: - use-location-state "^2.5.0" - -react-router@5.3.1, react-router@^5.2.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.1.tgz#b13e84a016c79b9e80dde123ca4112c4f117e3cf" - integrity sha512-v+zwjqb7bakqgF+wMVKlAPTca/cEmPOvQ9zt7gpSNyPXau1+0qvuYZ5BWzzNDP1y6s15zDwgb9rPN63+SIniRQ== - dependencies: - "@babel/runtime" "^7.12.13" - history "^4.9.0" - hoist-non-react-statics "^3.1.0" - loose-envify "^1.3.1" - mini-create-react-context "^0.4.0" - path-to-regexp "^1.7.0" - prop-types "^15.6.2" - react-is "^16.6.0" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - -react-scripts@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" - integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== - dependencies: - "@babel/core" "^7.16.0" - "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" - "@svgr/webpack" "^5.5.0" - babel-jest "^27.4.2" - babel-loader "^8.2.3" - babel-plugin-named-asset-import "^0.3.8" - babel-preset-react-app "^10.0.1" - bfj "^7.0.2" - browserslist "^4.18.1" - camelcase "^6.2.1" - case-sensitive-paths-webpack-plugin "^2.4.0" - css-loader 
"^6.5.1" - css-minimizer-webpack-plugin "^3.2.0" - dotenv "^10.0.0" - dotenv-expand "^5.1.0" - eslint "^8.3.0" - eslint-config-react-app "^7.0.1" - eslint-webpack-plugin "^3.1.1" - file-loader "^6.2.0" - fs-extra "^10.0.0" - html-webpack-plugin "^5.5.0" - identity-obj-proxy "^3.0.0" - jest "^27.4.3" - jest-resolve "^27.4.2" - jest-watch-typeahead "^1.0.0" - mini-css-extract-plugin "^2.4.5" - postcss "^8.4.4" - postcss-flexbugs-fixes "^5.0.2" - postcss-loader "^6.2.1" - postcss-normalize "^10.0.1" - postcss-preset-env "^7.0.1" - prompts "^2.4.2" - react-app-polyfill "^3.0.0" - react-dev-utils "^12.0.1" - react-refresh "^0.11.0" - resolve "^1.20.0" - resolve-url-loader "^4.0.0" - sass-loader "^12.3.0" - semver "^7.3.5" - source-map-loader "^3.0.0" - style-loader "^3.3.1" - tailwindcss "^3.0.2" - terser-webpack-plugin "^5.2.5" - webpack "^5.64.4" - webpack-dev-server "^4.6.0" - webpack-manifest-plugin "^4.0.2" - workbox-webpack-plugin "^6.4.1" - optionalDependencies: - fsevents "^2.3.2" - -react-side-effect@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/react-side-effect/-/react-side-effect-2.1.1.tgz#66c5701c3e7560ab4822a4ee2742dee215d72eb3" - integrity sha512-2FoTQzRNTncBVtnzxFOk2mCpcfxQpenBMbk5kSVBg5UcPqV9fRbgY2zhb7GTWWOlpFmAxhClBDlIq8Rsubz1yQ== - -react-transition-group@^4.4.0: - version "4.4.2" - resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.2.tgz#8b59a56f09ced7b55cbd53c36768b922890d5470" - integrity sha512-/RNYfRAMlZwDSr6z4zNKV6xu53/e2BuaBbGhbyYIXTrmgu/bGHzmqOs7mJSJBHy9Ud+ApHx3QjrkKSp1pxvlFg== - dependencies: - "@babel/runtime" "^7.5.5" - dom-helpers "^5.0.1" - loose-envify "^1.4.0" - prop-types "^15.6.2" - -react-vis-timeline-2@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/react-vis-timeline-2/-/react-vis-timeline-2-2.1.6.tgz#4b9a8d5dc1cde31ee7b084952531477f154d9c09" - integrity sha512-/RggBBK3E89E1pl4DPe3lEZ+zR3cUZWKpO+/i9vtgv/XiPgSp/QiEY1s05F0AvLjHeYUVgks3XqL1pQxAFossQ== - 
dependencies: - "@egjs/hammerjs" "^2.0.17" - component-emitter "^1.3.0" - keycharm "^0.3.1" - propagating-hammerjs "^1.4.7" - uuid "^7.0.0" - vis-data "^7.1.0" - vis-timeline "^7.4.2" - vis-util "^4.3.4" - -react@^16.8.0: - version "16.14.0" - resolved "https://registry.yarnpkg.com/react/-/react-16.14.0.tgz#94d776ddd0aaa37da3eda8fc5b6b18a4c9a3114d" - integrity sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - prop-types "^15.6.2" - -read-pkg-up@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" - integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= - dependencies: - find-up "^1.0.0" - read-pkg "^1.0.0" - -read-pkg@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" - integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= - dependencies: - load-json-file "^1.0.0" - normalize-package-data "^2.3.2" - path-type "^1.0.0" - -readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.5: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits 
"^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdir-glob@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.1.tgz#f0e10bb7bf7bfa7e0add8baffdc54c3f7dbee6c4" - integrity sha512-91/k1EzZwDx6HbERR+zucygRFfiPl2zkIYZtv3Jjr6Mn7SkKcVct8aVO+sSRiGMc6fLf72du3d92/uY63YPdEA== - dependencies: - minimatch "^3.0.4" - -readdirp@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -recursive-readdir@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" - integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== - dependencies: - minimatch "3.0.4" - -redent@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" - integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== - dependencies: - indent-string "^4.0.0" - strip-indent "^3.0.0" - -regenerate-unicode-properties@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz#7f442732aa7934a3740c779bb9b3340dccc1fb56" - integrity sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw== - dependencies: - regenerate "^1.4.2" - -regenerate@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" - integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== - -regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: - 
version "0.13.9" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== - -regenerator-transform@^0.15.0: - version "0.15.0" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" - integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== - dependencies: - "@babel/runtime" "^7.8.4" - -regex-parser@^2.2.11: - version "2.2.11" - resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" - integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== - -regexp.prototype.flags@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.1.tgz#b3f4c0059af9e47eca9f3f660e51d81307e72307" - integrity sha512-pMR7hBVUUGI7PMA37m2ofIdQCsomVnas+Jn5UPGAHQ+/LlwKm/aTLJHdasmHRzlfeZwHiAOaRSo2rbBDm3nNUQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -regexpp@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" - integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== - -regexpu-core@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.0.1.tgz#c531122a7840de743dcf9c83e923b5560323ced3" - integrity sha512-CriEZlrKK9VJw/xQGJpQM5rY88BtuL8DM+AEwvcThHilbxiTAy8vq4iJnd2tqq8wLmjbGZzP7ZcKFjbGkmEFrw== - dependencies: - regenerate "^1.4.2" - regenerate-unicode-properties "^10.0.1" - regjsgen "^0.6.0" - regjsparser "^0.8.2" - unicode-match-property-ecmascript "^2.0.0" - unicode-match-property-value-ecmascript "^2.0.0" - 
-regjsgen@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.6.0.tgz#83414c5354afd7d6627b16af5f10f41c4e71808d" - integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA== - -regjsparser@^0.8.2: - version "0.8.4" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.8.4.tgz#8a14285ffcc5de78c5b95d62bbf413b6bc132d5f" - integrity sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA== - dependencies: - jsesc "~0.5.0" - -relateurl@^0.2.7: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= - -remove-accents@0.4.2: - version "0.4.2" - resolved "https://registry.yarnpkg.com/remove-accents/-/remove-accents-0.4.2.tgz#0a43d3aaae1e80db919e07ae254b285d9e1c7bb5" - integrity sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U= - -renderkid@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" - integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== - dependencies: - css-select "^4.1.3" - dom-converter "^0.2.0" - htmlparser2 "^6.1.0" - lodash "^4.17.21" - strip-ansi "^6.0.1" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - -require-from-string@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -requires-port@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= - -resize-observer-polyfill@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz#0e9020dd3d21024458d4ebd27e23e40269810464" - integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== - -resolve-alpn@^1.0.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" - integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-pathname@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" - integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== - -resolve-url-loader@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" - integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== - dependencies: - adjust-sourcemap-loader "^4.0.0" - convert-source-map "^1.7.0" - loader-utils "^2.0.0" - postcss "^7.0.35" - source-map "0.6.1" - -resolve.exports@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" - integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== - -resolve@^1.10.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0: - version "1.22.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" - integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== - dependencies: - is-core-module "^2.8.1" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -resolve@^2.0.0-next.3: - version "2.0.0-next.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" - integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== - dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" - -responselike@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.0.tgz#26391bcc3174f750f9a79eacc40a12a5c42d7723" - integrity sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw== - dependencies: - lowercase-keys "^2.0.0" - -resq@^1.9.1: - version "1.10.2" - resolved "https://registry.yarnpkg.com/resq/-/resq-1.10.2.tgz#cedf4f20d53f6e574b1e12afbda446ad9576c193" - integrity sha512-HmgVS3j+FLrEDBTDYysPdPVF9/hioDMJ/otOiQDKqk77YfZeeLOj0qi34yObumcud1gBpk+wpBTEg4kMicD++A== - 
dependencies: - fast-deep-equal "^2.0.1" - -restore-cursor@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" - integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== - dependencies: - onetime "^5.1.0" - signal-exit "^3.0.2" - -retry@^0.13.1: - version "0.13.1" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rgb2hex@0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/rgb2hex/-/rgb2hex-0.2.5.tgz#f82230cd3ab1364fa73c99be3a691ed688f8dbdc" - integrity sha512-22MOP1Rh7sAo1BZpDG6R5RFYzR2lYEgwq7HEmyW2qcsOqR2lQKmn+O//xV3YG/0rrhMC6KVX2hU+ZXuaw9a5bw== - -rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -rison@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/rison/-/rison-0.1.1.tgz#4dcc0557b241aff60e76178e7792135713f33120" - integrity sha1-TcwFV7JBr/YOdheOd5ITVxPzMSA= - -rollup-plugin-terser@^7.0.0: - version "7.0.2" - resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" - integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== - dependencies: - "@babel/code-frame" "^7.10.4" - jest-worker 
"^26.2.1" - serialize-javascript "^4.0.0" - terser "^5.0.0" - -rollup@^2.43.1: - version "2.72.1" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.72.1.tgz#861c94790537b10008f0ca0fbc60e631aabdd045" - integrity sha512-NTc5UGy/NWFGpSqF1lFY8z9Adri6uhyMLI6LvPAXdBKoPRFhIIiBUpt+Qg2awixqO3xvzSijjhnb4+QEZwJmxA== - optionalDependencies: - fsevents "~2.3.2" - -run-async@^2.4.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" - integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -rw@1: - version "1.3.3" - resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" - integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= - -rxjs@^7.2.0, rxjs@^7.5.5: - version "7.5.5" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.5.tgz#2ebad89af0f560f460ad5cc4213219e1f7dd4e9f" - integrity sha512-sy+H0pQofO95VDmFLzyaw9xNJU4KTRSwQIGM6+iG3SypAtCiLDzpeG8sJrNCWn2Up9km+KhkvTdbkrdy+yzZdw== - dependencies: - tslib "^2.1.0" - -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity 
sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -sanitize.css@*: - version "13.0.0" - resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" - integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== - -sass-loader@^12.3.0: - version "12.6.0" - resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" - integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== - dependencies: - klona "^2.0.4" - neo-async "^2.6.2" - -sass@^1.49.9: - version "1.51.0" - resolved "https://registry.yarnpkg.com/sass/-/sass-1.51.0.tgz#25ea36cf819581fe1fe8329e8c3a4eaaf70d2845" - integrity sha512-haGdpTgywJTvHC2b91GSq+clTKGbtkkZmVAb82jZQN/wTy6qs8DdFm2lhEQbEwrY0QDRgSQ3xDurqM977C3noA== - dependencies: - chokidar ">=3.0.0 <4.0.0" - immutable "^4.0.0" - source-map-js ">=0.6.2 <2.0.0" - -sax@~1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== - -saxes@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== - dependencies: - xmlchars "^2.2.0" - -scheduler@^0.19.1: - version "0.19.1" - resolved 
"https://registry.yarnpkg.com/scheduler/-/scheduler-0.19.1.tgz#4f3e2ed2c1a7d65681f4c854fa8c5a1ccb40f196" - integrity sha512-n/zwRWRYSUj0/3g/otKDRPMh6qv2SYMWNq85IEa8iZyAv8od9zDYpGSnpBEjNgcMNq6Scbu5KfIPxNF72R/2EA== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - -schema-utils@2.7.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - -schema-utils@^2.6.5: - version "2.7.1" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== - dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" - -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" - integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" - integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== - dependencies: - "@types/json-schema" "^7.0.9" - ajv "^8.8.0" - ajv-formats "^2.1.1" - ajv-keywords "^5.0.0" - -select-hose@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= - 
-selenium-standalone@^8.0.3: - version "8.1.3" - resolved "https://registry.yarnpkg.com/selenium-standalone/-/selenium-standalone-8.1.3.tgz#24ad1ee575a88492e640f9d5abad73a7f10808d1" - integrity sha512-Xo6wU8nj9DNrhPKVgtZLCX4oa7Yp0L+XLB7PRozkx//LBaFogQBPZHzDB0MFkpQQIohcqYXn8wZGwVuewO99Xw== - dependencies: - commander "^9.0.0" - cross-spawn "^7.0.3" - debug "^4.3.1" - fs-extra "^10.0.0" - got "^11.8.2" - is-port-reachable "^3.0.0" - lodash.mapvalues "^4.6.0" - lodash.merge "^4.6.2" - minimist "^1.2.5" - mkdirp "^1.0.4" - progress "2.0.3" - tar-stream "2.2.0" - which "^2.0.2" - yauzl "^2.10.0" - -selfsigned@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.1.tgz#8b2df7fa56bf014d19b6007655fff209c0ef0a56" - integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ== - dependencies: - node-forge "^1" - -"semver@2 || 3 || 4 || 5": - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -semver@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" - integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - -semver@^7.3.2, semver@^7.3.5: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - 
lru-cache "^6.0.0" - -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" - -serialize-error@^8.0.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-8.1.0.tgz#3a069970c712f78634942ddd50fbbc0eaebe2f67" - integrity sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ== - dependencies: - type-fest "^0.20.2" - -serialize-javascript@6.0.0, serialize-javascript@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" - integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== - dependencies: - randombytes "^2.1.0" - -serialize-javascript@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" - integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== - dependencies: - randombytes "^2.1.0" - -serve-index@^1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.15.0: - version "1.15.0" - resolved 
"https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" - -setprototypeof@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -shallowequal@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" - integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shell-quote@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" - integrity 
sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== - -shortid@^2.2.16: - version "2.2.16" - resolved "https://registry.yarnpkg.com/shortid/-/shortid-2.2.16.tgz#b742b8f0cb96406fd391c76bfc18a67a57fe5608" - integrity sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g== - dependencies: - nanoid "^2.1.0" - -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -signal-exit@^3.0.2, signal-exit@^3.0.3: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -slash@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== - -sockjs@^0.3.21: - version "0.3.24" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" - integrity 
sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== - dependencies: - faye-websocket "^0.11.3" - uuid "^8.3.2" - websocket-driver "^0.7.4" - -source-list-map@^2.0.0, source-list-map@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.1, source-map-js@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== - -source-map-loader@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" - integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== - dependencies: - abab "^2.0.5" - iconv-lite "^0.6.3" - source-map-js "^1.0.1" - -source-map-resolve@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" - integrity sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - -source-map-support@^0.5.6, source-map-support@~0.5.20: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, 
source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.5.0: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= - -source-map@^0.7.3: - version "0.7.3" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" - integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== - -source-map@^0.8.0-beta.0, source-map@~0.8.0-beta.0: - version "0.8.0-beta.0" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" - integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== - dependencies: - whatwg-url "^7.0.0" - -sourcemap-codec@^1.4.8: - version "1.4.8" - resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" - integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== - -spdx-correct@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" - integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" - integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== - 
-spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.11" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz#50c0d8c40a14ec1bf449bae69a0ea4685a9d9f95" - integrity sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g== - -spdy-transport@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" - integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - -split2@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/split2/-/split2-4.1.0.tgz#101907a24370f85bb782f08adaabe4e281ecf809" - integrity sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= - -stable@^0.1.8: - version "0.1.8" - resolved 
"https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -stack-utils@^2.0.3: - version "2.0.5" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" - integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== - dependencies: - escape-string-regexp "^2.0.0" - -stackframe@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.1.tgz#1033a3473ee67f08e2f2fc8eba6aef4f845124e1" - integrity sha512-h88QkzREN/hy8eRdyNhhsO7RSJ5oyTqxxmmn0dzBIMUclZsjpfmrsg81vp8mjjAs2vAZ72nyWxRUwSwmh0e4xg== - -state-local@^1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/state-local/-/state-local-1.0.7.tgz#da50211d07f05748d53009bee46307a37db386d5" - integrity sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w== - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= - -stream-buffers@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/stream-buffers/-/stream-buffers-3.0.2.tgz#5249005a8d5c2d00b3a32e6e0a6ea209dc4f3521" - integrity sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ== - -string-length@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity 
sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-length@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" - integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== - dependencies: - char-regex "^2.0.0" - strip-ansi "^7.0.1" - -string-natural-compare@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" - integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string.prototype.matchall@^4.0.6: - version "4.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" - integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - get-intrinsic "^1.1.1" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - regexp.prototype.flags "^1.4.1" - side-channel "^1.0.4" - -string.prototype.trimend@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" - integrity 
sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -string.prototype.trimstart@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" - integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -stringify-object@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" - integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== - dependencies: - get-own-enumerable-property-symbols "^3.0.0" - is-obj "^1.0.1" - is-regexp "^1.0.0" - -strip-ansi@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" - integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== - dependencies: - ansi-regex "^6.0.1" - -strip-bom@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" - integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= - dependencies: - is-utf8 "^0.2.0" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-comments@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" - integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-indent@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" - integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== - dependencies: - 
min-indent "^1.0.0" - -strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -style-loader@^3.3.1: - version "3.3.1" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" - integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== - -styled-components@^5.3.0: - version "5.3.5" - resolved "https://registry.yarnpkg.com/styled-components/-/styled-components-5.3.5.tgz#a750a398d01f1ca73af16a241dec3da6deae5ec4" - integrity sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/traverse" "^7.4.5" - "@emotion/is-prop-valid" "^1.1.0" - "@emotion/stylis" "^0.8.4" - "@emotion/unitless" "^0.7.4" - babel-plugin-styled-components ">= 1.12.0" - css-to-react-native "^3.0.0" - hoist-non-react-statics "^3.0.0" - shallowequal "^1.1.0" - supports-color "^5.5.0" - -stylehacks@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" - integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== - dependencies: - browserslist "^4.16.6" - postcss-selector-parser "^6.0.4" - -suffix@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/suffix/-/suffix-0.1.1.tgz#cc58231646a0ef1102f79478ef3a9248fd9c842f" - integrity sha1-zFgjFkag7xEC95R47zqSSP2chC8= - -supports-color@8.1.1, supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - 
integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - -supports-color@^5.3.0, supports-color@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" - integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -svg-parser@^2.0.2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" - integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== - -svgo@^1.2.2: - 
version "1.3.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" - integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== - dependencies: - chalk "^2.4.1" - coa "^2.0.2" - css-select "^2.0.0" - css-select-base-adapter "^0.1.1" - css-tree "1.0.0-alpha.37" - csso "^4.0.2" - js-yaml "^3.13.1" - mkdirp "~0.5.1" - object.values "^1.1.0" - sax "~1.2.4" - stable "^0.1.8" - unquote "~1.1.1" - util.promisify "~1.0.0" - -svgo@^2.7.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== - dependencies: - "@trysound/sax" "0.2.0" - commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" - picocolors "^1.0.0" - stable "^0.1.8" - -symbol-tree@^3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" - integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== - -tailwindcss@^3.0.2: - version "3.0.24" - resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.0.24.tgz#22e31e801a44a78a1d9a81ecc52e13b69d85704d" - integrity sha512-H3uMmZNWzG6aqmg9q07ZIRNIawoiEcNFKDfL+YzOPuPsXuDXxJxB9icqzLgdzKNwjG3SAro2h9SYav8ewXNgig== - dependencies: - arg "^5.0.1" - chokidar "^3.5.3" - color-name "^1.1.4" - detective "^5.2.0" - didyoumean "^1.2.2" - dlv "^1.1.3" - fast-glob "^3.2.11" - glob-parent "^6.0.2" - is-glob "^4.0.3" - lilconfig "^2.0.5" - normalize-path "^3.0.0" - object-hash "^3.0.0" - picocolors "^1.0.0" - postcss "^8.4.12" - postcss-js "^4.0.0" - postcss-load-config "^3.1.4" - postcss-nested "5.0.6" - postcss-selector-parser "^6.0.10" - postcss-value-parser "^4.2.0" - quick-lru "^5.1.1" - resolve "^1.22.0" - -tapable@^1.0.0: - version "1.1.3" - 
resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -tar-fs@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-stream@2.2.0, tar-stream@^2.1.4, tar-stream@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - -temp-dir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" - integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== - -tempy@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" - integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== - dependencies: - is-stream "^2.0.0" - temp-dir "^2.0.0" - type-fest "^0.16.0" - unique-string "^2.0.0" - -terminal-link@^2.0.0: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: - version "5.3.1" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz#0320dcc270ad5372c1e8993fabbd927929773e54" - integrity sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g== - dependencies: - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - terser "^5.7.2" - -terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: - version "5.13.1" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.13.1.tgz#66332cdc5a01b04a224c9fad449fc1a18eaa1799" - integrity sha512-hn4WKOfwnwbYfe48NgrQjqNOH9jzLqRcIfbYytOXCOv46LBfWr9bDS17MQqOi+BWGD0sJK3Sj5NC/gJjiojaoA== - dependencies: - acorn "^8.5.0" - commander "^2.20.0" - source-map "~0.8.0-beta.0" - source-map-support "~0.5.20" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= - -throat@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" - integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== - -through@^2.3.6, 
through@^2.3.8: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= - -thunky@^1.0.2: - version "1.1.0" - resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" - integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== - -tiny-invariant@^1.0.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" - integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== - -tiny-warning@^1.0.0, tiny-warning@^1.0.2, tiny-warning@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" - integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== - -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== - dependencies: - os-tmpdir "~1.0.2" - -tmpl@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toidentifier@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -toposort@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" - integrity sha1-riF2gXXRVZ1IvvNUILL0li8JwzA= - -tough-cookie@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" - integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== - dependencies: - psl "^1.1.33" - punycode "^2.1.1" - universalify "^0.1.2" - -tr46@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" - integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= - dependencies: - punycode "^2.1.0" - -tr46@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== - dependencies: - punycode "^2.1.1" - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= - -tryer@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" - integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== - -tsconfig-paths@^3.14.1: - version "3.14.1" - resolved 
"https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" - integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== - dependencies: - "@types/json5" "^0.0.29" - json5 "^1.0.1" - minimist "^1.2.6" - strip-bom "^3.0.0" - -tslib@^1.10.0, tslib@^1.8.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@^2.0.3: - version "2.3.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" - integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== - -tslib@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tsutils@^3.21.0: - version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= - dependencies: - prelude-ls "~1.1.2" - -type-detect@4.0.8: - version "4.0.8" - resolved 
"https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.16.0: - version "0.16.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" - integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== - -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray-to-buffer@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" - integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== - dependencies: - is-typedarray "^1.0.0" - -typescript@^4.6.3: - version "4.6.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.4.tgz#caa78bbc3a59e6a5c510d35703f6a09877ce45e9" - integrity sha512-9ia/jWHIEbo49HfjrLGfKbZSuWo9iTMwXO+Ca3pRsSpbsMbc7/IU8NKdCZVRRBafVPGnoJeFL76ZOAA84I9fEg== - -ua-parser-js@^1.0.1: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-1.0.2.tgz#e2976c34dbfb30b15d2c300b2a53eac87c57a775" - integrity sha512-00y/AXhx0/SsnI51fTc0rLRmafiGOM4/O+ny10Ps7f+j/b8p/ZY11ytMgznXkOVo4GQ+KwQG5UQLkLGirsACRg== - -unbox-primitive@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" - integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== - dependencies: - function-bind "^1.1.1" - has-bigints "^1.0.1" - has-symbols "^1.0.2" - which-boxed-primitive "^1.0.2" - -unbzip2-stream@1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" - integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== - dependencies: - buffer "^5.2.1" - through "^2.3.8" - -unicode-canonical-property-names-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" - integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== - -unicode-match-property-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" - integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== - dependencies: - unicode-canonical-property-names-ecmascript "^2.0.0" - unicode-property-aliases-ecmascript "^2.0.0" - -unicode-match-property-value-ecmascript@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" - integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== - -unicode-property-aliases-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8" - integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== - -unique-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - -universalify@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" - integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== - -universalify@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" - integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== - -unload@2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/unload/-/unload-2.2.0.tgz#ccc88fdcad345faa06a92039ec0f80b488880ef7" - integrity sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA== - dependencies: - "@babel/runtime" "^7.6.2" - detect-node "^2.0.4" - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= 
- -unquote@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" - integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= - -upath@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -url-parse@^1.5.1: - version "1.5.10" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" - integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -use-local-storage-state@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/use-local-storage-state/-/use-local-storage-state-10.0.0.tgz#862d142e5d1fff30820102f0cd871e96e5527342" - integrity sha512-NCab0oYOMZA8oT9y4OE7tMT6JS21SiyPsTjZdapnyvHe7bVFlIMSp6LaiuHBdS1OvduuLtG+pX/duFIBkd0PCA== - -use-location-state@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/use-location-state/-/use-location-state-2.5.0.tgz#c71e6b5391898fa23ee1a6242d193924db6767c0" - integrity sha512-Gsn37xXWTVa4gGZA8WobtmC7ixm46TkQUyr9MApLhh9YIDcxOKuLCH/0wuKY7YcrCsb5t/S0b77qP50/mbvibQ== - dependencies: - query-state-core "^2.5.0" - -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -util.promisify@~1.0.0: - 
version "1.0.1" - resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" - integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.2" - has-symbols "^1.0.1" - object.getownpropertydescriptors "^2.1.0" - -utila@~0.4: - version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= - -uuid@^7.0.0: - version "7.0.3" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-7.0.3.tgz#c5c9f2c8cf25dc0a372c4df1441c41f5bd0c680b" - integrity sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg== - -uuid@^8.0.0, uuid@^8.3.2: - version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== - -v8-to-istanbul@^8.1.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" - integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - -validate-npm-package-license@^3.0.1: - 
version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - -validator@^13.0.0, validator@^13.7.0: - version "13.7.0" - resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" - integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw== - -value-equal@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" - integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== - -vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= - -vis-data@^7.1.0: - version "7.1.4" - resolved "https://registry.yarnpkg.com/vis-data/-/vis-data-7.1.4.tgz#90e5e796a79e1901de14c0808fb32a1a0735c1dc" - integrity sha512-usy+ePX1XnArNvJ5BavQod7YRuGQE1pjFl+pu7IS6rCom2EBoG0o1ZzCqf3l5US6MW51kYkLR+efxRbnjxNl7w== - -vis-timeline@^7.4.2: - version "7.5.1" - resolved "https://registry.yarnpkg.com/vis-timeline/-/vis-timeline-7.5.1.tgz#c15d14ccec40c3fc4cc0936cd363aa228daf6dd7" - integrity sha512-XZMHHbA8xm9/Y/iu3mE9MT7J5tfWgbdsW+PmqrgINU2QRX24AiqifNHZHV4YYzeJstiTSOg9Gs5qRkxQ0BvZJw== - -vis-util@^4.3.4: - version "4.3.4" - resolved "https://registry.yarnpkg.com/vis-util/-/vis-util-4.3.4.tgz#02319fbd909f82782b96a36d1224f1beea67f8b2" - integrity sha512-hJIZNrwf4ML7FYjs+m+zjJfaNvhjk3/1hbMdQZVnwwpOFJS/8dMG8rdbOHXcKoIEM6U5VOh3HNpaDXxGkOZGpw== - -w3c-hr-time@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== - dependencies: - browser-process-hrtime "^1.0.0" - -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== - dependencies: - xml-name-validator "^3.0.0" - -walker@^1.0.7: - version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -watchpack@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.3.1.tgz#4200d9447b401156eeca7767ee610f8809bc9d25" - integrity sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA== - dependencies: - glob-to-regexp "^0.4.1" - graceful-fs "^4.1.2" - -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== - dependencies: - minimalistic-assert "^1.0.0" - -wcwidth@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= - dependencies: - defaults "^1.0.3" - -webdriver@7.19.7, webdriver@^7.19.7: - version "7.19.7" - resolved "https://registry.yarnpkg.com/webdriver/-/webdriver-7.19.7.tgz#be2ba4052d9fad9cfdc88024949a55cb2f28168f" - integrity 
sha512-3gygDpwaCMZlUhh7Wv1SbjTvfdLGbPqRQ3poZ7lKvsVAAmciLziJDeR8LrNTyS9R418MgNbBdWOQrHGS+gp0ZQ== - dependencies: - "@types/node" "^17.0.4" - "@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/protocols" "7.19.0" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - got "^11.0.2" - ky "^0.30.0" - lodash.merge "^4.6.1" - -webdriverio@7.19.7, webdriverio@^7.19.7: - version "7.19.7" - resolved "https://registry.yarnpkg.com/webdriverio/-/webdriverio-7.19.7.tgz#781d43cf4db272537cd5422649145f93225b6aa2" - integrity sha512-GaekRmFN3wokW3VN08hFjTJ3GagJxuKR6AV8kVvlxxMye9nfU3TQPzsrqGrue8uWYvyZ3x0SVkUluwtImZNzPA== - dependencies: - "@types/aria-query" "^5.0.0" - "@types/node" "^17.0.4" - "@wdio/config" "7.19.5" - "@wdio/logger" "7.19.0" - "@wdio/protocols" "7.19.0" - "@wdio/repl" "7.19.7" - "@wdio/types" "7.19.5" - "@wdio/utils" "7.19.7" - archiver "^5.0.0" - aria-query "^5.0.0" - css-shorthand-properties "^1.1.1" - css-value "^0.0.1" - devtools "7.19.7" - devtools-protocol "^0.0.998712" - fs-extra "^10.0.0" - grapheme-splitter "^1.0.2" - lodash.clonedeep "^4.5.0" - lodash.isobject "^3.0.2" - lodash.isplainobject "^4.0.6" - lodash.zip "^4.2.0" - minimatch "^5.0.0" - puppeteer-core "^13.1.3" - query-selector-shadow-dom "^1.0.0" - resq "^1.9.1" - rgb2hex "0.2.5" - serialize-error "^8.0.0" - webdriver "7.19.7" - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= - -webidl-conversions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== - -webidl-conversions@^5.0.0: - version "5.0.0" - resolved 
"https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== - -webpack-dev-middleware@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.1.tgz#aa079a8dedd7e58bfeab358a9af7dab304cee57f" - integrity sha512-81EujCKkyles2wphtdrnPg/QqegC/AtqNH//mQkBYSMqwFVCQrxM6ktB2O/SPlZy7LqeEfTbV3cZARGQz6umhg== - dependencies: - colorette "^2.0.10" - memfs "^3.4.1" - mime-types "^2.1.31" - range-parser "^1.2.1" - schema-utils "^4.0.0" - -webpack-dev-server@^4.6.0: - version "4.9.0" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.9.0.tgz#737dbf44335bb8bde68f8f39127fc401c97a1557" - integrity sha512-+Nlb39iQSOSsFv0lWUuUTim3jDQO8nhK3E68f//J2r5rIcp4lULHXz2oZ0UVdEeWXEh5lSzYUlzarZhDAeAVQw== - dependencies: - "@types/bonjour" "^3.5.9" - "@types/connect-history-api-fallback" "^1.3.5" - "@types/express" "^4.17.13" - "@types/serve-index" "^1.9.1" - "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.1" - ansi-html-community "^0.0.8" - bonjour-service "^1.0.11" - chokidar "^3.5.3" - colorette "^2.0.10" - compression "^1.7.4" - connect-history-api-fallback "^1.6.0" - default-gateway "^6.0.3" - express "^4.17.3" - graceful-fs "^4.2.6" - html-entities "^2.3.2" - http-proxy-middleware "^2.0.3" - ipaddr.js "^2.0.1" - open "^8.0.9" - p-retry "^4.5.0" - rimraf "^3.0.2" - schema-utils "^4.0.0" - selfsigned "^2.0.1" - serve-index "^1.9.1" - sockjs "^0.3.21" - spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.4.2" - -webpack-manifest-plugin@^4.0.2: - 
version "4.1.1" - resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" - integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== - dependencies: - tapable "^2.0.0" - webpack-sources "^2.2.0" - -webpack-sources@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack-sources@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" - integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== - dependencies: - source-list-map "^2.0.1" - source-map "^0.6.1" - -webpack-sources@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" - integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== - -webpack@^5.64.4: - version "5.72.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.72.1.tgz#3500fc834b4e9ba573b9f430b2c0a61e1bb57d13" - integrity sha512-dXG5zXCLspQR4krZVR6QgajnZOjW2K/djHvdcRaDQvsjV9z9vaW6+ja5dZOYbqBBjF6kGXka/2ZyxNdc+8Jung== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - acorn "^8.4.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" - chrome-trace-event "^1.0.2" - enhanced-resolve "^5.9.3" - es-module-lexer "^0.9.0" - eslint-scope "5.1.1" - events "^3.2.0" - glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" - 
json-parse-even-better-errors "^2.3.1" - loader-runner "^4.2.0" - mime-types "^2.1.27" - neo-async "^2.6.2" - schema-utils "^3.1.0" - tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.3.1" - webpack-sources "^3.2.3" - -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: - version "0.7.4" - resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" - integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== - dependencies: - http-parser-js ">=0.5.1" - safe-buffer ">=5.1.0" - websocket-extensions ">=0.1.1" - -websocket-extensions@>=0.1.1: - version "0.1.4" - resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" - integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== - -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - -whatwg-fetch@^3.6.2: - version "3.6.2" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" - integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== - -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity 
sha1-lmRU6HZUYuN2RNNib2dCzotwll0= - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -whatwg-url@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" - integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== - dependencies: - lodash.sortby "^4.7.0" - tr46 "^1.0.1" - webidl-conversions "^4.0.2" - -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.7.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" - integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== - dependencies: - lodash "^4.7.0" - tr46 "^2.1.0" - webidl-conversions "^6.1.0" - -which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" - -which@^1.2.9, which@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -which@^2.0.1, which@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -word-wrap@^1.2.3, word-wrap@~1.2.3: - version "1.2.3" - resolved 
"https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -workbox-background-sync@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.5.3.tgz#7c66c1836aeca6f3762dc48d17a1852a33b3168c" - integrity sha512-0DD/V05FAcek6tWv9XYj2w5T/plxhDSpclIcAGjA/b7t/6PdaRkQ7ZgtAX6Q/L7kV7wZ8uYRJUoH11VjNipMZw== - dependencies: - idb "^6.1.4" - workbox-core "6.5.3" - -workbox-broadcast-update@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.5.3.tgz#fc2ad79cf507e22950cda9baf1e9a0ccc43f31bc" - integrity sha512-4AwCIA5DiDrYhlN+Miv/fp5T3/whNmSL+KqhTwRBTZIL6pvTgE4lVuRzAt1JltmqyMcQ3SEfCdfxczuI4kwFQg== - dependencies: - workbox-core "6.5.3" - -workbox-build@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.5.3.tgz#38e3f286d63d2745bff4d1478bb3a6ab5c8b1170" - integrity sha512-8JNHHS7u13nhwIYCDea9MNXBNPHXCs5KDZPKI/ZNTr3f4sMGoD7hgFGecbyjX1gw4z6e9bMpMsOEJNyH5htA/w== - dependencies: - "@apideck/better-ajv-errors" "^0.3.1" - "@babel/core" "^7.11.1" - "@babel/preset-env" "^7.11.0" - "@babel/runtime" "^7.11.2" - "@rollup/plugin-babel" "^5.2.0" - "@rollup/plugin-node-resolve" "^11.2.1" - "@rollup/plugin-replace" "^2.4.1" - "@surma/rollup-plugin-off-main-thread" "^2.2.3" - ajv "^8.6.0" - common-tags "^1.8.0" - fast-json-stable-stringify "^2.1.0" - fs-extra "^9.0.1" - glob "^7.1.6" - lodash "^4.17.20" - pretty-bytes "^5.3.0" - rollup "^2.43.1" - rollup-plugin-terser "^7.0.0" - source-map "^0.8.0-beta.0" - stringify-object "^3.3.0" - strip-comments "^2.0.1" - tempy "^0.6.0" - upath "^1.2.0" - workbox-background-sync "6.5.3" - workbox-broadcast-update "6.5.3" - workbox-cacheable-response "6.5.3" - workbox-core "6.5.3" - workbox-expiration "6.5.3" - 
workbox-google-analytics "6.5.3" - workbox-navigation-preload "6.5.3" - workbox-precaching "6.5.3" - workbox-range-requests "6.5.3" - workbox-recipes "6.5.3" - workbox-routing "6.5.3" - workbox-strategies "6.5.3" - workbox-streams "6.5.3" - workbox-sw "6.5.3" - workbox-window "6.5.3" - -workbox-cacheable-response@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.5.3.tgz#b1f8c2bc599a7be8f7e3c262535629c558738e47" - integrity sha512-6JE/Zm05hNasHzzAGKDkqqgYtZZL2H06ic2GxuRLStA4S/rHUfm2mnLFFXuHAaGR1XuuYyVCEey1M6H3PdZ7SQ== - dependencies: - workbox-core "6.5.3" - -workbox-core@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.5.3.tgz#bca038a9ef0d7a634a6db2a60f45313ed22ac249" - integrity sha512-Bb9ey5n/M9x+l3fBTlLpHt9ASTzgSGj6vxni7pY72ilB/Pb3XtN+cZ9yueboVhD5+9cNQrC9n/E1fSrqWsUz7Q== - -workbox-expiration@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.5.3.tgz#efc0811f371a2ede1052b9de1c4f072b71d50503" - integrity sha512-jzYopYR1zD04ZMdlbn/R2Ik6ixiXbi15c9iX5H8CTi6RPDz7uhvMLZPKEndZTpfgmUk8mdmT9Vx/AhbuCl5Sqw== - dependencies: - idb "^6.1.4" - workbox-core "6.5.3" - -workbox-google-analytics@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.5.3.tgz#cc8c3a61f449131660a4ed2f5362d9a3599b18fe" - integrity sha512-3GLCHotz5umoRSb4aNQeTbILETcrTVEozSfLhHSBaegHs1PnqCmN0zbIy2TjTpph2AGXiNwDrWGF0AN+UgDNTw== - dependencies: - workbox-background-sync "6.5.3" - workbox-core "6.5.3" - workbox-routing "6.5.3" - workbox-strategies "6.5.3" - -workbox-navigation-preload@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.5.3.tgz#81b74f598b11aa07e2cf1c21af7a826a4f0f70b3" - integrity sha512-bK1gDFTc5iu6lH3UQ07QVo+0ovErhRNGvJJO/1ngknT0UQ702nmOUhoN9qE5mhuQSrnK+cqu7O7xeaJ+Rd9Tmg== - 
dependencies: - workbox-core "6.5.3" - -workbox-precaching@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.5.3.tgz#c870312b2ef901d790ab9e48da084e776c62af47" - integrity sha512-sjNfgNLSsRX5zcc63H/ar/hCf+T19fRtTqvWh795gdpghWb5xsfEkecXEvZ8biEi1QD7X/ljtHphdaPvXDygMQ== - dependencies: - workbox-core "6.5.3" - workbox-routing "6.5.3" - workbox-strategies "6.5.3" - -workbox-range-requests@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.5.3.tgz#e624ac82ff266a5e4f236d055797def07949d941" - integrity sha512-pGCP80Bpn/0Q0MQsfETSfmtXsQcu3M2QCJwSFuJ6cDp8s2XmbUXkzbuQhCUzKR86ZH2Vex/VUjb2UaZBGamijA== - dependencies: - workbox-core "6.5.3" - -workbox-recipes@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.5.3.tgz#15beac9d8ae7a3a1c100218094a824b4dd3fd59a" - integrity sha512-IcgiKYmbGiDvvf3PMSEtmwqxwfQ5zwI7OZPio3GWu4PfehA8jI8JHI3KZj+PCfRiUPZhjQHJ3v1HbNs+SiSkig== - dependencies: - workbox-cacheable-response "6.5.3" - workbox-core "6.5.3" - workbox-expiration "6.5.3" - workbox-precaching "6.5.3" - workbox-routing "6.5.3" - workbox-strategies "6.5.3" - -workbox-routing@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.5.3.tgz#a0a699d8cc90b5692bd3df24679acbbda3913777" - integrity sha512-DFjxcuRAJjjt4T34RbMm3MCn+xnd36UT/2RfPRfa8VWJGItGJIn7tG+GwVTdHmvE54i/QmVTJepyAGWtoLPTmg== - dependencies: - workbox-core "6.5.3" - -workbox-strategies@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.5.3.tgz#4bea9a48fee16cf43766e0d8138296773c8a9783" - integrity sha512-MgmGRrDVXs7rtSCcetZgkSZyMpRGw8HqL2aguszOc3nUmzGZsT238z/NN9ZouCxSzDu3PQ3ZSKmovAacaIhu1w== - dependencies: - workbox-core "6.5.3" - -workbox-streams@6.5.3: - version "6.5.3" - resolved 
"https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.5.3.tgz#b6860290031caa7d0e46ad7142315c94359c780b" - integrity sha512-vN4Qi8o+b7zj1FDVNZ+PlmAcy1sBoV7SC956uhqYvZ9Sg1fViSbOpydULOssVJ4tOyKRifH/eoi6h99d+sJ33w== - dependencies: - workbox-core "6.5.3" - workbox-routing "6.5.3" - -workbox-sw@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.5.3.tgz#cd2f0c086f4496acd25774ed02c48504189bebdd" - integrity sha512-BQBzm092w+NqdIEF2yhl32dERt9j9MDGUTa2Eaa+o3YKL4Qqw55W9yQC6f44FdAHdAJrJvp0t+HVrfh8AiGj8A== - -workbox-webpack-plugin@^6.4.1: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.3.tgz#c37bb323be4952311565c07db51054fe59c87d73" - integrity sha512-Es8Xr02Gi6Kc3zaUwR691ZLy61hz3vhhs5GztcklQ7kl5k2qAusPh0s6LF3wEtlpfs9ZDErnmy5SErwoll7jBA== - dependencies: - fast-json-stable-stringify "^2.1.0" - pretty-bytes "^5.4.1" - upath "^1.2.0" - webpack-sources "^1.4.3" - workbox-build "6.5.3" - -workbox-window@6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.5.3.tgz#4ade70056cb73477ef1cd8fea7cfd0ecbd825c7f" - integrity sha512-GnJbx1kcKXDtoJBVZs/P7ddP0Yt52NNy4nocjBpYPiRhMqTpJCNrSL+fGHZ/i/oP6p/vhE8II0sA6AZGKGnssw== - dependencies: - "@types/trusted-types" "^2.0.2" - workbox-core "6.5.3" - -workerpool@6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" - integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - 
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -write-file-atomic@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" - integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== - dependencies: - imurmurhash "^0.1.4" - is-typedarray "^1.0.0" - signal-exit "^3.0.2" - typedarray-to-buffer "^3.1.5" - -ws@8.5.0: - version "8.5.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" - integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== - -ws@^7.4.6: - version "7.5.7" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.7.tgz#9e0ac77ee50af70d58326ecff7e85eb3fa375e67" - integrity sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A== - -ws@^8.4.2: - version "8.6.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.6.0.tgz#e5e9f1d9e7ff88083d0c0dd8281ea662a42c9c23" - integrity sha512-AzmM3aH3gk0aX7/rZLYvjdvZooofDu3fFOzGqcSnQ1tOcTWwhM/o+q++E8mAyVVIyUdajrkzWUGftaVSDLn1bw== - -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== - -xmlbuilder@^15.1.1: - version "15.1.1" - resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-15.1.1.tgz#9dcdce49eea66d8d10b42cae94a79c3c8d0c2ec5" - integrity sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg== - -xmlchars@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" - integrity 
sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== - -xss@^1.0.8: - version "1.0.11" - resolved "https://registry.yarnpkg.com/xss/-/xss-1.0.11.tgz#211cb82e95b5071d4c75d597283c021157ebe46a" - integrity sha512-EimjrjThZeK2MO7WKR9mN5ZC1CSqivSl55wvUK5EtU6acf0rzEE1pN+9ZDrFXJ82BRp3JL38pPE6S4o/rpp1zQ== - dependencies: - commander "^2.20.3" - cssfilter "0.0.10" - -xtend@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: - version "1.10.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" - integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== - -yargs-parser@20.2.4: - version "20.2.4" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" - integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs-parser@^21.0.0: - version "21.0.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" - integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== - -yargs-unparser@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-2.0.0.tgz#f131f9226911ae5d9ad38c432fe809366c2325eb" - integrity sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA== - dependencies: - camelcase "^6.0.0" - decamelize "^4.0.0" - flat "^5.0.2" - is-plain-obj "^2.1.0" - -yargs@16.2.0, yargs@^16.2.0: - version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - -yargs@^17.0.0: - version "17.5.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.0.tgz#2706c5431f8c119002a2b106fc9f58b9bb9097a3" - integrity sha512-3sLxVhbAB5OC8qvVRebCLWuouhwh/rswsiDYx3WGxajUk/l4G20SKfrKKFeNIHboUFt2JFgv2yfn+5cgOr/t5A== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.0.0" - -yarn-install@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/yarn-install/-/yarn-install-1.0.0.tgz#57f45050b82efd57182b3973c54aa05cb5d25230" - integrity sha1-V/RQULgu/VcYKzlzxUqgXLXSUjA= - dependencies: - cac "^3.0.3" - chalk "^1.1.3" - 
cross-spawn "^4.0.2" - -yauzl@^2.10.0: - version "2.10.0" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" - integrity sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk= - dependencies: - buffer-crc32 "~0.2.3" - fd-slicer "~1.1.0" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== - -yup@^0.32.11: - version "0.32.11" - resolved "https://registry.yarnpkg.com/yup/-/yup-0.32.11.tgz#d67fb83eefa4698607982e63f7ca4c5ed3cf18c5" - integrity sha512-Z2Fe1bn+eLstG8DRR6FTavGD+MeAwyfmouhHsIUgaADz8jvFKbO/fXc2trJKZg+5EBjh4gGm3iU/t3onKlXHIg== - dependencies: - "@babel/runtime" "^7.15.4" - "@types/lodash" "^4.14.175" - lodash "^4.17.21" - lodash-es "^4.17.21" - nanoclone "^0.2.1" - property-expr "^2.0.4" - toposort "^2.0.2" - -zip-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.0.tgz#51dd326571544e36aa3f756430b313576dc8fc79" - integrity sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A== - dependencies: - archiver-utils "^2.1.0" - compress-commons "^4.1.0" - readable-stream "^3.6.0" diff --git a/unversioned-apis.html b/unversioned-apis.html new file mode 100644 index 000000000..d070f381e --- /dev/null +++ b/unversioned-apis.html @@ -0,0 +1,343 @@ + + + + + + + + + + + + + + + + + + + + + MSANose Report + + + + + + +
    +
    +
    + View More on GitHub +

    Unversioned APIs

    +
    +

    All Application Programming Interfaces (API) should be versioned to keep track of changes properly

    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    #Endpoint
    1/{workflowId}/resume
    2/{workflowId}/restart
    3EVENT
    4/resume
    5/{workflowId}/retry
    6/taskdefs/{tasktype}
    7WORKFLOW_BULK/restart
    8/workflow/{name}
    9/{workflowId}/skiptask/{taskReferenceName}
    10WORKFLOW/{name}/correlated/{correlationId}
    11/queue/sizes
    12/poll/{tasktype}
    13/queue/polldata/all
    14ADMIN/queues
    15/task/{tasktype}
    16/pause
    17WORKFLOW_BULK/pause
    18WORKFLOW/running/{name}
    19/health
    20/{event}
    21/poll/batch/{tasktype}
    22WORKFLOW/{workflowId}/resetcallbacks
    23/config
    24/queue/all/verbose
    25ADMIN/config
    26EVENT/{event}
    27WORKFLOW/{workflowId}/resume
    28WORKFLOW_BULK/terminate
    29/restart
    30/workflow
    31/{name}
    32/terminate
    33/queues
    34/update/{workflowId}/task/{taskId}/{status}
    35/queue/size
    36EVENT/{name}
    37/workflow/{name}/{version}
    38/externalstoragelocation
    39WORKFLOW/{workflowId}/retry
    40/{workflowId}
    41WORKFLOW_BULK/retry
    42WORKFLOW/decide/{workflowId}
    43/{workflowId}/pause
    44/{name}/correlated/{correlationId}
    45/{workflowId}/remove
    46/taskdefs
    47/queue/all
    48WORKFLOW/{workflowId}/skiptask/{taskReferenceName}
    49WORKFLOW/{workflowId}/restart
    50WORKFLOW/{workflowId}
    51ADMIN/task/{tasktype}
    52WORKFLOW/{workflowId}/remove
    53/queue/polldata
    54/decide/{workflowId}
    55/retry
    56/{taskId}
    57WORKFLOW/externalstoragelocation
    58WORKFLOW_BULK/resume
    59/{taskId}/log
    60/running/{name}
    61WORKFLOW/{workflowId}/pause
    62/{workflowId}/resetcallbacks
    63QUEUE/update/{workflowId}/task/{taskId}/{status}
    + +
    +
    +
    + +
    +
    𝜇Sensor · © 2022-2024
    + + + + + + + + +