diff --git a/.apigentools-info b/.apigentools-info index f63f96ac8..d4d723485 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-02-12 17:13:42.478907", - "spec_repo_commit": "154100ad" + "regenerated": "2025-02-12 18:33:49.810961", + "spec_repo_commit": "6a4cfb82" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-02-12 17:13:42.494251", - "spec_repo_commit": "154100ad" + "regenerated": "2025-02-12 18:33:49.826925", + "spec_repo_commit": "6a4cfb82" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index b0adf622a..5e1dce10c 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -1048,6 +1048,22 @@ components: type: string x-enum-varnames: - API_KEYS + APITrigger: + description: Trigger a workflow VIA an API. The workflow must be published. + properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + APITriggerWrapper: + description: Schema for an API-based trigger. + properties: + apiTrigger: + $ref: '#/components/schemas/APITrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - apiTrigger + type: object AWSAccountConfigID: description: 'Unique Datadog ID of the AWS Account Integration Config. @@ -1822,6 +1838,56 @@ components: - id - base_severity type: object + Annotation: + description: A list of annotations used in the workflow. These are like sticky + notes for your workflow! + properties: + display: + $ref: '#/components/schemas/AnnotationDisplay' + id: + description: The `Annotation` `id`. + example: '' + type: string + markdownTextAnnotation: + $ref: '#/components/schemas/AnnotationMarkdownTextAnnotation' + required: + - id + - display + - markdownTextAnnotation + type: object + AnnotationDisplay: + description: The definition of `AnnotationDisplay` object. + properties: + bounds: + $ref: '#/components/schemas/AnnotationDisplayBounds' + type: object + AnnotationDisplayBounds: + description: The definition of `AnnotationDisplayBounds` object. + properties: + height: + description: The `bounds` `height`. + format: double + type: number + width: + description: The `bounds` `width`. + format: double + type: number + x: + description: The `bounds` `x`. + format: double + type: number + y: + description: The `bounds` `y`. + format: double + type: number + type: object + AnnotationMarkdownTextAnnotation: + description: The definition of `AnnotationMarkdownTextAnnotation` object. + properties: + text: + description: The `markdownTextAnnotation` `text`. + type: string + type: object ApiID: description: API identifier. example: 90646597-5fdb-4a17-a240-647003f8c028 @@ -1965,6 +2031,17 @@ components: deployment: $ref: '#/components/schemas/DeploymentRelationship' type: object + AppTriggerWrapper: + description: Schema for an App-based trigger. + properties: + appTrigger: + description: Trigger a workflow VIA an App. + type: object + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - appTrigger + type: object ApplicationKeyCreateAttributes: description: Attributes used to create an application Key. properties: @@ -5204,6 +5281,23 @@ components: - OPEN - IN_PROGRESS - CLOSED + CaseTrigger: + description: Trigger a workflow VIA a Case. For automatic triggering a handle + must be configured and the workflow must be published. 
+ properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + CaseTriggerWrapper: + description: Schema for a Case-based trigger. + properties: + caseTrigger: + $ref: '#/components/schemas/CaseTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - caseTrigger + type: object CaseType: description: Case type enum: @@ -5421,6 +5515,17 @@ components: type: string x-enum-varnames: - SERVICE + ChangeEventTriggerWrapper: + description: Schema for a Change Event-based trigger. + properties: + changeEventTrigger: + description: Trigger a workflow VIA a Change Event. + type: object + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - changeEventTrigger + type: object ChargebackBreakdown: description: Charges breakdown. properties: @@ -6117,6 +6222,60 @@ components: required: - location type: object + CompletionCondition: + description: The definition of `CompletionCondition` object. + properties: + operand1: + description: The `CompletionCondition` `operand1`. + operand2: + description: The `CompletionCondition` `operand2`. + operator: + $ref: '#/components/schemas/CompletionConditionOperator' + required: + - operand1 + - operator + type: object + CompletionConditionOperator: + description: The definition of `CompletionConditionOperator` object. + enum: + - OPERATOR_EQUAL + - OPERATOR_NOT_EQUAL + - OPERATOR_GREATER_THAN + - OPERATOR_LESS_THAN + - OPERATOR_GREATER_THAN_OR_EQUAL_TO + - OPERATOR_LESS_THAN_OR_EQUAL_TO + - OPERATOR_CONTAINS + - OPERATOR_DOES_NOT_CONTAIN + - OPERATOR_IS_NULL + - OPERATOR_IS_NOT_NULL + - OPERATOR_IS_EMPTY + - OPERATOR_IS_NOT_EMPTY + example: OPERATOR_EQUAL + type: string + x-enum-varnames: + - OPERATOR_EQUAL + - OPERATOR_NOT_EQUAL + - OPERATOR_GREATER_THAN + - OPERATOR_LESS_THAN + - OPERATOR_GREATER_THAN_OR_EQUAL_TO + - OPERATOR_LESS_THAN_OR_EQUAL_TO + - OPERATOR_CONTAINS + - OPERATOR_DOES_NOT_CONTAIN + - OPERATOR_IS_NULL + - OPERATOR_IS_NOT_NULL + - OPERATOR_IS_EMPTY + - OPERATOR_IS_NOT_EMPTY + CompletionGate: + description: Used to create conditions before running subsequent actions. + properties: + completionCondition: + $ref: '#/components/schemas/CompletionCondition' + retryStrategy: + $ref: '#/components/schemas/RetryStrategy' + required: + - completionCondition + - retryStrategy + type: object Component: description: '[Definition of a UI component in the app](https://docs.datadoghq.com/service_management/app_builder/components/)' properties: @@ -6578,6 +6737,70 @@ components: $ref: '#/components/schemas/ConfluentResourceResponseData' type: array type: object + Connection: + description: The definition of `Connection` object. + properties: + connectionId: + description: The `Connection` `connectionId`. + example: '' + type: string + label: + description: The `Connection` `label`. + example: '' + type: string + required: + - connectionId + - label + type: object + ConnectionEnv: + description: A list of connections or connection groups used in the workflow. + properties: + connectionGroups: + description: The `ConnectionEnv` `connectionGroups`. + items: + $ref: '#/components/schemas/ConnectionGroup' + type: array + connections: + description: The `ConnectionEnv` `connections`. + items: + $ref: '#/components/schemas/Connection' + type: array + env: + $ref: '#/components/schemas/ConnectionEnvEnv' + required: + - env + type: object + ConnectionEnvEnv: + description: The definition of `ConnectionEnvEnv` object. 
+ enum: + - default + example: default + type: string + x-enum-varnames: + - DEFAULT + ConnectionGroup: + description: The definition of `ConnectionGroup` object. + properties: + connectionGroupId: + description: The `ConnectionGroup` `connectionGroupId`. + example: '' + type: string + label: + description: The `ConnectionGroup` `label`. + example: '' + type: string + tags: + description: The `ConnectionGroup` `tags`. + example: + - '' + items: + type: string + type: array + required: + - connectionGroupId + - label + - tags + type: object Container: description: Container object. properties: @@ -7557,6 +7780,82 @@ components: type: $ref: '#/components/schemas/RuleType' type: object + CreateWorkflowRequest: + description: A request object for creating a new workflow. + example: + data: + attributes: + description: A sample workflow. + name: Example Workflow + published: true + spec: + annotations: + - display: + bounds: + height: 150 + width: 300 + x: -375 + y: -0.5 + id: 99999999-9999-9999-9999-999999999999 + markdownTextAnnotation: + text: Example annotation. + connectionEnvs: + - connections: + - connectionId: 11111111-1111-1111-1111-111111111111 + label: INTEGRATION_DATADOG + env: default + handle: my-handle + inputSchema: + parameters: + - defaultValue: default + name: input + type: STRING + outputSchema: + parameters: + - name: output + type: ARRAY_OBJECT + value: '{{ Steps.Step1 }}' + steps: + - actionId: com.datadoghq.dd.monitor.listMonitors + connectionLabel: INTEGRATION_DATADOG + name: Step1 + outboundEdges: + - branchName: main + nextStepName: Step2 + parameters: + - name: tags + value: service:monitoring + - actionId: com.datadoghq.core.noop + name: Step2 + triggers: + - monitorTrigger: + rateLimit: + count: 1 + interval: 3600s + startStepNames: + - Step1 + - githubWebhookTrigger: {} + startStepNames: + - Step1 + tags: + - team:infra + - service:monitoring + - foo:bar + type: workflows + properties: + data: + $ref: '#/components/schemas/WorkflowData' + required: + - data + type: object + CreateWorkflowResponse: + description: The response object after creating a new workflow. + properties: + data: + $ref: '#/components/schemas/WorkflowData' + required: + - data + type: object Creator: description: Creator of the object. properties: @@ -9043,6 +9342,17 @@ components: $ref: '#/components/schemas/DashboardListItemResponse' type: array type: object + DashboardTriggerWrapper: + description: Schema for a Dashboard-based trigger. + properties: + dashboardTrigger: + description: Trigger a workflow VIA a Dashboard. + type: object + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - dashboardTrigger + type: object DashboardType: description: The type of the dashboard. enum: @@ -11142,6 +11452,19 @@ components: minLength: 1 type: string type: object + ErrorHandler: + description: Used to handle errors in an action. + properties: + fallbackStepName: + description: The `ErrorHandler` `fallbackStepName`. + example: '' + type: string + retryStrategy: + $ref: '#/components/schemas/RetryStrategy' + required: + - retryStrategy + - fallbackStepName + type: object Event: description: The metadata associated with a request. properties: @@ -12712,6 +13035,12 @@ components: - _HANDLE - EMAIL - _EMAIL + GetWorkflowResponse: + description: The response object after getting a workflow. + properties: + data: + $ref: '#/components/schemas/WorkflowData' + type: object GitCommitSHA: description: Git Commit SHA. 
example: 66adc9350f2cc9b250b69abddab733dd55e1a588 @@ -12721,6 +13050,26 @@ components: description: Git Repository URL example: https://github.com/organization/example-repository type: string + GithubWebhookTrigger: + description: Trigger a workflow VIA GitHub webhook. To trigger a workflow from + GitHub, you must set a `webhookSecret`. In your GitHub Webhook Settings, set + the Payload URL to "base_url"/api/v2/workflows/"workflow_id"/webhook?orgId="org_id", + select application/json for the content type, and be highly recommend enabling + SSL verification for security. The workflow must be published. + properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + GithubWebhookTriggerWrapper: + description: Schema for a GitHub webhook-based trigger. + properties: + githubWebhookTrigger: + $ref: '#/components/schemas/GithubWebhookTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - githubWebhookTrigger + type: object GroupScalarColumn: description: A column containing the tag keys and values in a group. properties: @@ -15012,6 +15361,23 @@ components: type: string x-enum-varnames: - INCIDENT_TODOS + IncidentTrigger: + description: Trigger a workflow VIA an Incident. For automatic triggering a + handle must be configured and the workflow must be published. + properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + IncidentTriggerWrapper: + description: Schema for an Incident-based trigger. + properties: + incidentTrigger: + $ref: '#/components/schemas/IncidentTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - incidentTrigger + type: object IncidentType: default: incidents description: Incident resource type. @@ -15456,6 +15822,59 @@ components: - ONCALL - INCIDENT - RELATION + InputSchema: + description: A list of input parameters for the workflow. These can be used + as dynamic runtime values in your workflow. + properties: + parameters: + description: The `InputSchema` `parameters`. + items: + $ref: '#/components/schemas/InputSchemaParameters' + type: array + type: object + InputSchemaParameters: + description: The definition of `InputSchemaParameters` object. + properties: + defaultValue: + description: The `InputSchemaParameters` `defaultValue`. + description: + description: The `InputSchemaParameters` `description`. + type: string + label: + description: The `InputSchemaParameters` `label`. + type: string + name: + description: The `InputSchemaParameters` `name`. + example: '' + type: string + type: + $ref: '#/components/schemas/InputSchemaParametersType' + required: + - name + - type + type: object + InputSchemaParametersType: + description: The definition of `InputSchemaParametersType` object. + enum: + - STRING + - NUMBER + - BOOLEAN + - OBJECT + - ARRAY_STRING + - ARRAY_NUMBER + - ARRAY_BOOLEAN + - ARRAY_OBJECT + example: STRING + type: string + x-enum-varnames: + - STRING + - NUMBER + - BOOLEAN + - OBJECT + - ARRAY_STRING + - ARRAY_NUMBER + - ARRAY_BOOLEAN + - ARRAY_OBJECT IntakePayloadAccepted: description: The payload accepted for intake. properties: @@ -19373,6 +19792,23 @@ components: type: $ref: '#/components/schemas/MonitorDowntimeMatchResourceType' type: object + MonitorTrigger: + description: Trigger a workflow VIA a Monitor. For automatic triggering a handle + must be configured and the workflow must be published. 
+ properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + MonitorTriggerWrapper: + description: Schema for a Monitor-based trigger. + properties: + monitorTrigger: + $ref: '#/components/schemas/MonitorTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - monitorTrigger + type: object MonitorType: description: Attributes from the monitor that triggered the event. nullable: true @@ -20136,6 +20572,21 @@ components: type: string x-enum-varnames: - ORGS + OutboundEdge: + description: The definition of `OutboundEdge` object. + properties: + branchName: + description: The `OutboundEdge` `branchName`. + example: '' + type: string + nextStepName: + description: The `OutboundEdge` `nextStepName`. + example: '' + type: string + required: + - nextStepName + - branchName + type: object OutcomeType: default: outcome description: The JSON:API type for an outcome. @@ -20315,6 +20766,60 @@ components: example: /api/v2/scorecard/outcomes?include=rule&page%5Blimit%5D=100&page%5Boffset%5D=100 type: string type: object + OutputSchema: + description: A list of output parameters for the workflow. + properties: + parameters: + description: The `OutputSchema` `parameters`. + items: + $ref: '#/components/schemas/OutputSchemaParameters' + type: array + type: object + OutputSchemaParameters: + description: The definition of `OutputSchemaParameters` object. + properties: + defaultValue: + description: The `OutputSchemaParameters` `defaultValue`. + description: + description: The `OutputSchemaParameters` `description`. + type: string + label: + description: The `OutputSchemaParameters` `label`. + type: string + name: + description: The `OutputSchemaParameters` `name`. + example: '' + type: string + type: + $ref: '#/components/schemas/OutputSchemaParametersType' + value: + description: The `OutputSchemaParameters` `value`. + required: + - name + - type + type: object + OutputSchemaParametersType: + description: The definition of `OutputSchemaParametersType` object. + enum: + - STRING + - NUMBER + - BOOLEAN + - OBJECT + - ARRAY_STRING + - ARRAY_NUMBER + - ARRAY_BOOLEAN + - ARRAY_OBJECT + example: STRING + type: string + x-enum-varnames: + - STRING + - NUMBER + - BOOLEAN + - OBJECT + - ARRAY_STRING + - ARRAY_NUMBER + - ARRAY_BOOLEAN + - ARRAY_OBJECT Pagination: description: Pagination object. properties: @@ -20327,6 +20832,19 @@ components: format: int64 type: integer type: object + Parameter: + description: The definition of `Parameter` object. + properties: + name: + description: The `Parameter` `name`. + example: '' + type: string + value: + description: The `Parameter` `value`. + required: + - name + - value + type: object PartialAPIKey: description: Partial Datadog API key. properties: @@ -21939,6 +22457,24 @@ components: from the other indexes type: string type: object + ReadinessGate: + description: Used to merge multiple branches into a single branch. + properties: + thresholdType: + $ref: '#/components/schemas/ReadinessGateThresholdType' + required: + - thresholdType + type: object + ReadinessGateThresholdType: + description: The definition of `ReadinessGateThresholdType` object. + enum: + - ANY + - ALL + example: ANY + type: string + x-enum-varnames: + - ANY + - ALL RelationType: description: Supported relation types. enum: @@ -22860,6 +23396,41 @@ components: required: - data type: object + RetryStrategy: + description: The definition of `RetryStrategy` object. 
+ properties: + kind: + $ref: '#/components/schemas/RetryStrategyKind' + linear: + $ref: '#/components/schemas/RetryStrategyLinear' + required: + - kind + type: object + RetryStrategyKind: + description: The definition of `RetryStrategyKind` object. + enum: + - RETRY_STRATEGY_LINEAR + example: RETRY_STRATEGY_LINEAR + type: string + x-enum-varnames: + - RETRY_STRATEGY_LINEAR + RetryStrategyLinear: + description: The definition of `RetryStrategyLinear` object. + properties: + interval: + description: The `RetryStrategyLinear` `interval`. The expected format is + the number of seconds ending with an s. For example, 1 day is 86400s + example: '' + type: string + maxRetries: + description: The `RetryStrategyLinear` `maxRetries`. + example: 0.0 + format: double + type: number + required: + - interval + - maxRetries + type: object Role: description: Role object returned by the API. properties: @@ -23966,6 +24537,26 @@ components: type: $ref: '#/components/schemas/ScalarFormulaResponseType' type: object + ScheduleTrigger: + description: Trigger a workflow VIA a Schedule. The workflow must be published. + properties: + rruleExpression: + description: Recurrence rule expression for scheduling. + example: '' + type: string + required: + - rruleExpression + type: object + ScheduleTriggerWrapper: + description: Schema for a Schedule-based trigger. + properties: + scheduleTrigger: + $ref: '#/components/schemas/ScheduleTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - scheduleTrigger + type: object ScorecardType: default: scorecard description: The JSON:API type for scorecard. @@ -26216,6 +26807,23 @@ components: nullable: true type: string type: object + SecurityTrigger: + description: Trigger a workflow VIA a Security Signal or Finding. For automatic + triggering a handle must be configured and the workflow must be published. + properties: + rateLimit: + $ref: '#/components/schemas/TriggerRateLimit' + type: object + SecurityTriggerWrapper: + description: Schema for a Security-based trigger. + properties: + securityTrigger: + $ref: '#/components/schemas/SecurityTrigger' + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - securityTrigger + type: object Selectors: description: 'Selectors are used to filter security issues for which notifications should be generated. @@ -27971,6 +28579,17 @@ components: - channel_name - redirect_url type: object + SlackTriggerWrapper: + description: Schema for a Slack-based trigger. + properties: + slackTrigger: + description: Trigger a workflow VIA Slack. The workflow must be published. + type: object + startStepNames: + $ref: '#/components/schemas/StartStepNames' + required: + - slackTrigger + type: object SloReportCreateRequest: description: The SLO report request body. properties: @@ -28893,6 +29512,41 @@ components: from the other indexes type: string type: object + Spec: + description: The spec defines what the workflow does. + properties: + annotations: + description: A list of annotations used in the workflow. These are like + sticky notes for your workflow! + items: + $ref: '#/components/schemas/Annotation' + type: array + connectionEnvs: + description: A list of connections or connection groups used in the workflow. + items: + $ref: '#/components/schemas/ConnectionEnv' + type: array + handle: + description: Unique identifier used to trigger workflows automatically in + Datadog. 
+ type: string + inputSchema: + $ref: '#/components/schemas/InputSchema' + outputSchema: + $ref: '#/components/schemas/OutputSchema' + steps: + description: A `Step` is a sub-component of a workflow. Each `Step` performs + an action. + items: + $ref: '#/components/schemas/Step' + type: array + triggers: + description: The list of triggers that activate this workflow. At least + one trigger is required, and each trigger type may appear at most once. + items: + $ref: '#/components/schemas/Trigger' + type: array + type: object SpecVersion: description: The version of the CycloneDX specification a BOM conforms to. enum: @@ -28911,6 +29565,14 @@ components: - ONE_THREE - ONE_FOUR - ONE_FIVE + StartStepNames: + description: A list of steps that run first after a trigger fires. + example: + - '' + items: + description: The `StartStepNames` `items`. + type: string + type: array State: description: The state of the rule evaluation. enum: @@ -28923,6 +29585,64 @@ components: - PASS - FAIL - SKIP + Step: + description: A Step is a sub-component of a workflow. Each Step performs an + action. + properties: + actionId: + description: The unique identifier of an action. + example: '' + type: string + completionGate: + $ref: '#/components/schemas/CompletionGate' + connectionLabel: + description: The unique identifier of a connection defined in the spec. + type: string + display: + $ref: '#/components/schemas/StepDisplay' + errorHandlers: + description: The `Step` `errorHandlers`. + items: + $ref: '#/components/schemas/ErrorHandler' + type: array + name: + description: Name of the step. + example: '' + type: string + outboundEdges: + description: A list of subsequent actions to run. + items: + $ref: '#/components/schemas/OutboundEdge' + type: array + parameters: + description: A list of inputs for an action. + items: + $ref: '#/components/schemas/Parameter' + type: array + readinessGate: + $ref: '#/components/schemas/ReadinessGate' + required: + - name + - actionId + type: object + StepDisplay: + description: The definition of `StepDisplay` object. + properties: + bounds: + $ref: '#/components/schemas/StepDisplayBounds' + type: object + StepDisplayBounds: + description: The definition of `StepDisplayBounds` object. + properties: + x: + description: The `bounds` `x`. + format: double + type: number + y: + description: The `bounds` `y`. + format: double + type: number + type: object TagsEventAttribute: description: Array of tags associated with your event. example: @@ -29729,6 +30449,33 @@ components: type: string x-enum-varnames: - SECRET + Trigger: + description: One of the triggers that can start the execution of a workflow. + oneOf: + - $ref: '#/components/schemas/APITriggerWrapper' + - $ref: '#/components/schemas/AppTriggerWrapper' + - $ref: '#/components/schemas/CaseTriggerWrapper' + - $ref: '#/components/schemas/ChangeEventTriggerWrapper' + - $ref: '#/components/schemas/DashboardTriggerWrapper' + - $ref: '#/components/schemas/GithubWebhookTriggerWrapper' + - $ref: '#/components/schemas/IncidentTriggerWrapper' + - $ref: '#/components/schemas/MonitorTriggerWrapper' + - $ref: '#/components/schemas/ScheduleTriggerWrapper' + - $ref: '#/components/schemas/SecurityTriggerWrapper' + - $ref: '#/components/schemas/SlackTriggerWrapper' + - $ref: '#/components/schemas/WorkflowTriggerWrapper' + TriggerRateLimit: + description: Defines a rate limit for a trigger. + properties: + count: + description: The `TriggerRateLimit` `count`. 
+ format: int64 + type: integer + interval: + description: The `TriggerRateLimit` `interval`. The expected format is the + number of seconds ending with an s. For example, 1 day is 86400s + type: string + type: object TriggerSource: description: 'The type of security issues on which the rule applies. Notification rules based on security signals need to use the trigger source "security_signals", @@ -30018,6 +30765,81 @@ components: type: $ref: '#/components/schemas/RuleType' type: object + UpdateWorkflowRequest: + description: A request object for updating an existing workflow. + example: + data: + attributes: + description: A sample workflow. + name: Example Workflow + published: true + spec: + annotations: + - display: + bounds: + height: 150 + width: 300 + x: -375 + y: -0.5 + id: 99999999-9999-9999-9999-999999999999 + markdownTextAnnotation: + text: Example annotation. + connectionEnvs: + - connections: + - connectionId: 11111111-1111-1111-1111-111111111111 + label: INTEGRATION_DATADOG + env: default + handle: my-handle + inputSchema: + parameters: + - defaultValue: default + name: input + type: STRING + outputSchema: + parameters: + - name: output + type: ARRAY_OBJECT + value: '{{ Steps.Step1 }}' + steps: + - actionId: com.datadoghq.dd.monitor.listMonitors + connectionLabel: INTEGRATION_DATADOG + name: Step1 + outboundEdges: + - branchName: main + nextStepName: Step2 + parameters: + - name: tags + value: service:monitoring + - actionId: com.datadoghq.core.noop + name: Step2 + triggers: + - monitorTrigger: + rateLimit: + count: 1 + interval: 3600s + startStepNames: + - Step1 + - githubWebhookTrigger: {} + startStepNames: + - Step1 + tags: + - team:infra + - service:monitoring + - foo:bar + id: 22222222-2222-2222-2222-222222222222 + type: workflows + properties: + data: + $ref: '#/components/schemas/WorkflowDataUpdate' + required: + - data + type: object + UpdateWorkflowResponse: + description: The response object after updating a workflow. + properties: + data: + $ref: '#/components/schemas/WorkflowDataUpdate' + type: object UpsertCatalogEntityRequest: description: Create or update entity request. oneOf: @@ -31069,6 +31891,134 @@ components: - PAST_SIX_MONTHS - PAST_ONE_YEAR - ALERT + WorkflowData: + description: Data related to the workflow. + properties: + attributes: + $ref: '#/components/schemas/WorkflowDataAttributes' + id: + description: The workflow identifier + readOnly: true + type: string + relationships: + $ref: '#/components/schemas/WorkflowDataRelationships' + type: + $ref: '#/components/schemas/WorkflowDataType' + required: + - type + - attributes + type: object + WorkflowDataAttributes: + description: The definition of `WorkflowDataAttributes` object. + properties: + createdAt: + description: When the workflow was created. + format: date-time + readOnly: true + type: string + description: + description: Description of the workflow. + type: string + name: + description: Name of the workflow. + example: '' + type: string + published: + description: Set the workflow to published or unpublished. Workflows in + an unpublished state will only be executable via manual runs. Automatic + triggers such as Schedule will not execute the workflow until it is published. + type: boolean + spec: + $ref: '#/components/schemas/Spec' + tags: + description: Tags of the workflow. + items: + type: string + type: array + updatedAt: + description: When the workflow was last updated. 
+ format: date-time + readOnly: true + type: string + webhookSecret: + description: If a Webhook trigger is defined on this workflow, a webhookSecret + is required and should be provided here. + type: string + writeOnly: true + required: + - name + - spec + type: object + WorkflowDataRelationships: + description: The definition of `WorkflowDataRelationships` object. + properties: + creator: + $ref: '#/components/schemas/WorkflowUserRelationship' + owner: + $ref: '#/components/schemas/WorkflowUserRelationship' + readOnly: true + type: object + WorkflowDataType: + description: The definition of `WorkflowDataType` object. + enum: + - workflows + example: workflows + type: string + x-enum-varnames: + - WORKFLOWS + WorkflowDataUpdate: + description: Data related to the workflow being updated. + properties: + attributes: + $ref: '#/components/schemas/WorkflowDataUpdateAttributes' + id: + description: The workflow identifier + type: string + relationships: + $ref: '#/components/schemas/WorkflowDataRelationships' + type: + $ref: '#/components/schemas/WorkflowDataType' + required: + - type + - attributes + type: object + WorkflowDataUpdateAttributes: + description: The definition of `WorkflowDataUpdateAttributes` object. + properties: + createdAt: + description: When the workflow was created. + format: date-time + readOnly: true + type: string + description: + description: Description of the workflow. + type: string + name: + description: Name of the workflow. + type: string + published: + description: Set the workflow to published or unpublished. Workflows in + an unpublished state will only be executable via manual runs. Automatic + triggers such as Schedule will not execute the workflow until it is published. + type: boolean + spec: + $ref: '#/components/schemas/Spec' + tags: + description: Tags of the workflow. + items: + type: string + type: array + updatedAt: + description: When the workflow was last updated. + format: date-time + readOnly: true + type: string + webhookSecret: + description: If a Webhook trigger is defined on this workflow, a webhookSecret + is required and should be provided here. + type: string + writeOnly: true + type: object WorkflowInstanceCreateMeta: description: Additional information for creating a workflow instance. properties: @@ -31135,6 +32085,45 @@ components: format: int64 type: integer type: object + WorkflowTriggerWrapper: + description: Schema for a Workflow-based trigger. + properties: + startStepNames: + $ref: '#/components/schemas/StartStepNames' + workflowTrigger: + description: Trigger a workflow VIA the Datadog UI. Only required if no + other trigger exists. + type: object + required: + - workflowTrigger + type: object + WorkflowUserRelationship: + description: The definition of `WorkflowUserRelationship` object. + properties: + data: + $ref: '#/components/schemas/WorkflowUserRelationshipData' + type: object + WorkflowUserRelationshipData: + description: The definition of `WorkflowUserRelationshipData` object. + properties: + id: + description: The user identifier + example: '' + type: string + type: + $ref: '#/components/schemas/WorkflowUserRelationshipType' + required: + - type + - id + type: object + WorkflowUserRelationshipType: + description: The definition of `WorkflowUserRelationshipType` object. + enum: + - users + example: users + type: string + x-enum-varnames: + - USERS WorklflowCancelInstanceResponse: description: Information about the canceled instance. 
properties: @@ -49835,6 +50824,178 @@ paths: operator: OR permissions: - teams_read + /api/v2/workflows: + post: + description: Create a new workflow, returning the workflow ID. This API requires + an application key scoped with the `workflows_write` permission. + operationId: CreateWorkflow + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateWorkflowRequest' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/CreateWorkflowResponse' + description: Successfully created a workflow. + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Bad request + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Forbidden + '429': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Too many requests + summary: Create a Workflow + tags: + - Workflow Automation + x-permission: + operator: OR + permissions: + - workflows_write + /api/v2/workflows/{workflow_id}: + delete: + description: Delete a workflow by ID. This API requires an application key scoped + with the `workflows_write` permission. + operationId: DeleteWorkflow + parameters: + - $ref: '#/components/parameters/WorkflowId' + responses: + '204': + description: Successfully deleted a workflow. + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Not found + '429': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Too many requests + summary: Delete an existing Workflow + tags: + - Workflow Automation + x-permission: + operator: OR + permissions: + - workflows_write + get: + description: Get a workflow by ID. This API requires an application key scoped + with the `workflows_read` permission. + operationId: GetWorkflow + parameters: + - $ref: '#/components/parameters/WorkflowId' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/GetWorkflowResponse' + description: Successfully got a workflow. + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Bad request + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Not found + '429': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Too many requests + summary: Get an existing Workflow + tags: + - Workflow Automation + x-permission: + operator: OR + permissions: + - workflows_read + patch: + description: Update a workflow by ID. This API requires an application key scoped + with the `workflows_write` permission. + operationId: UpdateWorkflow + parameters: + - $ref: '#/components/parameters/WorkflowId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateWorkflowRequest' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateWorkflowResponse' + description: Successfully updated a workflow. 
+ '400': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Bad request + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Not found + '429': + content: + application/json: + schema: + $ref: '#/components/schemas/JSONAPIErrorResponse' + description: Too many requests + summary: Update an existing Workflow + tags: + - Workflow Automation + x-permission: + operator: OR + permissions: + - workflows_write /api/v2/workflows/{workflow_id}/instances: get: description: List all instances of a given workflow. This API requires an application @@ -50438,7 +51599,12 @@ tags: externalDocs: url: https://docs.datadoghq.com/account_management/users name: Users -- description: Automate your teams operational processes with Datadog Workflow Automation. +- description: Datadog Workflow Automation allows you to automate your end-to-end + processes by connecting Datadog with the rest of your tech stack. Build workflows + to auto-remediate your alerts, streamline your incident and security processes, + and reduce manual toil. Workflow Automation supports over 1,000+ OOTB actions, + including AWS, JIRA, ServiceNow, GitHub, and OpenAI. Learn more in our Workflow + Automation docs [here](https://docs.datadoghq.com/service_management/workflows/). externalDocs: description: Find out more at url: https://docs.datadoghq.com/service_management/workflows/ diff --git a/examples/v2_workflow-automation_CreateWorkflow.rs b/examples/v2_workflow-automation_CreateWorkflow.rs new file mode 100644 index 000000000..42987d452 --- /dev/null +++ b/examples/v2_workflow-automation_CreateWorkflow.rs @@ -0,0 +1,103 @@ +// Create a Workflow returns "Successfully created a workflow." 
response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_workflow_automation::WorkflowAutomationAPI; +use datadog_api_client::datadogV2::model::Connection; +use datadog_api_client::datadogV2::model::ConnectionEnv; +use datadog_api_client::datadogV2::model::ConnectionEnvEnv; +use datadog_api_client::datadogV2::model::CreateWorkflowRequest; +use datadog_api_client::datadogV2::model::GithubWebhookTrigger; +use datadog_api_client::datadogV2::model::GithubWebhookTriggerWrapper; +use datadog_api_client::datadogV2::model::InputSchema; +use datadog_api_client::datadogV2::model::InputSchemaParameters; +use datadog_api_client::datadogV2::model::InputSchemaParametersType; +use datadog_api_client::datadogV2::model::MonitorTrigger; +use datadog_api_client::datadogV2::model::MonitorTriggerWrapper; +use datadog_api_client::datadogV2::model::OutboundEdge; +use datadog_api_client::datadogV2::model::OutputSchema; +use datadog_api_client::datadogV2::model::OutputSchemaParameters; +use datadog_api_client::datadogV2::model::OutputSchemaParametersType; +use datadog_api_client::datadogV2::model::Parameter; +use datadog_api_client::datadogV2::model::Spec; +use datadog_api_client::datadogV2::model::Step; +use datadog_api_client::datadogV2::model::Trigger; +use datadog_api_client::datadogV2::model::TriggerRateLimit; +use datadog_api_client::datadogV2::model::WorkflowData; +use datadog_api_client::datadogV2::model::WorkflowDataAttributes; +use datadog_api_client::datadogV2::model::WorkflowDataType; +use serde_json::Value; + +#[tokio::main] +async fn main() { + let body = CreateWorkflowRequest::new(WorkflowData::new( + WorkflowDataAttributes::new( + "Example Workflow".to_string(), + Spec::new() + .connection_envs(vec![ConnectionEnv::new(ConnectionEnvEnv::DEFAULT) + .connections(vec![Connection::new( + "11111111-1111-1111-1111-111111111111".to_string(), + "INTEGRATION_DATADOG".to_string(), + )])]) + .handle("my-handle".to_string()) + .input_schema(InputSchema::new().parameters(vec![ + InputSchemaParameters::new( + "input".to_string(), + InputSchemaParametersType::STRING, + ).default_value(Value::from("default")) + ])) + .output_schema(OutputSchema::new().parameters(vec![ + OutputSchemaParameters::new( + "output".to_string(), + OutputSchemaParametersType::ARRAY_OBJECT, + ).value(Value::from("outputValue")) + ])) + .steps(vec![ + Step::new( + "com.datadoghq.dd.monitor.listMonitors".to_string(), + "Step1".to_string(), + ) + .connection_label("INTEGRATION_DATADOG".to_string()) + .outbound_edges(vec![OutboundEdge::new( + "main".to_string(), + "Step2".to_string(), + )]) + .parameters(vec![Parameter::new( + "tags".to_string(), + Value::from("service:monitoring"), + )]), + Step::new("com.datadoghq.core.noop".to_string(), "Step2".to_string()), + ]) + .triggers(vec![ + Trigger::MonitorTriggerWrapper(Box::new( + MonitorTriggerWrapper::new( + MonitorTrigger::new().rate_limit( + TriggerRateLimit::new() + .count(1) + .interval("3600s".to_string()), + ), + ) + .start_step_names(vec!["Step1".to_string()]), + )), + Trigger::GithubWebhookTriggerWrapper(Box::new( + GithubWebhookTriggerWrapper::new(GithubWebhookTrigger::new()) + .start_step_names(vec!["Step1".to_string()]), + )), + ]), + ) + .description("A sample workflow.".to_string()) + .published(true) + .tags(vec![ + "team:infra".to_string(), + "service:monitoring".to_string(), + "foo:bar".to_string(), + ]), + WorkflowDataType::WORKFLOWS, + )); + let configuration = datadog::Configuration::new(); + let api = 
WorkflowAutomationAPI::with_config(configuration); + let resp = api.create_workflow(body).await; + if let Ok(value) = resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/examples/v2_workflow-automation_DeleteWorkflow.rs b/examples/v2_workflow-automation_DeleteWorkflow.rs new file mode 100644 index 000000000..597daeb70 --- /dev/null +++ b/examples/v2_workflow-automation_DeleteWorkflow.rs @@ -0,0 +1,17 @@ +// Delete an existing Workflow returns "Successfully deleted a workflow." response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_workflow_automation::WorkflowAutomationAPI; + +#[tokio::main] +async fn main() { + // there is a valid "workflow" in the system + let workflow_data_id = std::env::var("WORKFLOW_DATA_ID").unwrap(); + let configuration = datadog::Configuration::new(); + let api = WorkflowAutomationAPI::with_config(configuration); + let resp = api.delete_workflow(workflow_data_id.clone()).await; + if let Ok(value) = resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/examples/v2_workflow-automation_GetWorkflow.rs b/examples/v2_workflow-automation_GetWorkflow.rs new file mode 100644 index 000000000..b2b8721d8 --- /dev/null +++ b/examples/v2_workflow-automation_GetWorkflow.rs @@ -0,0 +1,17 @@ +// Get an existing Workflow returns "Successfully got a workflow." response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_workflow_automation::WorkflowAutomationAPI; + +#[tokio::main] +async fn main() { + // there is a valid "workflow" in the system + let workflow_data_id = std::env::var("WORKFLOW_DATA_ID").unwrap(); + let configuration = datadog::Configuration::new(); + let api = WorkflowAutomationAPI::with_config(configuration); + let resp = api.get_workflow(workflow_data_id.clone()).await; + if let Ok(value) = resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/examples/v2_workflow-automation_UpdateWorkflow.rs b/examples/v2_workflow-automation_UpdateWorkflow.rs new file mode 100644 index 000000000..4e8e6eeb4 --- /dev/null +++ b/examples/v2_workflow-automation_UpdateWorkflow.rs @@ -0,0 +1,109 @@ +// Update an existing Workflow returns "Successfully updated a workflow." 
response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_workflow_automation::WorkflowAutomationAPI; +use datadog_api_client::datadogV2::model::Connection; +use datadog_api_client::datadogV2::model::ConnectionEnv; +use datadog_api_client::datadogV2::model::ConnectionEnvEnv; +use datadog_api_client::datadogV2::model::GithubWebhookTrigger; +use datadog_api_client::datadogV2::model::GithubWebhookTriggerWrapper; +use datadog_api_client::datadogV2::model::InputSchema; +use datadog_api_client::datadogV2::model::InputSchemaParameters; +use datadog_api_client::datadogV2::model::InputSchemaParametersType; +use datadog_api_client::datadogV2::model::MonitorTrigger; +use datadog_api_client::datadogV2::model::MonitorTriggerWrapper; +use datadog_api_client::datadogV2::model::OutboundEdge; +use datadog_api_client::datadogV2::model::OutputSchema; +use datadog_api_client::datadogV2::model::OutputSchemaParameters; +use datadog_api_client::datadogV2::model::OutputSchemaParametersType; +use datadog_api_client::datadogV2::model::Parameter; +use datadog_api_client::datadogV2::model::Spec; +use datadog_api_client::datadogV2::model::Step; +use datadog_api_client::datadogV2::model::Trigger; +use datadog_api_client::datadogV2::model::TriggerRateLimit; +use datadog_api_client::datadogV2::model::UpdateWorkflowRequest; +use datadog_api_client::datadogV2::model::WorkflowDataType; +use datadog_api_client::datadogV2::model::WorkflowDataUpdate; +use datadog_api_client::datadogV2::model::WorkflowDataUpdateAttributes; +use serde_json::Value; + +#[tokio::main] +async fn main() { + // there is a valid "workflow" in the system + let workflow_data_id = std::env::var("WORKFLOW_DATA_ID").unwrap(); + let body = UpdateWorkflowRequest::new( + WorkflowDataUpdate::new( + WorkflowDataUpdateAttributes::new() + .description("A sample workflow.".to_string()) + .name("Example Workflow".to_string()) + .published(true) + .spec( + Spec::new() + .connection_envs(vec![ConnectionEnv::new(ConnectionEnvEnv::DEFAULT) + .connections(vec![Connection::new( + "11111111-1111-1111-1111-111111111111".to_string(), + "INTEGRATION_DATADOG".to_string(), + )])]) + .handle("my-handle".to_string()) + .input_schema(InputSchema::new().parameters(vec![ + InputSchemaParameters::new( + "input".to_string(), + InputSchemaParametersType::STRING, + ).default_value(Value::from("default")) + ])) + .output_schema(OutputSchema::new().parameters(vec![ + OutputSchemaParameters::new( + "output".to_string(), + OutputSchemaParametersType::ARRAY_OBJECT, + ).value(Value::from("outputValue")) + ])) + .steps(vec![ + Step::new( + "com.datadoghq.dd.monitor.listMonitors".to_string(), + "Step1".to_string(), + ) + .connection_label("INTEGRATION_DATADOG".to_string()) + .outbound_edges(vec![OutboundEdge::new( + "main".to_string(), + "Step2".to_string(), + )]) + .parameters(vec![Parameter::new( + "tags".to_string(), + Value::from("service:monitoring"), + )]), + Step::new("com.datadoghq.core.noop".to_string(), "Step2".to_string()), + ]) + .triggers(vec![ + Trigger::MonitorTriggerWrapper(Box::new( + MonitorTriggerWrapper::new( + MonitorTrigger::new().rate_limit( + TriggerRateLimit::new() + .count(1) + .interval("3600s".to_string()), + ), + ) + .start_step_names(vec!["Step1".to_string()]), + )), + Trigger::GithubWebhookTriggerWrapper(Box::new( + GithubWebhookTriggerWrapper::new(GithubWebhookTrigger::new()) + .start_step_names(vec!["Step1".to_string()]), + )), + ]), + ) + .tags(vec![ + "team:infra".to_string(), + "service:monitoring".to_string(), + 
"foo:bar".to_string(), + ]), + WorkflowDataType::WORKFLOWS, + ) + .id("22222222-2222-2222-2222-222222222222".to_string()), + ); + let configuration = datadog::Configuration::new(); + let api = WorkflowAutomationAPI::with_config(configuration); + let resp = api.update_workflow(workflow_data_id.clone(), body).await; + if let Ok(value) = resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/src/datadogV2/api/api_workflow_automation.rs b/src/datadogV2/api/api_workflow_automation.rs index 44c1983ef..024d3763c 100644 --- a/src/datadogV2/api/api_workflow_automation.rs +++ b/src/datadogV2/api/api_workflow_automation.rs @@ -41,6 +41,14 @@ pub enum CancelWorkflowInstanceError { UnknownValue(serde_json::Value), } +/// CreateWorkflowError is a struct for typed errors of method [`WorkflowAutomationAPI::create_workflow`] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum CreateWorkflowError { + JSONAPIErrorResponse(crate::datadogV2::model::JSONAPIErrorResponse), + UnknownValue(serde_json::Value), +} + /// CreateWorkflowInstanceError is a struct for typed errors of method [`WorkflowAutomationAPI::create_workflow_instance`] #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] @@ -49,6 +57,22 @@ pub enum CreateWorkflowInstanceError { UnknownValue(serde_json::Value), } +/// DeleteWorkflowError is a struct for typed errors of method [`WorkflowAutomationAPI::delete_workflow`] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum DeleteWorkflowError { + JSONAPIErrorResponse(crate::datadogV2::model::JSONAPIErrorResponse), + UnknownValue(serde_json::Value), +} + +/// GetWorkflowError is a struct for typed errors of method [`WorkflowAutomationAPI::get_workflow`] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum GetWorkflowError { + JSONAPIErrorResponse(crate::datadogV2::model::JSONAPIErrorResponse), + UnknownValue(serde_json::Value), +} + /// GetWorkflowInstanceError is a struct for typed errors of method [`WorkflowAutomationAPI::get_workflow_instance`] #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] @@ -65,7 +89,15 @@ pub enum ListWorkflowInstancesError { UnknownValue(serde_json::Value), } -/// Automate your teams operational processes with Datadog Workflow Automation. +/// UpdateWorkflowError is a struct for typed errors of method [`WorkflowAutomationAPI::update_workflow`] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum UpdateWorkflowError { + JSONAPIErrorResponse(crate::datadogV2::model::JSONAPIErrorResponse), + UnknownValue(serde_json::Value), +} + +/// Datadog Workflow Automation allows you to automate your end-to-end processes by connecting Datadog with the rest of your tech stack. Build workflows to auto-remediate your alerts, streamline your incident and security processes, and reduce manual toil. Workflow Automation supports over 1,000+ OOTB actions, including AWS, JIRA, ServiceNow, GitHub, and OpenAI. Learn more in our Workflow Automation docs [here](). #[derive(Debug, Clone)] pub struct WorkflowAutomationAPI { config: datadog::Configuration, @@ -245,6 +277,158 @@ impl WorkflowAutomationAPI { } } + /// Create a new workflow, returning the workflow ID. This API requires an application key scoped with the `workflows_write` permission. 
+ pub async fn create_workflow( + &self, + body: crate::datadogV2::model::CreateWorkflowRequest, + ) -> Result> + { + match self.create_workflow_with_http_info(body).await { + Ok(response_content) => { + if let Some(e) = response_content.entity { + Ok(e) + } else { + Err(datadog::Error::Serde(serde::de::Error::custom( + "response content was None", + ))) + } + } + Err(err) => Err(err), + } + } + + /// Create a new workflow, returning the workflow ID. This API requires an application key scoped with the `workflows_write` permission. + pub async fn create_workflow_with_http_info( + &self, + body: crate::datadogV2::model::CreateWorkflowRequest, + ) -> Result< + datadog::ResponseContent, + datadog::Error, + > { + let local_configuration = &self.config; + let operation_id = "v2.create_workflow"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/workflows", + local_configuration.get_operation_host(operation_id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::POST, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Content-Type", HeaderValue::from_static("application/json")); + headers.insert("Accept", HeaderValue::from_static("application/json")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + // build body parameters + let output = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(output, datadog::DDFormatter); + if body.serialize(&mut ser).is_ok() { + if let Some(content_encoding) = headers.get("Content-Encoding") { + match content_encoding.to_str().unwrap_or_default() { + "gzip" => { + let mut enc = GzEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "deflate" => { + let mut enc = ZlibEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "zstd1" => { + let mut enc = zstd::stream::Encoder::new(Vec::new(), 0).unwrap(); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + _ => { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + } else { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + + local_req_builder = local_req_builder.headers(headers); + let local_req = 
local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if !local_status.is_client_error() && !local_status.is_server_error() { + match serde_json::from_str::( + &local_content, + ) { + Ok(e) => { + return Ok(datadog::ResponseContent { + status: local_status, + content: local_content, + entity: Some(e), + }) + } + Err(e) => return Err(datadog::Error::Serde(e)), + }; + } else { + let local_entity: Option = + serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } + /// Execute the given workflow. This API requires an application key scoped with the workflows_run permission. pub async fn create_workflow_instance( &self, @@ -405,6 +589,200 @@ impl WorkflowAutomationAPI { } } + /// Delete a workflow by ID. This API requires an application key scoped with the `workflows_write` permission. + pub async fn delete_workflow( + &self, + workflow_id: String, + ) -> Result<(), datadog::Error> { + match self.delete_workflow_with_http_info(workflow_id).await { + Ok(_) => Ok(()), + Err(err) => Err(err), + } + } + + /// Delete a workflow by ID. This API requires an application key scoped with the `workflows_write` permission. + pub async fn delete_workflow_with_http_info( + &self, + workflow_id: String, + ) -> Result, datadog::Error> { + let local_configuration = &self.config; + let operation_id = "v2.delete_workflow"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/workflows/{workflow_id}", + local_configuration.get_operation_host(operation_id), + workflow_id = datadog::urlencode(workflow_id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::DELETE, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Accept", HeaderValue::from_static("*/*")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + local_req_builder = local_req_builder.headers(headers); + let local_req = local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if !local_status.is_client_error() && !local_status.is_server_error() { + Ok(datadog::ResponseContent { + status: local_status, + content: 
local_content, + entity: None, + }) + } else { + let local_entity: Option = + serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } + + /// Get a workflow by ID. This API requires an application key scoped with the `workflows_read` permission. + pub async fn get_workflow( + &self, + workflow_id: String, + ) -> Result> + { + match self.get_workflow_with_http_info(workflow_id).await { + Ok(response_content) => { + if let Some(e) = response_content.entity { + Ok(e) + } else { + Err(datadog::Error::Serde(serde::de::Error::custom( + "response content was None", + ))) + } + } + Err(err) => Err(err), + } + } + + /// Get a workflow by ID. This API requires an application key scoped with the `workflows_read` permission. + pub async fn get_workflow_with_http_info( + &self, + workflow_id: String, + ) -> Result< + datadog::ResponseContent, + datadog::Error, + > { + let local_configuration = &self.config; + let operation_id = "v2.get_workflow"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/workflows/{workflow_id}", + local_configuration.get_operation_host(operation_id), + workflow_id = datadog::urlencode(workflow_id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::GET, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Accept", HeaderValue::from_static("application/json")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + local_req_builder = local_req_builder.headers(headers); + let local_req = local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if !local_status.is_client_error() && !local_status.is_server_error() { + match serde_json::from_str::( + &local_content, + ) { + Ok(e) => { + return Ok(datadog::ResponseContent { + status: local_status, + content: local_content, + entity: Some(e), + }) + } + Err(e) => return Err(datadog::Error::Serde(e)), + }; + } else { + let local_entity: Option = serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } + /// Get a specific execution of a given workflow. This API requires an application key scoped with the workflows_read permission. 
pub async fn get_workflow_instance( &self, @@ -646,4 +1024,159 @@ impl WorkflowAutomationAPI { Err(datadog::Error::ResponseError(local_error)) } } + + /// Update a workflow by ID. This API requires an application key scoped with the `workflows_write` permission. + pub async fn update_workflow( + &self, + workflow_id: String, + body: crate::datadogV2::model::UpdateWorkflowRequest, + ) -> Result> + { + match self.update_workflow_with_http_info(workflow_id, body).await { + Ok(response_content) => { + if let Some(e) = response_content.entity { + Ok(e) + } else { + Err(datadog::Error::Serde(serde::de::Error::custom( + "response content was None", + ))) + } + } + Err(err) => Err(err), + } + } + + /// Update a workflow by ID. This API requires an application key scoped with the `workflows_write` permission. + pub async fn update_workflow_with_http_info( + &self, + workflow_id: String, + body: crate::datadogV2::model::UpdateWorkflowRequest, + ) -> Result< + datadog::ResponseContent, + datadog::Error, + > { + let local_configuration = &self.config; + let operation_id = "v2.update_workflow"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/workflows/{workflow_id}", + local_configuration.get_operation_host(operation_id), + workflow_id = datadog::urlencode(workflow_id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::PATCH, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Content-Type", HeaderValue::from_static("application/json")); + headers.insert("Accept", HeaderValue::from_static("application/json")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + // build body parameters + let output = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(output, datadog::DDFormatter); + if body.serialize(&mut ser).is_ok() { + if let Some(content_encoding) = headers.get("Content-Encoding") { + match content_encoding.to_str().unwrap_or_default() { + "gzip" => { + let mut enc = GzEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "deflate" => { + let mut enc = ZlibEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "zstd1" => { + let mut enc = zstd::stream::Encoder::new(Vec::new(), 0).unwrap(); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + 
local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + _ => { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + } else { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + + local_req_builder = local_req_builder.headers(headers); + let local_req = local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if !local_status.is_client_error() && !local_status.is_server_error() { + match serde_json::from_str::( + &local_content, + ) { + Ok(e) => { + return Ok(datadog::ResponseContent { + status: local_status, + content: local_content, + entity: Some(e), + }) + } + Err(e) => return Err(datadog::Error::Serde(e)), + }; + } else { + let local_entity: Option = + serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } } diff --git a/src/datadogV2/model/mod.rs b/src/datadogV2/model/mod.rs index 6d3539230..4a3fbeaa8 100644 --- a/src/datadogV2/model/mod.rs +++ b/src/datadogV2/model/mod.rs @@ -3864,6 +3864,134 @@ pub mod model_user_update_data; pub use self::model_user_update_data::UserUpdateData; pub mod model_user_update_attributes; pub use self::model_user_update_attributes::UserUpdateAttributes; +pub mod model_create_workflow_request; +pub use self::model_create_workflow_request::CreateWorkflowRequest; +pub mod model_workflow_data; +pub use self::model_workflow_data::WorkflowData; +pub mod model_workflow_data_attributes; +pub use self::model_workflow_data_attributes::WorkflowDataAttributes; +pub mod model_spec; +pub use self::model_spec::Spec; +pub mod model_annotation; +pub use self::model_annotation::Annotation; +pub mod model_annotation_display; +pub use self::model_annotation_display::AnnotationDisplay; +pub mod model_annotation_display_bounds; +pub use self::model_annotation_display_bounds::AnnotationDisplayBounds; +pub mod model_annotation_markdown_text_annotation; +pub use self::model_annotation_markdown_text_annotation::AnnotationMarkdownTextAnnotation; +pub mod model_connection_env; +pub use self::model_connection_env::ConnectionEnv; +pub mod model_connection_group; +pub use self::model_connection_group::ConnectionGroup; +pub mod model_connection; +pub use self::model_connection::Connection; +pub mod model_connection_env_env; +pub use self::model_connection_env_env::ConnectionEnvEnv; +pub mod model_input_schema; +pub use self::model_input_schema::InputSchema; +pub mod model_input_schema_parameters; +pub use self::model_input_schema_parameters::InputSchemaParameters; +pub mod model_input_schema_parameters_type; +pub use self::model_input_schema_parameters_type::InputSchemaParametersType; +pub mod model_output_schema; +pub use self::model_output_schema::OutputSchema; +pub mod model_output_schema_parameters; +pub use self::model_output_schema_parameters::OutputSchemaParameters; +pub mod model_output_schema_parameters_type; +pub use self::model_output_schema_parameters_type::OutputSchemaParametersType; +pub mod model_step; +pub use self::model_step::Step; +pub mod model_completion_gate; +pub use self::model_completion_gate::CompletionGate; +pub mod model_completion_condition; +pub use 
self::model_completion_condition::CompletionCondition; +pub mod model_completion_condition_operator; +pub use self::model_completion_condition_operator::CompletionConditionOperator; +pub mod model_retry_strategy; +pub use self::model_retry_strategy::RetryStrategy; +pub mod model_retry_strategy_kind; +pub use self::model_retry_strategy_kind::RetryStrategyKind; +pub mod model_retry_strategy_linear; +pub use self::model_retry_strategy_linear::RetryStrategyLinear; +pub mod model_step_display; +pub use self::model_step_display::StepDisplay; +pub mod model_step_display_bounds; +pub use self::model_step_display_bounds::StepDisplayBounds; +pub mod model_error_handler; +pub use self::model_error_handler::ErrorHandler; +pub mod model_outbound_edge; +pub use self::model_outbound_edge::OutboundEdge; +pub mod model_parameter; +pub use self::model_parameter::Parameter; +pub mod model_readiness_gate; +pub use self::model_readiness_gate::ReadinessGate; +pub mod model_readiness_gate_threshold_type; +pub use self::model_readiness_gate_threshold_type::ReadinessGateThresholdType; +pub mod model_api_trigger_wrapper; +pub use self::model_api_trigger_wrapper::APITriggerWrapper; +pub mod model_api_trigger; +pub use self::model_api_trigger::APITrigger; +pub mod model_trigger_rate_limit; +pub use self::model_trigger_rate_limit::TriggerRateLimit; +pub mod model_app_trigger_wrapper; +pub use self::model_app_trigger_wrapper::AppTriggerWrapper; +pub mod model_case_trigger_wrapper; +pub use self::model_case_trigger_wrapper::CaseTriggerWrapper; +pub mod model_case_trigger; +pub use self::model_case_trigger::CaseTrigger; +pub mod model_change_event_trigger_wrapper; +pub use self::model_change_event_trigger_wrapper::ChangeEventTriggerWrapper; +pub mod model_dashboard_trigger_wrapper; +pub use self::model_dashboard_trigger_wrapper::DashboardTriggerWrapper; +pub mod model_github_webhook_trigger_wrapper; +pub use self::model_github_webhook_trigger_wrapper::GithubWebhookTriggerWrapper; +pub mod model_github_webhook_trigger; +pub use self::model_github_webhook_trigger::GithubWebhookTrigger; +pub mod model_incident_trigger_wrapper; +pub use self::model_incident_trigger_wrapper::IncidentTriggerWrapper; +pub mod model_incident_trigger; +pub use self::model_incident_trigger::IncidentTrigger; +pub mod model_monitor_trigger_wrapper; +pub use self::model_monitor_trigger_wrapper::MonitorTriggerWrapper; +pub mod model_monitor_trigger; +pub use self::model_monitor_trigger::MonitorTrigger; +pub mod model_schedule_trigger_wrapper; +pub use self::model_schedule_trigger_wrapper::ScheduleTriggerWrapper; +pub mod model_schedule_trigger; +pub use self::model_schedule_trigger::ScheduleTrigger; +pub mod model_security_trigger_wrapper; +pub use self::model_security_trigger_wrapper::SecurityTriggerWrapper; +pub mod model_security_trigger; +pub use self::model_security_trigger::SecurityTrigger; +pub mod model_slack_trigger_wrapper; +pub use self::model_slack_trigger_wrapper::SlackTriggerWrapper; +pub mod model_workflow_trigger_wrapper; +pub use self::model_workflow_trigger_wrapper::WorkflowTriggerWrapper; +pub mod model_trigger; +pub use self::model_trigger::Trigger; +pub mod model_workflow_data_relationships; +pub use self::model_workflow_data_relationships::WorkflowDataRelationships; +pub mod model_workflow_user_relationship; +pub use self::model_workflow_user_relationship::WorkflowUserRelationship; +pub mod model_workflow_user_relationship_data; +pub use self::model_workflow_user_relationship_data::WorkflowUserRelationshipData; +pub mod 
model_workflow_user_relationship_type; +pub use self::model_workflow_user_relationship_type::WorkflowUserRelationshipType; +pub mod model_workflow_data_type; +pub use self::model_workflow_data_type::WorkflowDataType; +pub mod model_create_workflow_response; +pub use self::model_create_workflow_response::CreateWorkflowResponse; +pub mod model_get_workflow_response; +pub use self::model_get_workflow_response::GetWorkflowResponse; +pub mod model_update_workflow_request; +pub use self::model_update_workflow_request::UpdateWorkflowRequest; +pub mod model_workflow_data_update; +pub use self::model_workflow_data_update::WorkflowDataUpdate; +pub mod model_workflow_data_update_attributes; +pub use self::model_workflow_data_update_attributes::WorkflowDataUpdateAttributes; +pub mod model_update_workflow_response; +pub use self::model_update_workflow_response::UpdateWorkflowResponse; pub mod model_workflow_list_instances_response; pub use self::model_workflow_list_instances_response::WorkflowListInstancesResponse; pub mod model_workflow_instance_list_item; diff --git a/src/datadogV2/model/model_annotation.rs b/src/datadogV2/model/model_annotation.rs new file mode 100644 index 000000000..04469bfb2 --- /dev/null +++ b/src/datadogV2/model/model_annotation.rs @@ -0,0 +1,120 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A list of annotations used in the workflow. These are like sticky notes for your workflow! +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct Annotation { + /// The definition of `AnnotationDisplay` object. + #[serde(rename = "display")] + pub display: crate::datadogV2::model::AnnotationDisplay, + /// The `Annotation` `id`. + #[serde(rename = "id")] + pub id: String, + /// The definition of `AnnotationMarkdownTextAnnotation` object. 
+ #[serde(rename = "markdownTextAnnotation")] + pub markdown_text_annotation: crate::datadogV2::model::AnnotationMarkdownTextAnnotation, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl Annotation { + pub fn new( + display: crate::datadogV2::model::AnnotationDisplay, + id: String, + markdown_text_annotation: crate::datadogV2::model::AnnotationMarkdownTextAnnotation, + ) -> Annotation { + Annotation { + display, + id, + markdown_text_annotation, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for Annotation { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct AnnotationVisitor; + impl<'a> Visitor<'a> for AnnotationVisitor { + type Value = Annotation; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut display: Option = None; + let mut id: Option = None; + let mut markdown_text_annotation: Option< + crate::datadogV2::model::AnnotationMarkdownTextAnnotation, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "display" => { + display = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "id" => { + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "markdownTextAnnotation" => { + markdown_text_annotation = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let display = display.ok_or_else(|| M::Error::missing_field("display"))?; + let id = id.ok_or_else(|| M::Error::missing_field("id"))?; + let markdown_text_annotation = markdown_text_annotation + .ok_or_else(|| M::Error::missing_field("markdown_text_annotation"))?; + + let content = Annotation { + display, + id, + markdown_text_annotation, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(AnnotationVisitor) + } +} diff --git a/src/datadogV2/model/model_annotation_display.rs b/src/datadogV2/model/model_annotation_display.rs new file mode 100644 index 000000000..d8084780c --- /dev/null +++ b/src/datadogV2/model/model_annotation_display.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `AnnotationDisplay` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AnnotationDisplay { + /// The definition of `AnnotationDisplayBounds` object. 
+ #[serde(rename = "bounds")] + pub bounds: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl AnnotationDisplay { + pub fn new() -> AnnotationDisplay { + AnnotationDisplay { + bounds: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn bounds(mut self, value: crate::datadogV2::model::AnnotationDisplayBounds) -> Self { + self.bounds = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for AnnotationDisplay { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for AnnotationDisplay { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct AnnotationDisplayVisitor; + impl<'a> Visitor<'a> for AnnotationDisplayVisitor { + type Value = AnnotationDisplay; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut bounds: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "bounds" => { + if v.is_null() { + continue; + } + bounds = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = AnnotationDisplay { + bounds, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(AnnotationDisplayVisitor) + } +} diff --git a/src/datadogV2/model/model_annotation_display_bounds.rs b/src/datadogV2/model/model_annotation_display_bounds.rs new file mode 100644 index 000000000..0d6eb5282 --- /dev/null +++ b/src/datadogV2/model/model_annotation_display_bounds.rs @@ -0,0 +1,156 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `AnnotationDisplayBounds` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AnnotationDisplayBounds { + /// The `bounds` `height`. + #[serde(rename = "height")] + pub height: Option, + /// The `bounds` `width`. + #[serde(rename = "width")] + pub width: Option, + /// The `bounds` `x`. + #[serde(rename = "x")] + pub x: Option, + /// The `bounds` `y`. 
+ #[serde(rename = "y")] + pub y: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl AnnotationDisplayBounds { + pub fn new() -> AnnotationDisplayBounds { + AnnotationDisplayBounds { + height: None, + width: None, + x: None, + y: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn height(mut self, value: f64) -> Self { + self.height = Some(value); + self + } + + pub fn width(mut self, value: f64) -> Self { + self.width = Some(value); + self + } + + pub fn x(mut self, value: f64) -> Self { + self.x = Some(value); + self + } + + pub fn y(mut self, value: f64) -> Self { + self.y = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for AnnotationDisplayBounds { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for AnnotationDisplayBounds { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct AnnotationDisplayBoundsVisitor; + impl<'a> Visitor<'a> for AnnotationDisplayBoundsVisitor { + type Value = AnnotationDisplayBounds; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut height: Option = None; + let mut width: Option = None; + let mut x: Option = None; + let mut y: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "height" => { + if v.is_null() { + continue; + } + height = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "width" => { + if v.is_null() { + continue; + } + width = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "x" => { + if v.is_null() { + continue; + } + x = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "y" => { + if v.is_null() { + continue; + } + y = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = AnnotationDisplayBounds { + height, + width, + x, + y, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(AnnotationDisplayBoundsVisitor) + } +} diff --git a/src/datadogV2/model/model_annotation_markdown_text_annotation.rs b/src/datadogV2/model/model_annotation_markdown_text_annotation.rs new file mode 100644 index 000000000..494728d88 --- /dev/null +++ b/src/datadogV2/model/model_annotation_markdown_text_annotation.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `AnnotationMarkdownTextAnnotation` object. 
+#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AnnotationMarkdownTextAnnotation { + /// The `markdownTextAnnotation` `text`. + #[serde(rename = "text")] + pub text: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl AnnotationMarkdownTextAnnotation { + pub fn new() -> AnnotationMarkdownTextAnnotation { + AnnotationMarkdownTextAnnotation { + text: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn text(mut self, value: String) -> Self { + self.text = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for AnnotationMarkdownTextAnnotation { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for AnnotationMarkdownTextAnnotation { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct AnnotationMarkdownTextAnnotationVisitor; + impl<'a> Visitor<'a> for AnnotationMarkdownTextAnnotationVisitor { + type Value = AnnotationMarkdownTextAnnotation; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut text: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "text" => { + if v.is_null() { + continue; + } + text = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = AnnotationMarkdownTextAnnotation { + text, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(AnnotationMarkdownTextAnnotationVisitor) + } +} diff --git a/src/datadogV2/model/model_api_trigger.rs b/src/datadogV2/model/model_api_trigger.rs new file mode 100644 index 000000000..d0f908d43 --- /dev/null +++ b/src/datadogV2/model/model_api_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA an API. The workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct APITrigger { + /// Defines a rate limit for a trigger. 
+ #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl APITrigger { + pub fn new() -> APITrigger { + APITrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for APITrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for APITrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct APITriggerVisitor; + impl<'a> Visitor<'a> for APITriggerVisitor { + type Value = APITrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = APITrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(APITriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_api_trigger_wrapper.rs b/src/datadogV2/model/model_api_trigger_wrapper.rs new file mode 100644 index 000000000..673e26328 --- /dev/null +++ b/src/datadogV2/model/model_api_trigger_wrapper.rs @@ -0,0 +1,112 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for an API-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct APITriggerWrapper { + /// Trigger a workflow VIA an API. The workflow must be published. + #[serde(rename = "apiTrigger")] + pub api_trigger: crate::datadogV2::model::APITrigger, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl APITriggerWrapper { + pub fn new(api_trigger: crate::datadogV2::model::APITrigger) -> APITriggerWrapper { + APITriggerWrapper { + api_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for APITriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct APITriggerWrapperVisitor; + impl<'a> Visitor<'a> for APITriggerWrapperVisitor { + type Value = APITriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut api_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "apiTrigger" => { + api_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let api_trigger = + api_trigger.ok_or_else(|| M::Error::missing_field("api_trigger"))?; + + let content = APITriggerWrapper { + api_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(APITriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_app_trigger_wrapper.rs b/src/datadogV2/model/model_app_trigger_wrapper.rs new file mode 100644 index 000000000..1a42d3283 --- /dev/null +++ b/src/datadogV2/model/model_app_trigger_wrapper.rs @@ -0,0 +1,115 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for an App-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AppTriggerWrapper { + /// Trigger a workflow VIA an App. + #[serde(rename = "appTrigger")] + pub app_trigger: std::collections::BTreeMap, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl AppTriggerWrapper { + pub fn new( + app_trigger: std::collections::BTreeMap, + ) -> AppTriggerWrapper { + AppTriggerWrapper { + app_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for AppTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct AppTriggerWrapperVisitor; + impl<'a> Visitor<'a> for AppTriggerWrapperVisitor { + type Value = AppTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut app_trigger: Option> = + None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "appTrigger" => { + app_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let app_trigger = + app_trigger.ok_or_else(|| M::Error::missing_field("app_trigger"))?; + + let content = AppTriggerWrapper { + app_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(AppTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_case_trigger.rs b/src/datadogV2/model/model_case_trigger.rs new file mode 100644 index 000000000..1bfec2f46 --- /dev/null +++ b/src/datadogV2/model/model_case_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA a Case. For automatic triggering a handle must be configured and the workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CaseTrigger { + /// Defines a rate limit for a trigger. 
+ #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CaseTrigger { + pub fn new() -> CaseTrigger { + CaseTrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for CaseTrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for CaseTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CaseTriggerVisitor; + impl<'a> Visitor<'a> for CaseTriggerVisitor { + type Value = CaseTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = CaseTrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CaseTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_case_trigger_wrapper.rs b/src/datadogV2/model/model_case_trigger_wrapper.rs new file mode 100644 index 000000000..f284ee9d1 --- /dev/null +++ b/src/datadogV2/model/model_case_trigger_wrapper.rs @@ -0,0 +1,112 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Case-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CaseTriggerWrapper { + /// Trigger a workflow VIA a Case. For automatic triggering a handle must be configured and the workflow must be published. + #[serde(rename = "caseTrigger")] + pub case_trigger: crate::datadogV2::model::CaseTrigger, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CaseTriggerWrapper { + pub fn new(case_trigger: crate::datadogV2::model::CaseTrigger) -> CaseTriggerWrapper { + CaseTriggerWrapper { + case_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for CaseTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CaseTriggerWrapperVisitor; + impl<'a> Visitor<'a> for CaseTriggerWrapperVisitor { + type Value = CaseTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut case_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "caseTrigger" => { + case_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let case_trigger = + case_trigger.ok_or_else(|| M::Error::missing_field("case_trigger"))?; + + let content = CaseTriggerWrapper { + case_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CaseTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_change_event_trigger_wrapper.rs b/src/datadogV2/model/model_change_event_trigger_wrapper.rs new file mode 100644 index 000000000..4d9f5dd16 --- /dev/null +++ b/src/datadogV2/model/model_change_event_trigger_wrapper.rs @@ -0,0 +1,116 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Change Event-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ChangeEventTriggerWrapper { + /// Trigger a workflow VIA a Change Event. + #[serde(rename = "changeEventTrigger")] + pub change_event_trigger: std::collections::BTreeMap, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ChangeEventTriggerWrapper { + pub fn new( + change_event_trigger: std::collections::BTreeMap, + ) -> ChangeEventTriggerWrapper { + ChangeEventTriggerWrapper { + change_event_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ChangeEventTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ChangeEventTriggerWrapperVisitor; + impl<'a> Visitor<'a> for ChangeEventTriggerWrapperVisitor { + type Value = ChangeEventTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut change_event_trigger: Option< + std::collections::BTreeMap, + > = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "changeEventTrigger" => { + change_event_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let change_event_trigger = change_event_trigger + .ok_or_else(|| M::Error::missing_field("change_event_trigger"))?; + + let content = ChangeEventTriggerWrapper { + change_event_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ChangeEventTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_completion_condition.rs b/src/datadogV2/model/model_completion_condition.rs new file mode 100644 index 000000000..167068db7 --- /dev/null +++ b/src/datadogV2/model/model_completion_condition.rs @@ -0,0 +1,131 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `CompletionCondition` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CompletionCondition { + /// The `CompletionCondition` `operand1`. + #[serde(rename = "operand1")] + pub operand1: serde_json::Value, + /// The `CompletionCondition` `operand2`. + #[serde(rename = "operand2")] + pub operand2: Option, + /// The definition of `CompletionConditionOperator` object. 
+ #[serde(rename = "operator")] + pub operator: crate::datadogV2::model::CompletionConditionOperator, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CompletionCondition { + pub fn new( + operand1: serde_json::Value, + operator: crate::datadogV2::model::CompletionConditionOperator, + ) -> CompletionCondition { + CompletionCondition { + operand1, + operand2: None, + operator, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn operand2(mut self, value: serde_json::Value) -> Self { + self.operand2 = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for CompletionCondition { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CompletionConditionVisitor; + impl<'a> Visitor<'a> for CompletionConditionVisitor { + type Value = CompletionCondition; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut operand1: Option = None; + let mut operand2: Option = None; + let mut operator: Option = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "operand1" => { + operand1 = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "operand2" => { + if v.is_null() { + continue; + } + operand2 = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "operator" => { + operator = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _operator) = operator { + match _operator { + crate::datadogV2::model::CompletionConditionOperator::UnparsedObject(_operator) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let operand1 = operand1.ok_or_else(|| M::Error::missing_field("operand1"))?; + let operator = operator.ok_or_else(|| M::Error::missing_field("operator"))?; + + let content = CompletionCondition { + operand1, + operand2, + operator, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CompletionConditionVisitor) + } +} diff --git a/src/datadogV2/model/model_completion_condition_operator.rs b/src/datadogV2/model/model_completion_condition_operator.rs new file mode 100644 index 000000000..991536593 --- /dev/null +++ b/src/datadogV2/model/model_completion_condition_operator.rs @@ -0,0 +1,83 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
+
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+#[non_exhaustive]
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum CompletionConditionOperator {
+    OPERATOR_EQUAL,
+    OPERATOR_NOT_EQUAL,
+    OPERATOR_GREATER_THAN,
+    OPERATOR_LESS_THAN,
+    OPERATOR_GREATER_THAN_OR_EQUAL_TO,
+    OPERATOR_LESS_THAN_OR_EQUAL_TO,
+    OPERATOR_CONTAINS,
+    OPERATOR_DOES_NOT_CONTAIN,
+    OPERATOR_IS_NULL,
+    OPERATOR_IS_NOT_NULL,
+    OPERATOR_IS_EMPTY,
+    OPERATOR_IS_NOT_EMPTY,
+    UnparsedObject(crate::datadog::UnparsedObject),
+}
+
+impl ToString for CompletionConditionOperator {
+    fn to_string(&self) -> String {
+        match self {
+            Self::OPERATOR_EQUAL => String::from("OPERATOR_EQUAL"),
+            Self::OPERATOR_NOT_EQUAL => String::from("OPERATOR_NOT_EQUAL"),
+            Self::OPERATOR_GREATER_THAN => String::from("OPERATOR_GREATER_THAN"),
+            Self::OPERATOR_LESS_THAN => String::from("OPERATOR_LESS_THAN"),
+            Self::OPERATOR_GREATER_THAN_OR_EQUAL_TO => {
+                String::from("OPERATOR_GREATER_THAN_OR_EQUAL_TO")
+            }
+            Self::OPERATOR_LESS_THAN_OR_EQUAL_TO => String::from("OPERATOR_LESS_THAN_OR_EQUAL_TO"),
+            Self::OPERATOR_CONTAINS => String::from("OPERATOR_CONTAINS"),
+            Self::OPERATOR_DOES_NOT_CONTAIN => String::from("OPERATOR_DOES_NOT_CONTAIN"),
+            Self::OPERATOR_IS_NULL => String::from("OPERATOR_IS_NULL"),
+            Self::OPERATOR_IS_NOT_NULL => String::from("OPERATOR_IS_NOT_NULL"),
+            Self::OPERATOR_IS_EMPTY => String::from("OPERATOR_IS_EMPTY"),
+            Self::OPERATOR_IS_NOT_EMPTY => String::from("OPERATOR_IS_NOT_EMPTY"),
+            Self::UnparsedObject(v) => v.value.to_string(),
+        }
+    }
+}
+
+impl Serialize for CompletionConditionOperator {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            Self::UnparsedObject(v) => v.serialize(serializer),
+            _ => serializer.serialize_str(self.to_string().as_str()),
+        }
+    }
+}
+
+impl<'de> Deserialize<'de> for CompletionConditionOperator {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s: String = String::deserialize(deserializer)?;
+        Ok(match s.as_str() {
+            "OPERATOR_EQUAL" => Self::OPERATOR_EQUAL,
+            "OPERATOR_NOT_EQUAL" => Self::OPERATOR_NOT_EQUAL,
+            "OPERATOR_GREATER_THAN" => Self::OPERATOR_GREATER_THAN,
+            "OPERATOR_LESS_THAN" => Self::OPERATOR_LESS_THAN,
+            "OPERATOR_GREATER_THAN_OR_EQUAL_TO" => Self::OPERATOR_GREATER_THAN_OR_EQUAL_TO,
+            "OPERATOR_LESS_THAN_OR_EQUAL_TO" => Self::OPERATOR_LESS_THAN_OR_EQUAL_TO,
+            "OPERATOR_CONTAINS" => Self::OPERATOR_CONTAINS,
+            "OPERATOR_DOES_NOT_CONTAIN" => Self::OPERATOR_DOES_NOT_CONTAIN,
+            "OPERATOR_IS_NULL" => Self::OPERATOR_IS_NULL,
+            "OPERATOR_IS_NOT_NULL" => Self::OPERATOR_IS_NOT_NULL,
+            "OPERATOR_IS_EMPTY" => Self::OPERATOR_IS_EMPTY,
+            "OPERATOR_IS_NOT_EMPTY" => Self::OPERATOR_IS_NOT_EMPTY,
+            _ => Self::UnparsedObject(crate::datadog::UnparsedObject {
+                value: serde_json::Value::String(s.into()),
+            }),
+        })
+    }
+}
diff --git a/src/datadogV2/model/model_completion_gate.rs b/src/datadogV2/model/model_completion_gate.rs
new file mode 100644
index 000000000..4029b2105
--- /dev/null
+++ b/src/datadogV2/model/model_completion_gate.rs
@@ -0,0 +1,110 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
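The enum keeps the crate's forward-compatibility convention: known values round-trip as plain strings, while values this client version does not recognize are preserved in the `UnparsedObject` variant instead of failing deserialization. A small sketch (the unknown value is arbitrary):

```rust
use datadog_api_client::datadogV2::model::CompletionConditionOperator;

fn main() {
    // Known value: string <-> enum round-trip.
    let op: CompletionConditionOperator = serde_json::from_str("\"OPERATOR_EQUAL\"").unwrap();
    assert_eq!(serde_json::to_string(&op).unwrap(), "\"OPERATOR_EQUAL\"");

    // Unknown value: kept as UnparsedObject rather than surfacing an error.
    let unknown: CompletionConditionOperator =
        serde_json::from_str("\"OPERATOR_SOMETHING_NEW\"").unwrap();
    assert!(matches!(
        unknown,
        CompletionConditionOperator::UnparsedObject(_)
    ));
}
```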
+use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Used to create conditions before running subsequent actions. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CompletionGate { + /// The definition of `CompletionCondition` object. + #[serde(rename = "completionCondition")] + pub completion_condition: crate::datadogV2::model::CompletionCondition, + /// The definition of `RetryStrategy` object. + #[serde(rename = "retryStrategy")] + pub retry_strategy: crate::datadogV2::model::RetryStrategy, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CompletionGate { + pub fn new( + completion_condition: crate::datadogV2::model::CompletionCondition, + retry_strategy: crate::datadogV2::model::RetryStrategy, + ) -> CompletionGate { + CompletionGate { + completion_condition, + retry_strategy, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for CompletionGate { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CompletionGateVisitor; + impl<'a> Visitor<'a> for CompletionGateVisitor { + type Value = CompletionGate; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut completion_condition: Option = + None; + let mut retry_strategy: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "completionCondition" => { + completion_condition = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "retryStrategy" => { + retry_strategy = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let completion_condition = completion_condition + .ok_or_else(|| M::Error::missing_field("completion_condition"))?; + let retry_strategy = + retry_strategy.ok_or_else(|| M::Error::missing_field("retry_strategy"))?; + + let content = CompletionGate { + completion_condition, + retry_strategy, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CompletionGateVisitor) + } +} diff --git a/src/datadogV2/model/model_connection.rs b/src/datadogV2/model/model_connection.rs new file mode 100644 index 000000000..a795a868d --- /dev/null +++ b/src/datadogV2/model/model_connection.rs @@ -0,0 +1,104 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `Connection` object. 
+#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct Connection { + /// The `Connection` `connectionId`. + #[serde(rename = "connectionId")] + pub connection_id: String, + /// The `Connection` `label`. + #[serde(rename = "label")] + pub label: String, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl Connection { + pub fn new(connection_id: String, label: String) -> Connection { + Connection { + connection_id, + label, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for Connection { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ConnectionVisitor; + impl<'a> Visitor<'a> for ConnectionVisitor { + type Value = Connection; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut connection_id: Option = None; + let mut label: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "connectionId" => { + connection_id = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "label" => { + label = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let connection_id = + connection_id.ok_or_else(|| M::Error::missing_field("connection_id"))?; + let label = label.ok_or_else(|| M::Error::missing_field("label"))?; + + let content = Connection { + connection_id, + label, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ConnectionVisitor) + } +} diff --git a/src/datadogV2/model/model_connection_env.rs b/src/datadogV2/model/model_connection_env.rs new file mode 100644 index 000000000..184d39cbf --- /dev/null +++ b/src/datadogV2/model/model_connection_env.rs @@ -0,0 +1,142 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A list of connections or connection groups used in the workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ConnectionEnv { + /// The `ConnectionEnv` `connectionGroups`. + #[serde(rename = "connectionGroups")] + pub connection_groups: Option>, + /// The `ConnectionEnv` `connections`. + #[serde(rename = "connections")] + pub connections: Option>, + /// The definition of `ConnectionEnvEnv` object. 
+ #[serde(rename = "env")] + pub env: crate::datadogV2::model::ConnectionEnvEnv, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ConnectionEnv { + pub fn new(env: crate::datadogV2::model::ConnectionEnvEnv) -> ConnectionEnv { + ConnectionEnv { + connection_groups: None, + connections: None, + env, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn connection_groups( + mut self, + value: Vec, + ) -> Self { + self.connection_groups = Some(value); + self + } + + pub fn connections(mut self, value: Vec) -> Self { + self.connections = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ConnectionEnv { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ConnectionEnvVisitor; + impl<'a> Visitor<'a> for ConnectionEnvVisitor { + type Value = ConnectionEnv; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut connection_groups: Option> = + None; + let mut connections: Option> = None; + let mut env: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "connectionGroups" => { + if v.is_null() { + continue; + } + connection_groups = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "connections" => { + if v.is_null() { + continue; + } + connections = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "env" => { + env = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _env) = env { + match _env { + crate::datadogV2::model::ConnectionEnvEnv::UnparsedObject( + _env, + ) => { + _unparsed = true; + } + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let env = env.ok_or_else(|| M::Error::missing_field("env"))?; + + let content = ConnectionEnv { + connection_groups, + connections, + env, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ConnectionEnvVisitor) + } +} diff --git a/src/datadogV2/model/model_connection_env_env.rs b/src/datadogV2/model/model_connection_env_env.rs new file mode 100644 index 000000000..3d872a2ed --- /dev/null +++ b/src/datadogV2/model/model_connection_env_env.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
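// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. The
// `ConnectionEnvEnv` enum defined below is the only required piece of a
// `ConnectionEnv`; connections and connection groups are optional builders.
// The IDs, labels, and tags used here are placeholder values.
//
//     fn connection_env_sketch() -> crate::datadogV2::model::ConnectionEnv {
//         crate::datadogV2::model::ConnectionEnv::new(
//             crate::datadogV2::model::ConnectionEnvEnv::DEFAULT,
//         )
//         .connections(vec![crate::datadogV2::model::Connection::new(
//             "11111111-2222-3333-4444-555555555555".to_string(), // placeholder connection ID
//             "INTEGRATION_DATADOG".to_string(),                  // placeholder label
//         )])
//         .connection_groups(vec![crate::datadogV2::model::ConnectionGroup::new(
//             "66666666-7777-8888-9999-000000000000".to_string(), // placeholder group ID
//             "on-call-connections".to_string(),                  // placeholder label
//             vec!["team:on-call".to_string()],                   // placeholder tags
//         )])
//     }
// ---------------------------------------------------------------------------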
+ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ConnectionEnvEnv { + DEFAULT, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for ConnectionEnvEnv { + fn to_string(&self) -> String { + match self { + Self::DEFAULT => String::from("default"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for ConnectionEnvEnv { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for ConnectionEnvEnv { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "default" => Self::DEFAULT, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_connection_group.rs b/src/datadogV2/model/model_connection_group.rs new file mode 100644 index 000000000..0b0bf01ad --- /dev/null +++ b/src/datadogV2/model/model_connection_group.rs @@ -0,0 +1,114 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `ConnectionGroup` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ConnectionGroup { + /// The `ConnectionGroup` `connectionGroupId`. + #[serde(rename = "connectionGroupId")] + pub connection_group_id: String, + /// The `ConnectionGroup` `label`. + #[serde(rename = "label")] + pub label: String, + /// The `ConnectionGroup` `tags`. + #[serde(rename = "tags")] + pub tags: Vec, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ConnectionGroup { + pub fn new(connection_group_id: String, label: String, tags: Vec) -> ConnectionGroup { + ConnectionGroup { + connection_group_id, + label, + tags, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ConnectionGroup { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ConnectionGroupVisitor; + impl<'a> Visitor<'a> for ConnectionGroupVisitor { + type Value = ConnectionGroup; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut connection_group_id: Option = None; + let mut label: Option = None; + let mut tags: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "connectionGroupId" => { + connection_group_id = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "label" => { + label = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "tags" => { + tags = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let connection_group_id = connection_group_id + .ok_or_else(|| M::Error::missing_field("connection_group_id"))?; + let label = label.ok_or_else(|| M::Error::missing_field("label"))?; + let tags = tags.ok_or_else(|| M::Error::missing_field("tags"))?; + + let content = ConnectionGroup { + connection_group_id, + label, + tags, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ConnectionGroupVisitor) + } +} diff --git a/src/datadogV2/model/model_create_workflow_request.rs b/src/datadogV2/model/model_create_workflow_request.rs new file mode 100644 index 000000000..6f3d26cbd --- /dev/null +++ b/src/datadogV2/model/model_create_workflow_request.rs @@ -0,0 +1,92 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A request object for creating a new workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CreateWorkflowRequest { + /// Data related to the workflow. + #[serde(rename = "data")] + pub data: crate::datadogV2::model::WorkflowData, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CreateWorkflowRequest { + pub fn new(data: crate::datadogV2::model::WorkflowData) -> CreateWorkflowRequest { + CreateWorkflowRequest { + data, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for CreateWorkflowRequest { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CreateWorkflowRequestVisitor; + impl<'a> Visitor<'a> for CreateWorkflowRequestVisitor { + type Value = CreateWorkflowRequest; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "data" => { + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let data = data.ok_or_else(|| M::Error::missing_field("data"))?; + + let content = CreateWorkflowRequest { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CreateWorkflowRequestVisitor) + } +} diff --git a/src/datadogV2/model/model_create_workflow_response.rs b/src/datadogV2/model/model_create_workflow_response.rs new file mode 100644 index 000000000..64437a37f --- /dev/null +++ b/src/datadogV2/model/model_create_workflow_response.rs @@ -0,0 +1,92 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The response object after creating a new workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct CreateWorkflowResponse { + /// Data related to the workflow. + #[serde(rename = "data")] + pub data: crate::datadogV2::model::WorkflowData, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl CreateWorkflowResponse { + pub fn new(data: crate::datadogV2::model::WorkflowData) -> CreateWorkflowResponse { + CreateWorkflowResponse { + data, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for CreateWorkflowResponse { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CreateWorkflowResponseVisitor; + impl<'a> Visitor<'a> for CreateWorkflowResponseVisitor { + type Value = CreateWorkflowResponse; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "data" => { + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let data = data.ok_or_else(|| M::Error::missing_field("data"))?; + + let content = CreateWorkflowResponse { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(CreateWorkflowResponseVisitor) + } +} diff --git a/src/datadogV2/model/model_dashboard_trigger_wrapper.rs b/src/datadogV2/model/model_dashboard_trigger_wrapper.rs new file mode 100644 index 000000000..b6473f3e2 --- /dev/null +++ b/src/datadogV2/model/model_dashboard_trigger_wrapper.rs @@ -0,0 +1,116 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Dashboard-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct DashboardTriggerWrapper { + /// Trigger a workflow VIA a Dashboard. + #[serde(rename = "dashboardTrigger")] + pub dashboard_trigger: std::collections::BTreeMap, + /// A list of steps that run first after a trigger fires. + #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl DashboardTriggerWrapper { + pub fn new( + dashboard_trigger: std::collections::BTreeMap, + ) -> DashboardTriggerWrapper { + DashboardTriggerWrapper { + dashboard_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for DashboardTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct DashboardTriggerWrapperVisitor; + impl<'a> Visitor<'a> for DashboardTriggerWrapperVisitor { + type Value = DashboardTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut dashboard_trigger: Option< + std::collections::BTreeMap, + > = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "dashboardTrigger" => { + dashboard_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let dashboard_trigger = dashboard_trigger + .ok_or_else(|| M::Error::missing_field("dashboard_trigger"))?; + + let content = DashboardTriggerWrapper { + dashboard_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(DashboardTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_error_handler.rs b/src/datadogV2/model/model_error_handler.rs new file mode 100644 index 000000000..50afacf37 --- /dev/null +++ b/src/datadogV2/model/model_error_handler.rs @@ -0,0 +1,109 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Used to handle errors in an action. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ErrorHandler { + /// The `ErrorHandler` `fallbackStepName`. + #[serde(rename = "fallbackStepName")] + pub fallback_step_name: String, + /// The definition of `RetryStrategy` object. + #[serde(rename = "retryStrategy")] + pub retry_strategy: crate::datadogV2::model::RetryStrategy, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ErrorHandler { + pub fn new( + fallback_step_name: String, + retry_strategy: crate::datadogV2::model::RetryStrategy, + ) -> ErrorHandler { + ErrorHandler { + fallback_step_name, + retry_strategy, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ErrorHandler { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ErrorHandlerVisitor; + impl<'a> Visitor<'a> for ErrorHandlerVisitor { + type Value = ErrorHandler; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut fallback_step_name: Option = None; + let mut retry_strategy: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "fallbackStepName" => { + fallback_step_name = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "retryStrategy" => { + retry_strategy = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let fallback_step_name = fallback_step_name + .ok_or_else(|| M::Error::missing_field("fallback_step_name"))?; + let retry_strategy = + retry_strategy.ok_or_else(|| M::Error::missing_field("retry_strategy"))?; + + let content = ErrorHandler { + fallback_step_name, + retry_strategy, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ErrorHandlerVisitor) + } +} diff --git a/src/datadogV2/model/model_get_workflow_response.rs b/src/datadogV2/model/model_get_workflow_response.rs new file mode 100644 index 000000000..ab2e5efb0 --- /dev/null +++ b/src/datadogV2/model/model_get_workflow_response.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The response object after getting a workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct GetWorkflowResponse { + /// Data related to the workflow. + #[serde(rename = "data")] + pub data: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl GetWorkflowResponse { + pub fn new() -> GetWorkflowResponse { + GetWorkflowResponse { + data: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn data(mut self, value: crate::datadogV2::model::WorkflowData) -> Self { + self.data = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for GetWorkflowResponse { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for GetWorkflowResponse { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct GetWorkflowResponseVisitor; + impl<'a> Visitor<'a> for GetWorkflowResponseVisitor { + type Value = GetWorkflowResponse; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "data" => { + if v.is_null() { + continue; + } + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = GetWorkflowResponse { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(GetWorkflowResponseVisitor) + } +} diff --git a/src/datadogV2/model/model_github_webhook_trigger.rs b/src/datadogV2/model/model_github_webhook_trigger.rs new file mode 100644 index 000000000..939b14408 --- /dev/null +++ b/src/datadogV2/model/model_github_webhook_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA GitHub webhook. To trigger a workflow from GitHub, you must set a `webhookSecret`. In your GitHub Webhook Settings, set the Payload URL to "base_url"/api/v2/workflows/"workflow_id"/webhook?orgId="org_id", select application/json for the content type, and be highly recommend enabling SSL verification for security. The workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct GithubWebhookTrigger { + /// Defines a rate limit for a trigger. + #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl GithubWebhookTrigger { + pub fn new() -> GithubWebhookTrigger { + GithubWebhookTrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for GithubWebhookTrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for GithubWebhookTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct GithubWebhookTriggerVisitor; + impl<'a> Visitor<'a> for GithubWebhookTriggerVisitor { + type Value = GithubWebhookTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = GithubWebhookTrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(GithubWebhookTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_github_webhook_trigger_wrapper.rs b/src/datadogV2/model/model_github_webhook_trigger_wrapper.rs new file mode 100644 index 000000000..d72446ee5 --- /dev/null +++ b/src/datadogV2/model/model_github_webhook_trigger_wrapper.rs @@ -0,0 +1,116 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a GitHub webhook-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct GithubWebhookTriggerWrapper { + /// Trigger a workflow VIA GitHub webhook. To trigger a workflow from GitHub, you must set a `webhookSecret`. In your GitHub Webhook Settings, set the Payload URL to "base_url"/api/v2/workflows/"workflow_id"/webhook?orgId="org_id", select application/json for the content type, and be highly recommend enabling SSL verification for security. The workflow must be published. + #[serde(rename = "githubWebhookTrigger")] + pub github_webhook_trigger: crate::datadogV2::model::GithubWebhookTrigger, + /// A list of steps that run first after a trigger fires. + #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl GithubWebhookTriggerWrapper { + pub fn new( + github_webhook_trigger: crate::datadogV2::model::GithubWebhookTrigger, + ) -> GithubWebhookTriggerWrapper { + GithubWebhookTriggerWrapper { + github_webhook_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for GithubWebhookTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct GithubWebhookTriggerWrapperVisitor; + impl<'a> Visitor<'a> for GithubWebhookTriggerWrapperVisitor { + type Value = GithubWebhookTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut github_webhook_trigger: Option< + crate::datadogV2::model::GithubWebhookTrigger, + > = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "githubWebhookTrigger" => { + github_webhook_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let github_webhook_trigger = github_webhook_trigger + .ok_or_else(|| M::Error::missing_field("github_webhook_trigger"))?; + + let content = GithubWebhookTriggerWrapper { + github_webhook_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(GithubWebhookTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_incident_trigger.rs b/src/datadogV2/model/model_incident_trigger.rs new file mode 100644 index 000000000..0f6761f2d --- /dev/null +++ b/src/datadogV2/model/model_incident_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA an Incident. For automatic triggering a handle must be configured and the workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct IncidentTrigger { + /// Defines a rate limit for a trigger. + #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl IncidentTrigger { + pub fn new() -> IncidentTrigger { + IncidentTrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for IncidentTrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for IncidentTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct IncidentTriggerVisitor; + impl<'a> Visitor<'a> for IncidentTriggerVisitor { + type Value = IncidentTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = IncidentTrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(IncidentTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_incident_trigger_wrapper.rs b/src/datadogV2/model/model_incident_trigger_wrapper.rs new file mode 100644 index 000000000..a9b781dac --- /dev/null +++ b/src/datadogV2/model/model_incident_trigger_wrapper.rs @@ -0,0 +1,114 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for an Incident-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct IncidentTriggerWrapper { + /// Trigger a workflow VIA an Incident. For automatic triggering a handle must be configured and the workflow must be published. + #[serde(rename = "incidentTrigger")] + pub incident_trigger: crate::datadogV2::model::IncidentTrigger, + /// A list of steps that run first after a trigger fires. + #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl IncidentTriggerWrapper { + pub fn new( + incident_trigger: crate::datadogV2::model::IncidentTrigger, + ) -> IncidentTriggerWrapper { + IncidentTriggerWrapper { + incident_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for IncidentTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct IncidentTriggerWrapperVisitor; + impl<'a> Visitor<'a> for IncidentTriggerWrapperVisitor { + type Value = IncidentTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut incident_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "incidentTrigger" => { + incident_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let incident_trigger = + incident_trigger.ok_or_else(|| M::Error::missing_field("incident_trigger"))?; + + let content = IncidentTriggerWrapper { + incident_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(IncidentTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_input_schema.rs b/src/datadogV2/model/model_input_schema.rs new file mode 100644 index 000000000..c84a5b812 --- /dev/null +++ b/src/datadogV2/model/model_input_schema.rs @@ -0,0 +1,109 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A list of input parameters for the workflow. These can be used as dynamic runtime values in your workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct InputSchema { + /// The `InputSchema` `parameters`. + #[serde(rename = "parameters")] + pub parameters: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl InputSchema { + pub fn new() -> InputSchema { + InputSchema { + parameters: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn parameters( + mut self, + value: Vec, + ) -> Self { + self.parameters = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for InputSchema { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for InputSchema { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct InputSchemaVisitor; + impl<'a> Visitor<'a> for InputSchemaVisitor { + type Value = InputSchema; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut parameters: Option> = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "parameters" => { + if v.is_null() { + continue; + } + parameters = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = InputSchema { + parameters, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(InputSchemaVisitor) + } +} diff --git a/src/datadogV2/model/model_input_schema_parameters.rs b/src/datadogV2/model/model_input_schema_parameters.rs new file mode 100644 index 000000000..72209cb33 --- /dev/null +++ b/src/datadogV2/model/model_input_schema_parameters.rs @@ -0,0 +1,166 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `InputSchemaParameters` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct InputSchemaParameters { + /// The `InputSchemaParameters` `defaultValue`. + #[serde(rename = "defaultValue")] + pub default_value: Option, + /// The `InputSchemaParameters` `description`. + #[serde(rename = "description")] + pub description: Option, + /// The `InputSchemaParameters` `label`. + #[serde(rename = "label")] + pub label: Option, + /// The `InputSchemaParameters` `name`. + #[serde(rename = "name")] + pub name: String, + /// The definition of `InputSchemaParametersType` object. + #[serde(rename = "type")] + pub type_: crate::datadogV2::model::InputSchemaParametersType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl InputSchemaParameters { + pub fn new( + name: String, + type_: crate::datadogV2::model::InputSchemaParametersType, + ) -> InputSchemaParameters { + InputSchemaParameters { + default_value: None, + description: None, + label: None, + name, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn default_value(mut self, value: serde_json::Value) -> Self { + self.default_value = Some(value); + self + } + + pub fn description(mut self, value: String) -> Self { + self.description = Some(value); + self + } + + pub fn label(mut self, value: String) -> Self { + self.label = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for InputSchemaParameters { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct InputSchemaParametersVisitor; + impl<'a> Visitor<'a> for InputSchemaParametersVisitor { + type Value = InputSchemaParameters; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut default_value: Option = None; + let mut description: Option = None; + let mut label: Option = None; + let mut name: Option = None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = 
std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "defaultValue" => { + if v.is_null() { + continue; + } + default_value = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "description" => { + if v.is_null() { + continue; + } + description = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "label" => { + if v.is_null() { + continue; + } + label = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "name" => { + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::InputSchemaParametersType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let name = name.ok_or_else(|| M::Error::missing_field("name"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = InputSchemaParameters { + default_value, + description, + label, + name, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(InputSchemaParametersVisitor) + } +} diff --git a/src/datadogV2/model/model_input_schema_parameters_type.rs b/src/datadogV2/model/model_input_schema_parameters_type.rs new file mode 100644 index 000000000..e86be10c8 --- /dev/null +++ b/src/datadogV2/model/model_input_schema_parameters_type.rs @@ -0,0 +1,69 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
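// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. The
// parameter type enum defined below is passed to `InputSchemaParameters::new`
// together with the parameter name; label, description, and default value are
// optional builders. The names and texts here are placeholder values.
//
//     fn input_schema_sketch() -> crate::datadogV2::model::InputSchema {
//         let param = crate::datadogV2::model::InputSchemaParameters::new(
//             "service".to_string(), // placeholder parameter name
//             crate::datadogV2::model::InputSchemaParametersType::STRING,
//         )
//         .label("Service".to_string())
//         .description("Service the workflow should act on".to_string())
//         .default_value(serde_json::Value::String("web-store".to_string()));
//
//         crate::datadogV2::model::InputSchema::new().parameters(vec![param])
//     }
// ---------------------------------------------------------------------------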
+ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum InputSchemaParametersType { + STRING, + NUMBER, + BOOLEAN, + OBJECT, + ARRAY_STRING, + ARRAY_NUMBER, + ARRAY_BOOLEAN, + ARRAY_OBJECT, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for InputSchemaParametersType { + fn to_string(&self) -> String { + match self { + Self::STRING => String::from("STRING"), + Self::NUMBER => String::from("NUMBER"), + Self::BOOLEAN => String::from("BOOLEAN"), + Self::OBJECT => String::from("OBJECT"), + Self::ARRAY_STRING => String::from("ARRAY_STRING"), + Self::ARRAY_NUMBER => String::from("ARRAY_NUMBER"), + Self::ARRAY_BOOLEAN => String::from("ARRAY_BOOLEAN"), + Self::ARRAY_OBJECT => String::from("ARRAY_OBJECT"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for InputSchemaParametersType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for InputSchemaParametersType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "STRING" => Self::STRING, + "NUMBER" => Self::NUMBER, + "BOOLEAN" => Self::BOOLEAN, + "OBJECT" => Self::OBJECT, + "ARRAY_STRING" => Self::ARRAY_STRING, + "ARRAY_NUMBER" => Self::ARRAY_NUMBER, + "ARRAY_BOOLEAN" => Self::ARRAY_BOOLEAN, + "ARRAY_OBJECT" => Self::ARRAY_OBJECT, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_monitor_trigger.rs b/src/datadogV2/model/model_monitor_trigger.rs new file mode 100644 index 000000000..e3e91fa94 --- /dev/null +++ b/src/datadogV2/model/model_monitor_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA a Monitor. For automatic triggering a handle must be configured and the workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct MonitorTrigger { + /// Defines a rate limit for a trigger. 
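// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. Enums
// such as `InputSchemaParametersType` above keep an `UnparsedObject` variant
// so that enum values added to the API later still deserialize instead of
// failing. The second string below is a hypothetical future value.
//
//     fn enum_fallback_sketch() -> Result<(), serde_json::Error> {
//         use crate::datadogV2::model::InputSchemaParametersType;
//
//         let known: InputSchemaParametersType = serde_json::from_str("\"STRING\"")?;
//         assert_eq!(known, InputSchemaParametersType::STRING);
//
//         // A value this client version does not know about is preserved verbatim.
//         let unknown: InputSchemaParametersType =
//             serde_json::from_str("\"SOME_FUTURE_TYPE\"")?;
//         assert!(matches!(
//             unknown,
//             InputSchemaParametersType::UnparsedObject(_)
//         ));
//         Ok(())
//     }
// ---------------------------------------------------------------------------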
+ #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl MonitorTrigger { + pub fn new() -> MonitorTrigger { + MonitorTrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for MonitorTrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for MonitorTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct MonitorTriggerVisitor; + impl<'a> Visitor<'a> for MonitorTriggerVisitor { + type Value = MonitorTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = MonitorTrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(MonitorTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_monitor_trigger_wrapper.rs b/src/datadogV2/model/model_monitor_trigger_wrapper.rs new file mode 100644 index 000000000..59fe782a4 --- /dev/null +++ b/src/datadogV2/model/model_monitor_trigger_wrapper.rs @@ -0,0 +1,112 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Monitor-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct MonitorTriggerWrapper { + /// Trigger a workflow VIA a Monitor. For automatic triggering a handle must be configured and the workflow must be published. + #[serde(rename = "monitorTrigger")] + pub monitor_trigger: crate::datadogV2::model::MonitorTrigger, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl MonitorTriggerWrapper { + pub fn new(monitor_trigger: crate::datadogV2::model::MonitorTrigger) -> MonitorTriggerWrapper { + MonitorTriggerWrapper { + monitor_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for MonitorTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct MonitorTriggerWrapperVisitor; + impl<'a> Visitor<'a> for MonitorTriggerWrapperVisitor { + type Value = MonitorTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut monitor_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "monitorTrigger" => { + monitor_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let monitor_trigger = + monitor_trigger.ok_or_else(|| M::Error::missing_field("monitor_trigger"))?; + + let content = MonitorTriggerWrapper { + monitor_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(MonitorTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_outbound_edge.rs b/src/datadogV2/model/model_outbound_edge.rs new file mode 100644 index 000000000..4986e7cf4 --- /dev/null +++ b/src/datadogV2/model/model_outbound_edge.rs @@ -0,0 +1,106 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `OutboundEdge` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct OutboundEdge { + /// The `OutboundEdge` `branchName`. + #[serde(rename = "branchName")] + pub branch_name: String, + /// The `OutboundEdge` `nextStepName`. 
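// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. The
// trigger wrapper pattern above pairs a trigger definition with the names of
// the steps it starts; the step name here is a placeholder.
//
//     fn monitor_trigger_sketch() -> crate::datadogV2::model::MonitorTriggerWrapper {
//         // `rateLimit` is optional, so a bare `MonitorTrigger` is valid.
//         crate::datadogV2::model::MonitorTriggerWrapper::new(
//             crate::datadogV2::model::MonitorTrigger::new(),
//         )
//         .start_step_names(vec!["notify_oncall".to_string()])
//     }
// ---------------------------------------------------------------------------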
+ #[serde(rename = "nextStepName")] + pub next_step_name: String, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl OutboundEdge { + pub fn new(branch_name: String, next_step_name: String) -> OutboundEdge { + OutboundEdge { + branch_name, + next_step_name, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for OutboundEdge { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct OutboundEdgeVisitor; + impl<'a> Visitor<'a> for OutboundEdgeVisitor { + type Value = OutboundEdge; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut branch_name: Option = None; + let mut next_step_name: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "branchName" => { + branch_name = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "nextStepName" => { + next_step_name = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let branch_name = + branch_name.ok_or_else(|| M::Error::missing_field("branch_name"))?; + let next_step_name = + next_step_name.ok_or_else(|| M::Error::missing_field("next_step_name"))?; + + let content = OutboundEdge { + branch_name, + next_step_name, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(OutboundEdgeVisitor) + } +} diff --git a/src/datadogV2/model/model_output_schema.rs b/src/datadogV2/model/model_output_schema.rs new file mode 100644 index 000000000..1e3a60925 --- /dev/null +++ b/src/datadogV2/model/model_output_schema.rs @@ -0,0 +1,109 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A list of output parameters for the workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct OutputSchema { + /// The `OutputSchema` `parameters`. 
+ #[serde(rename = "parameters")] + pub parameters: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl OutputSchema { + pub fn new() -> OutputSchema { + OutputSchema { + parameters: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn parameters( + mut self, + value: Vec, + ) -> Self { + self.parameters = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for OutputSchema { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for OutputSchema { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct OutputSchemaVisitor; + impl<'a> Visitor<'a> for OutputSchemaVisitor { + type Value = OutputSchema; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut parameters: Option> = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "parameters" => { + if v.is_null() { + continue; + } + parameters = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = OutputSchema { + parameters, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(OutputSchemaVisitor) + } +} diff --git a/src/datadogV2/model/model_output_schema_parameters.rs b/src/datadogV2/model/model_output_schema_parameters.rs new file mode 100644 index 000000000..588128a29 --- /dev/null +++ b/src/datadogV2/model/model_output_schema_parameters.rs @@ -0,0 +1,183 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `OutputSchemaParameters` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct OutputSchemaParameters { + /// The `OutputSchemaParameters` `defaultValue`. + #[serde(rename = "defaultValue")] + pub default_value: Option, + /// The `OutputSchemaParameters` `description`. + #[serde(rename = "description")] + pub description: Option, + /// The `OutputSchemaParameters` `label`. + #[serde(rename = "label")] + pub label: Option, + /// The `OutputSchemaParameters` `name`. + #[serde(rename = "name")] + pub name: String, + /// The definition of `OutputSchemaParametersType` object. + #[serde(rename = "type")] + pub type_: crate::datadogV2::model::OutputSchemaParametersType, + /// The `OutputSchemaParameters` `value`. 
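// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. Output
// parameters mirror input parameters but can also carry a `value`; the
// parameter name and the template-style value string are placeholders, and
// the type is taken as an argument because the output type enum is defined
// further below.
//
//     fn output_schema_sketch(
//         type_: crate::datadogV2::model::OutputSchemaParametersType,
//     ) -> crate::datadogV2::model::OutputSchema {
//         let param = crate::datadogV2::model::OutputSchemaParameters::new(
//             "incident_id".to_string(), // placeholder parameter name
//             type_,
//         )
//         .value(serde_json::Value::String(
//             "{{ Steps.create_incident.id }}".to_string(), // placeholder value
//         ));
//
//         crate::datadogV2::model::OutputSchema::new().parameters(vec![param])
//     }
// ---------------------------------------------------------------------------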
+ #[serde(rename = "value")] + pub value: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl OutputSchemaParameters { + pub fn new( + name: String, + type_: crate::datadogV2::model::OutputSchemaParametersType, + ) -> OutputSchemaParameters { + OutputSchemaParameters { + default_value: None, + description: None, + label: None, + name, + type_, + value: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn default_value(mut self, value: serde_json::Value) -> Self { + self.default_value = Some(value); + self + } + + pub fn description(mut self, value: String) -> Self { + self.description = Some(value); + self + } + + pub fn label(mut self, value: String) -> Self { + self.label = Some(value); + self + } + + pub fn value(mut self, value: serde_json::Value) -> Self { + self.value = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for OutputSchemaParameters { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct OutputSchemaParametersVisitor; + impl<'a> Visitor<'a> for OutputSchemaParametersVisitor { + type Value = OutputSchemaParameters; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut default_value: Option = None; + let mut description: Option = None; + let mut label: Option = None; + let mut name: Option = None; + let mut type_: Option = None; + let mut value: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "defaultValue" => { + if v.is_null() { + continue; + } + default_value = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "description" => { + if v.is_null() { + continue; + } + description = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "label" => { + if v.is_null() { + continue; + } + label = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "name" => { + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::OutputSchemaParametersType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + "value" => { + if v.is_null() { + continue; + } + value = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let name = name.ok_or_else(|| M::Error::missing_field("name"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = OutputSchemaParameters { + default_value, + description, + label, + name, + type_, + value, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(OutputSchemaParametersVisitor) + } +} diff --git a/src/datadogV2/model/model_output_schema_parameters_type.rs b/src/datadogV2/model/model_output_schema_parameters_type.rs new file mode 100644 index 000000000..e29e83024 --- /dev/null +++ b/src/datadogV2/model/model_output_schema_parameters_type.rs @@ -0,0 +1,69 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
+
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+#[non_exhaustive]
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum OutputSchemaParametersType {
+    STRING,
+    NUMBER,
+    BOOLEAN,
+    OBJECT,
+    ARRAY_STRING,
+    ARRAY_NUMBER,
+    ARRAY_BOOLEAN,
+    ARRAY_OBJECT,
+    UnparsedObject(crate::datadog::UnparsedObject),
+}
+
+impl ToString for OutputSchemaParametersType {
+    fn to_string(&self) -> String {
+        match self {
+            Self::STRING => String::from("STRING"),
+            Self::NUMBER => String::from("NUMBER"),
+            Self::BOOLEAN => String::from("BOOLEAN"),
+            Self::OBJECT => String::from("OBJECT"),
+            Self::ARRAY_STRING => String::from("ARRAY_STRING"),
+            Self::ARRAY_NUMBER => String::from("ARRAY_NUMBER"),
+            Self::ARRAY_BOOLEAN => String::from("ARRAY_BOOLEAN"),
+            Self::ARRAY_OBJECT => String::from("ARRAY_OBJECT"),
+            Self::UnparsedObject(v) => v.value.to_string(),
+        }
+    }
+}
+
+impl Serialize for OutputSchemaParametersType {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            Self::UnparsedObject(v) => v.serialize(serializer),
+            _ => serializer.serialize_str(self.to_string().as_str()),
+        }
+    }
+}
+
+impl<'de> Deserialize<'de> for OutputSchemaParametersType {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s: String = String::deserialize(deserializer)?;
+        Ok(match s.as_str() {
+            "STRING" => Self::STRING,
+            "NUMBER" => Self::NUMBER,
+            "BOOLEAN" => Self::BOOLEAN,
+            "OBJECT" => Self::OBJECT,
+            "ARRAY_STRING" => Self::ARRAY_STRING,
+            "ARRAY_NUMBER" => Self::ARRAY_NUMBER,
+            "ARRAY_BOOLEAN" => Self::ARRAY_BOOLEAN,
+            "ARRAY_OBJECT" => Self::ARRAY_OBJECT,
+            _ => Self::UnparsedObject(crate::datadog::UnparsedObject {
+                value: serde_json::Value::String(s.into()),
+            }),
+        })
+    }
+}
diff --git a/src/datadogV2/model/model_parameter.rs b/src/datadogV2/model/model_parameter.rs
new file mode 100644
index 000000000..6de31189a
--- /dev/null
+++ b/src/datadogV2/model/model_parameter.rs
@@ -0,0 +1,102 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+use serde::de::{Error, MapAccess, Visitor};
+use serde::{Deserialize, Deserializer, Serialize};
+use serde_with::skip_serializing_none;
+use std::fmt::{self, Formatter};
+
+/// The definition of `Parameter` object.
+#[non_exhaustive]
+#[skip_serializing_none]
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct Parameter {
+    /// The `Parameter` `name`.
+    #[serde(rename = "name")]
+    pub name: String,
+    /// The `Parameter` `value`.
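A minimal sketch of declaring a workflow output with the `OutputSchema` and `OutputSchemaParameters` builders from this change; the parameter name and label are made up, and the `datadog_api_client` import path is assumed:

use datadog_api_client::datadogV2::model::{
    OutputSchema, OutputSchemaParameters, OutputSchemaParametersType,
};

fn example_output_schema() -> OutputSchema {
    // Declare a single STRING output named "ticketUrl" with a human-readable label.
    let ticket_url = OutputSchemaParameters::new(
        "ticketUrl".to_string(),
        OutputSchemaParametersType::STRING,
    )
    .label("Ticket URL".to_string());

    OutputSchema::new().parameters(vec![ticket_url])
}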
+ #[serde(rename = "value")] + pub value: serde_json::Value, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl Parameter { + pub fn new(name: String, value: serde_json::Value) -> Parameter { + Parameter { + name, + value, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for Parameter { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ParameterVisitor; + impl<'a> Visitor<'a> for ParameterVisitor { + type Value = Parameter; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut name: Option = None; + let mut value: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "name" => { + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "value" => { + value = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let name = name.ok_or_else(|| M::Error::missing_field("name"))?; + let value = value.ok_or_else(|| M::Error::missing_field("value"))?; + + let content = Parameter { + name, + value, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ParameterVisitor) + } +} diff --git a/src/datadogV2/model/model_readiness_gate.rs b/src/datadogV2/model/model_readiness_gate.rs new file mode 100644 index 000000000..2e3c78270 --- /dev/null +++ b/src/datadogV2/model/model_readiness_gate.rs @@ -0,0 +1,106 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Used to merge multiple branches into a single branch. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ReadinessGate { + /// The definition of `ReadinessGateThresholdType` object. 
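For illustration, action inputs are plain name/value pairs, so a `Parameter` can wrap any JSON value; the names and values below are hypothetical:

use datadog_api_client::datadogV2::model::Parameter;

fn example_parameters() -> Vec<Parameter> {
    // Action inputs are name/value pairs whose values can be any JSON.
    vec![
        Parameter::new("channel".to_string(), serde_json::json!("#oncall")),
        Parameter::new("message".to_string(), serde_json::json!("Deploy finished")),
    ]
}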
+ #[serde(rename = "thresholdType")] + pub threshold_type: crate::datadogV2::model::ReadinessGateThresholdType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ReadinessGate { + pub fn new( + threshold_type: crate::datadogV2::model::ReadinessGateThresholdType, + ) -> ReadinessGate { + ReadinessGate { + threshold_type, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ReadinessGate { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ReadinessGateVisitor; + impl<'a> Visitor<'a> for ReadinessGateVisitor { + type Value = ReadinessGate; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut threshold_type: Option< + crate::datadogV2::model::ReadinessGateThresholdType, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "thresholdType" => { + threshold_type = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _threshold_type) = threshold_type { + match _threshold_type { + crate::datadogV2::model::ReadinessGateThresholdType::UnparsedObject(_threshold_type) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let threshold_type = + threshold_type.ok_or_else(|| M::Error::missing_field("threshold_type"))?; + + let content = ReadinessGate { + threshold_type, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ReadinessGateVisitor) + } +} diff --git a/src/datadogV2/model/model_readiness_gate_threshold_type.rs b/src/datadogV2/model/model_readiness_gate_threshold_type.rs new file mode 100644 index 000000000..b496f4fcc --- /dev/null +++ b/src/datadogV2/model/model_readiness_gate_threshold_type.rs @@ -0,0 +1,51 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
+ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ReadinessGateThresholdType { + ANY, + ALL, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for ReadinessGateThresholdType { + fn to_string(&self) -> String { + match self { + Self::ANY => String::from("ANY"), + Self::ALL => String::from("ALL"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for ReadinessGateThresholdType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for ReadinessGateThresholdType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "ANY" => Self::ANY, + "ALL" => Self::ALL, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_retry_strategy.rs b/src/datadogV2/model/model_retry_strategy.rs new file mode 100644 index 000000000..3d7707483 --- /dev/null +++ b/src/datadogV2/model/model_retry_strategy.rs @@ -0,0 +1,119 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `RetryStrategy` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct RetryStrategy { + /// The definition of `RetryStrategyKind` object. + #[serde(rename = "kind")] + pub kind: crate::datadogV2::model::RetryStrategyKind, + /// The definition of `RetryStrategyLinear` object. + #[serde(rename = "linear")] + pub linear: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl RetryStrategy { + pub fn new(kind: crate::datadogV2::model::RetryStrategyKind) -> RetryStrategy { + RetryStrategy { + kind, + linear: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn linear(mut self, value: crate::datadogV2::model::RetryStrategyLinear) -> Self { + self.linear = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for RetryStrategy { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct RetryStrategyVisitor; + impl<'a> Visitor<'a> for RetryStrategyVisitor { + type Value = RetryStrategy; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut kind: Option = None; + let mut linear: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "kind" => { + kind = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _kind) = kind { + match _kind { + crate::datadogV2::model::RetryStrategyKind::UnparsedObject( + _kind, + ) => { + _unparsed = true; + } + _ => {} + } + } + } + "linear" => { + if v.is_null() { + continue; + } + linear = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let kind = kind.ok_or_else(|| M::Error::missing_field("kind"))?; + + let content = RetryStrategy { + kind, + linear, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(RetryStrategyVisitor) + } +} diff --git a/src/datadogV2/model/model_retry_strategy_kind.rs b/src/datadogV2/model/model_retry_strategy_kind.rs new file mode 100644 index 000000000..14227536e --- /dev/null +++ b/src/datadogV2/model/model_retry_strategy_kind.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum RetryStrategyKind { + RETRY_STRATEGY_LINEAR, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for RetryStrategyKind { + fn to_string(&self) -> String { + match self { + Self::RETRY_STRATEGY_LINEAR => String::from("RETRY_STRATEGY_LINEAR"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for RetryStrategyKind { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for RetryStrategyKind { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "RETRY_STRATEGY_LINEAR" => Self::RETRY_STRATEGY_LINEAR, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_retry_strategy_linear.rs b/src/datadogV2/model/model_retry_strategy_linear.rs new file mode 100644 index 000000000..87322c040 --- /dev/null +++ b/src/datadogV2/model/model_retry_strategy_linear.rs @@ -0,0 +1,104 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `RetryStrategyLinear` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct RetryStrategyLinear { + /// The `RetryStrategyLinear` `interval`. The expected format is the number of seconds ending with an s. For example, 1 day is 86400s + #[serde(rename = "interval")] + pub interval: String, + /// The `RetryStrategyLinear` `maxRetries`. 
+ #[serde(rename = "maxRetries")] + pub max_retries: f64, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl RetryStrategyLinear { + pub fn new(interval: String, max_retries: f64) -> RetryStrategyLinear { + RetryStrategyLinear { + interval, + max_retries, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for RetryStrategyLinear { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct RetryStrategyLinearVisitor; + impl<'a> Visitor<'a> for RetryStrategyLinearVisitor { + type Value = RetryStrategyLinear; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut interval: Option = None; + let mut max_retries: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "interval" => { + interval = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "maxRetries" => { + max_retries = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let interval = interval.ok_or_else(|| M::Error::missing_field("interval"))?; + let max_retries = + max_retries.ok_or_else(|| M::Error::missing_field("max_retries"))?; + + let content = RetryStrategyLinear { + interval, + max_retries, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(RetryStrategyLinearVisitor) + } +} diff --git a/src/datadogV2/model/model_schedule_trigger.rs b/src/datadogV2/model/model_schedule_trigger.rs new file mode 100644 index 000000000..27670cc13 --- /dev/null +++ b/src/datadogV2/model/model_schedule_trigger.rs @@ -0,0 +1,94 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA a Schedule. The workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ScheduleTrigger { + /// Recurrence rule expression for scheduling. 
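A short sketch of a linear retry policy using the `RetryStrategy` builders above; the `60s` interval follows the seconds-with-`s` format noted in the doc comments, and the retry count is arbitrary:

use datadog_api_client::datadogV2::model::{RetryStrategy, RetryStrategyKind, RetryStrategyLinear};

fn example_retry() -> RetryStrategy {
    // Retry up to three times, waiting 60 seconds between attempts.
    RetryStrategy::new(RetryStrategyKind::RETRY_STRATEGY_LINEAR)
        .linear(RetryStrategyLinear::new("60s".to_string(), 3.0))
}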
+ #[serde(rename = "rruleExpression")] + pub rrule_expression: String, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ScheduleTrigger { + pub fn new(rrule_expression: String) -> ScheduleTrigger { + ScheduleTrigger { + rrule_expression, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ScheduleTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ScheduleTriggerVisitor; + impl<'a> Visitor<'a> for ScheduleTriggerVisitor { + type Value = ScheduleTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rrule_expression: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "rruleExpression" => { + rrule_expression = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let rrule_expression = + rrule_expression.ok_or_else(|| M::Error::missing_field("rrule_expression"))?; + + let content = ScheduleTrigger { + rrule_expression, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ScheduleTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_schedule_trigger_wrapper.rs b/src/datadogV2/model/model_schedule_trigger_wrapper.rs new file mode 100644 index 000000000..6110ea0a2 --- /dev/null +++ b/src/datadogV2/model/model_schedule_trigger_wrapper.rs @@ -0,0 +1,114 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Schedule-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ScheduleTriggerWrapper { + /// Trigger a workflow VIA a Schedule. The workflow must be published. + #[serde(rename = "scheduleTrigger")] + pub schedule_trigger: crate::datadogV2::model::ScheduleTrigger, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ScheduleTriggerWrapper { + pub fn new( + schedule_trigger: crate::datadogV2::model::ScheduleTrigger, + ) -> ScheduleTriggerWrapper { + ScheduleTriggerWrapper { + schedule_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for ScheduleTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ScheduleTriggerWrapperVisitor; + impl<'a> Visitor<'a> for ScheduleTriggerWrapperVisitor { + type Value = ScheduleTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut schedule_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "scheduleTrigger" => { + schedule_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let schedule_trigger = + schedule_trigger.ok_or_else(|| M::Error::missing_field("schedule_trigger"))?; + + let content = ScheduleTriggerWrapper { + schedule_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ScheduleTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_security_trigger.rs b/src/datadogV2/model/model_security_trigger.rs new file mode 100644 index 000000000..f42d005b3 --- /dev/null +++ b/src/datadogV2/model/model_security_trigger.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Trigger a workflow VIA a Security Signal or Finding. For automatic triggering a handle must be configured and the workflow must be published. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SecurityTrigger { + /// Defines a rate limit for a trigger. 
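A sketch of a schedule-based trigger, assuming the `ScheduleTrigger` and `ScheduleTriggerWrapper` builders in this change; the RRULE expression and start step name are examples only:

use datadog_api_client::datadogV2::model::{ScheduleTrigger, ScheduleTriggerWrapper};

fn example_schedule_trigger() -> ScheduleTriggerWrapper {
    // Run daily at 09:00 and begin execution at the "collect_metrics" step.
    ScheduleTriggerWrapper::new(ScheduleTrigger::new(
        "FREQ=DAILY;INTERVAL=1;BYHOUR=9;BYMINUTE=0".to_string(),
    ))
    .start_step_names(vec!["collect_metrics".to_string()])
}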
+ #[serde(rename = "rateLimit")] + pub rate_limit: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl SecurityTrigger { + pub fn new() -> SecurityTrigger { + SecurityTrigger { + rate_limit: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn rate_limit(mut self, value: crate::datadogV2::model::TriggerRateLimit) -> Self { + self.rate_limit = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for SecurityTrigger { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for SecurityTrigger { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SecurityTriggerVisitor; + impl<'a> Visitor<'a> for SecurityTriggerVisitor { + type Value = SecurityTrigger; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut rate_limit: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "rateLimit" => { + if v.is_null() { + continue; + } + rate_limit = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = SecurityTrigger { + rate_limit, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(SecurityTriggerVisitor) + } +} diff --git a/src/datadogV2/model/model_security_trigger_wrapper.rs b/src/datadogV2/model/model_security_trigger_wrapper.rs new file mode 100644 index 000000000..130caf68e --- /dev/null +++ b/src/datadogV2/model/model_security_trigger_wrapper.rs @@ -0,0 +1,114 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Security-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SecurityTriggerWrapper { + /// Trigger a workflow VIA a Security Signal or Finding. For automatic triggering a handle must be configured and the workflow must be published. + #[serde(rename = "securityTrigger")] + pub security_trigger: crate::datadogV2::model::SecurityTrigger, + /// A list of steps that run first after a trigger fires. 
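A sketch combining a security trigger with a rate limit (the `TriggerRateLimit` builder appears later in this change); the limit of five runs per hour and the start step name are hypothetical:

use datadog_api_client::datadogV2::model::{
    SecurityTrigger, SecurityTriggerWrapper, TriggerRateLimit,
};

fn example_security_trigger() -> SecurityTriggerWrapper {
    // Cap automatic runs at five executions per hour, then start at "triage_signal".
    let trigger = SecurityTrigger::new()
        .rate_limit(TriggerRateLimit::new().count(5).interval("3600s".to_string()));

    SecurityTriggerWrapper::new(trigger).start_step_names(vec!["triage_signal".to_string()])
}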
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl SecurityTriggerWrapper { + pub fn new( + security_trigger: crate::datadogV2::model::SecurityTrigger, + ) -> SecurityTriggerWrapper { + SecurityTriggerWrapper { + security_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for SecurityTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SecurityTriggerWrapperVisitor; + impl<'a> Visitor<'a> for SecurityTriggerWrapperVisitor { + type Value = SecurityTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut security_trigger: Option = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "securityTrigger" => { + security_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let security_trigger = + security_trigger.ok_or_else(|| M::Error::missing_field("security_trigger"))?; + + let content = SecurityTriggerWrapper { + security_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(SecurityTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_slack_trigger_wrapper.rs b/src/datadogV2/model/model_slack_trigger_wrapper.rs new file mode 100644 index 000000000..66d909356 --- /dev/null +++ b/src/datadogV2/model/model_slack_trigger_wrapper.rs @@ -0,0 +1,116 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Slack-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SlackTriggerWrapper { + /// Trigger a workflow VIA Slack. The workflow must be published. + #[serde(rename = "slackTrigger")] + pub slack_trigger: std::collections::BTreeMap, + /// A list of steps that run first after a trigger fires. 
+ #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl SlackTriggerWrapper { + pub fn new( + slack_trigger: std::collections::BTreeMap, + ) -> SlackTriggerWrapper { + SlackTriggerWrapper { + slack_trigger, + start_step_names: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for SlackTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SlackTriggerWrapperVisitor; + impl<'a> Visitor<'a> for SlackTriggerWrapperVisitor { + type Value = SlackTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut slack_trigger: Option< + std::collections::BTreeMap, + > = None; + let mut start_step_names: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "slackTrigger" => { + slack_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let slack_trigger = + slack_trigger.ok_or_else(|| M::Error::missing_field("slack_trigger"))?; + + let content = SlackTriggerWrapper { + slack_trigger, + start_step_names, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(SlackTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_spec.rs b/src/datadogV2/model/model_spec.rs new file mode 100644 index 000000000..9d942585e --- /dev/null +++ b/src/datadogV2/model/model_spec.rs @@ -0,0 +1,211 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The spec defines what the workflow does. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct Spec { + /// A list of annotations used in the workflow. These are like sticky notes for your workflow! + #[serde(rename = "annotations")] + pub annotations: Option>, + /// A list of connections or connection groups used in the workflow. + #[serde(rename = "connectionEnvs")] + pub connection_envs: Option>, + /// Unique identifier used to trigger workflows automatically in Datadog. + #[serde(rename = "handle")] + pub handle: Option, + /// A list of input parameters for the workflow. These can be used as dynamic runtime values in your workflow. 
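A sketch of a Slack-based trigger; since the `slackTrigger` payload is a free-form object, an empty map satisfies the constructor, and the start step name is hypothetical:

use std::collections::BTreeMap;

use datadog_api_client::datadogV2::model::SlackTriggerWrapper;

fn example_slack_trigger() -> SlackTriggerWrapper {
    // The Slack trigger carries no required fields, so an empty object is enough here.
    SlackTriggerWrapper::new(BTreeMap::new()).start_step_names(vec!["ack_request".to_string()])
}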
+ #[serde(rename = "inputSchema")] + pub input_schema: Option, + /// A list of output parameters for the workflow. + #[serde(rename = "outputSchema")] + pub output_schema: Option, + /// A `Step` is a sub-component of a workflow. Each `Step` performs an action. + #[serde(rename = "steps")] + pub steps: Option>, + /// The list of triggers that activate this workflow. At least one trigger is required, and each trigger type may appear at most once. + #[serde(rename = "triggers")] + pub triggers: Option>, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl Spec { + pub fn new() -> Spec { + Spec { + annotations: None, + connection_envs: None, + handle: None, + input_schema: None, + output_schema: None, + steps: None, + triggers: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn annotations(mut self, value: Vec) -> Self { + self.annotations = Some(value); + self + } + + pub fn connection_envs(mut self, value: Vec) -> Self { + self.connection_envs = Some(value); + self + } + + pub fn handle(mut self, value: String) -> Self { + self.handle = Some(value); + self + } + + pub fn input_schema(mut self, value: crate::datadogV2::model::InputSchema) -> Self { + self.input_schema = Some(value); + self + } + + pub fn output_schema(mut self, value: crate::datadogV2::model::OutputSchema) -> Self { + self.output_schema = Some(value); + self + } + + pub fn steps(mut self, value: Vec) -> Self { + self.steps = Some(value); + self + } + + pub fn triggers(mut self, value: Vec) -> Self { + self.triggers = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for Spec { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for Spec { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SpecVisitor; + impl<'a> Visitor<'a> for SpecVisitor { + type Value = Spec; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut annotations: Option> = None; + let mut connection_envs: Option> = None; + let mut handle: Option = None; + let mut input_schema: Option = None; + let mut output_schema: Option = None; + let mut steps: Option> = None; + let mut triggers: Option> = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "annotations" => { + if v.is_null() { + continue; + } + annotations = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "connectionEnvs" => { + if v.is_null() { + continue; + } + connection_envs = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "handle" => { + if v.is_null() { + continue; + } + handle = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "inputSchema" => { + if v.is_null() { + continue; + } + input_schema = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "outputSchema" => { + if v.is_null() { + continue; + } + output_schema = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "steps" => { + if v.is_null() { + continue; + } + steps = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "triggers" => { + if v.is_null() { + continue; + } + triggers = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = Spec { + annotations, + connection_envs, + handle, + input_schema, + output_schema, + steps, + triggers, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(SpecVisitor) + } +} diff --git a/src/datadogV2/model/model_step.rs b/src/datadogV2/model/model_step.rs new file mode 100644 index 000000000..573833b23 --- /dev/null +++ b/src/datadogV2/model/model_step.rs @@ -0,0 +1,226 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A Step is a sub-component of a workflow. Each Step performs an action. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct Step { + /// The unique identifier of an action. + #[serde(rename = "actionId")] + pub action_id: String, + /// Used to create conditions before running subsequent actions. + #[serde(rename = "completionGate")] + pub completion_gate: Option, + /// The unique identifier of a connection defined in the spec. + #[serde(rename = "connectionLabel")] + pub connection_label: Option, + /// The definition of `StepDisplay` object. + #[serde(rename = "display")] + pub display: Option, + /// The `Step` `errorHandlers`. + #[serde(rename = "errorHandlers")] + pub error_handlers: Option>, + /// Name of the step. + #[serde(rename = "name")] + pub name: String, + /// A list of subsequent actions to run. + #[serde(rename = "outboundEdges")] + pub outbound_edges: Option>, + /// A list of inputs for an action. + #[serde(rename = "parameters")] + pub parameters: Option>, + /// Used to merge multiple branches into a single branch. 
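A sketch of a single workflow step wired to a Slack connection, using the `Step` builders defined here; the action ID, connection label, and step names are illustrative placeholders rather than real identifiers:

use datadog_api_client::datadogV2::model::{OutboundEdge, Parameter, Step};

fn example_step() -> Step {
    // Post a message through a Slack connection, then continue to "create_ticket".
    Step::new(
        "com.datadoghq.slack.api.postMessage".to_string(),
        "notify_oncall".to_string(),
    )
    .connection_label("SLACK_CONNECTION".to_string())
    .parameters(vec![Parameter::new(
        "channel".to_string(),
        serde_json::json!("#oncall"),
    )])
    .outbound_edges(vec![OutboundEdge::new(
        "main".to_string(),
        "create_ticket".to_string(),
    )])
}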
+ #[serde(rename = "readinessGate")] + pub readiness_gate: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl Step { + pub fn new(action_id: String, name: String) -> Step { + Step { + action_id, + completion_gate: None, + connection_label: None, + display: None, + error_handlers: None, + name, + outbound_edges: None, + parameters: None, + readiness_gate: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn completion_gate(mut self, value: crate::datadogV2::model::CompletionGate) -> Self { + self.completion_gate = Some(value); + self + } + + pub fn connection_label(mut self, value: String) -> Self { + self.connection_label = Some(value); + self + } + + pub fn display(mut self, value: crate::datadogV2::model::StepDisplay) -> Self { + self.display = Some(value); + self + } + + pub fn error_handlers(mut self, value: Vec) -> Self { + self.error_handlers = Some(value); + self + } + + pub fn outbound_edges(mut self, value: Vec) -> Self { + self.outbound_edges = Some(value); + self + } + + pub fn parameters(mut self, value: Vec) -> Self { + self.parameters = Some(value); + self + } + + pub fn readiness_gate(mut self, value: crate::datadogV2::model::ReadinessGate) -> Self { + self.readiness_gate = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for Step { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct StepVisitor; + impl<'a> Visitor<'a> for StepVisitor { + type Value = Step; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut action_id: Option = None; + let mut completion_gate: Option = None; + let mut connection_label: Option = None; + let mut display: Option = None; + let mut error_handlers: Option> = None; + let mut name: Option = None; + let mut outbound_edges: Option> = None; + let mut parameters: Option> = None; + let mut readiness_gate: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "actionId" => { + action_id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "completionGate" => { + if v.is_null() { + continue; + } + completion_gate = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "connectionLabel" => { + if v.is_null() { + continue; + } + connection_label = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "display" => { + if v.is_null() { + continue; + } + display = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "errorHandlers" => { + if v.is_null() { + continue; + } + error_handlers = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "name" => { + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "outboundEdges" => { + if v.is_null() { + continue; + } + outbound_edges = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "parameters" => { + if v.is_null() { + continue; + } + parameters = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "readinessGate" => { + if v.is_null() { + continue; + } + readiness_gate = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let action_id = action_id.ok_or_else(|| M::Error::missing_field("action_id"))?; + let name = name.ok_or_else(|| M::Error::missing_field("name"))?; + + let content = Step { + action_id, + completion_gate, + connection_label, + display, + error_handlers, + name, + outbound_edges, + parameters, + readiness_gate, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(StepVisitor) + } +} diff --git a/src/datadogV2/model/model_step_display.rs b/src/datadogV2/model/model_step_display.rs new file mode 100644 index 000000000..879f674b2 --- /dev/null +++ b/src/datadogV2/model/model_step_display.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `StepDisplay` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct StepDisplay { + /// The definition of `StepDisplayBounds` object. 
+ #[serde(rename = "bounds")] + pub bounds: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl StepDisplay { + pub fn new() -> StepDisplay { + StepDisplay { + bounds: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn bounds(mut self, value: crate::datadogV2::model::StepDisplayBounds) -> Self { + self.bounds = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for StepDisplay { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for StepDisplay { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct StepDisplayVisitor; + impl<'a> Visitor<'a> for StepDisplayVisitor { + type Value = StepDisplay; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut bounds: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "bounds" => { + if v.is_null() { + continue; + } + bounds = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = StepDisplay { + bounds, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(StepDisplayVisitor) + } +} diff --git a/src/datadogV2/model/model_step_display_bounds.rs b/src/datadogV2/model/model_step_display_bounds.rs new file mode 100644 index 000000000..cdca5a3f9 --- /dev/null +++ b/src/datadogV2/model/model_step_display_bounds.rs @@ -0,0 +1,122 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `StepDisplayBounds` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct StepDisplayBounds { + /// The `bounds` `x`. + #[serde(rename = "x")] + pub x: Option, + /// The `bounds` `y`. 
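A sketch of positioning a step on the workflow canvas with the display builders here; the coordinates are arbitrary doubles:

use datadog_api_client::datadogV2::model::{StepDisplay, StepDisplayBounds};

fn example_display() -> StepDisplay {
    // Place the step on the canvas; x/y are free-form layout coordinates.
    StepDisplay::new().bounds(StepDisplayBounds::new().x(0.0).y(180.0))
}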
+ #[serde(rename = "y")] + pub y: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl StepDisplayBounds { + pub fn new() -> StepDisplayBounds { + StepDisplayBounds { + x: None, + y: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn x(mut self, value: f64) -> Self { + self.x = Some(value); + self + } + + pub fn y(mut self, value: f64) -> Self { + self.y = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for StepDisplayBounds { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for StepDisplayBounds { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct StepDisplayBoundsVisitor; + impl<'a> Visitor<'a> for StepDisplayBoundsVisitor { + type Value = StepDisplayBounds; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut x: Option = None; + let mut y: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "x" => { + if v.is_null() { + continue; + } + x = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "y" => { + if v.is_null() { + continue; + } + y = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = StepDisplayBounds { + x, + y, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(StepDisplayBoundsVisitor) + } +} diff --git a/src/datadogV2/model/model_trigger.rs b/src/datadogV2/model/model_trigger.rs new file mode 100644 index 000000000..401f3a018 --- /dev/null +++ b/src/datadogV2/model/model_trigger.rs @@ -0,0 +1,124 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::{Deserialize, Deserializer, Serialize}; + +/// One of the triggers that can start the execution of a workflow. 
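A sketch of assembling a `Spec` from a step and a schedule trigger, boxing the concrete wrapper into the untagged `Trigger` enum defined below; the handle and RRULE string are placeholders:

use datadog_api_client::datadogV2::model::{
    ScheduleTrigger, ScheduleTriggerWrapper, Spec, Step, Trigger,
};

fn example_spec(step: Step) -> Spec {
    // Each concrete wrapper is boxed into the untagged `Trigger` enum before attaching it.
    let trigger = Trigger::ScheduleTriggerWrapper(Box::new(ScheduleTriggerWrapper::new(
        ScheduleTrigger::new("FREQ=HOURLY;INTERVAL=1".to_string()),
    )));

    Spec::new()
        .handle("my-workflow-handle".to_string())
        .steps(vec![step])
        .triggers(vec![trigger])
}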
+#[non_exhaustive]
+#[derive(Clone, Debug, PartialEq, Serialize)]
+#[serde(untagged)]
+pub enum Trigger {
+    APITriggerWrapper(Box<crate::datadogV2::model::APITriggerWrapper>),
+    AppTriggerWrapper(Box<crate::datadogV2::model::AppTriggerWrapper>),
+    CaseTriggerWrapper(Box<crate::datadogV2::model::CaseTriggerWrapper>),
+    ChangeEventTriggerWrapper(Box<crate::datadogV2::model::ChangeEventTriggerWrapper>),
+    DashboardTriggerWrapper(Box<crate::datadogV2::model::DashboardTriggerWrapper>),
+    GithubWebhookTriggerWrapper(Box<crate::datadogV2::model::GithubWebhookTriggerWrapper>),
+    IncidentTriggerWrapper(Box<crate::datadogV2::model::IncidentTriggerWrapper>),
+    MonitorTriggerWrapper(Box<crate::datadogV2::model::MonitorTriggerWrapper>),
+    ScheduleTriggerWrapper(Box<crate::datadogV2::model::ScheduleTriggerWrapper>),
+    SecurityTriggerWrapper(Box<crate::datadogV2::model::SecurityTriggerWrapper>),
+    SlackTriggerWrapper(Box<crate::datadogV2::model::SlackTriggerWrapper>),
+    WorkflowTriggerWrapper(Box<crate::datadogV2::model::WorkflowTriggerWrapper>),
+    UnparsedObject(crate::datadog::UnparsedObject),
+}
+
+impl<'de> Deserialize<'de> for Trigger {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let value: serde_json::Value = Deserialize::deserialize(deserializer)?;
+        if let Ok(_v) =
+            serde_json::from_value::<Box<crate::datadogV2::model::APITriggerWrapper>>(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::APITriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) =
+            serde_json::from_value::<Box<crate::datadogV2::model::AppTriggerWrapper>>(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::AppTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<Box<crate::datadogV2::model::CaseTriggerWrapper>>(
+            value.clone(),
+        ) {
+            if !_v._unparsed {
+                return Ok(Trigger::CaseTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::ChangeEventTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::ChangeEventTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::DashboardTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::DashboardTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::GithubWebhookTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::GithubWebhookTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::IncidentTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::IncidentTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::MonitorTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::MonitorTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::ScheduleTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::ScheduleTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::SecurityTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::SecurityTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<Box<crate::datadogV2::model::SlackTriggerWrapper>>(
+            value.clone(),
+        ) {
+            if !_v._unparsed {
+                return Ok(Trigger::SlackTriggerWrapper(_v));
+            }
+        }
+        if let Ok(_v) = serde_json::from_value::<
+            Box<crate::datadogV2::model::WorkflowTriggerWrapper>,
+        >(value.clone())
+        {
+            if !_v._unparsed {
+                return Ok(Trigger::WorkflowTriggerWrapper(_v));
+            }
+        }
+
+        return Ok(Trigger::UnparsedObject(crate::datadog::UnparsedObject {
+            value,
+        }));
+    }
+}
diff --git a/src/datadogV2/model/model_trigger_rate_limit.rs b/src/datadogV2/model/model_trigger_rate_limit.rs
new file mode 100644
index 000000000..6601e86eb
--- /dev/null
+++ b/src/datadogV2/model/model_trigger_rate_limit.rs
@@ -0,0 +1,122 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+use serde::de::{Error, MapAccess, Visitor};
+use serde::{Deserialize, Deserializer, Serialize};
+use serde_with::skip_serializing_none;
+use std::fmt::{self, Formatter};
+
+/// Defines a rate limit for a trigger.
+#[non_exhaustive]
+#[skip_serializing_none]
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct TriggerRateLimit {
+    /// The `TriggerRateLimit` `count`.
+    #[serde(rename = "count")]
+    pub count: Option<i64>,
+    /// The `TriggerRateLimit` `interval`.
The expected format is the number of seconds ending with an s. For example, 1 day is 86400s + #[serde(rename = "interval")] + pub interval: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl TriggerRateLimit { + pub fn new() -> TriggerRateLimit { + TriggerRateLimit { + count: None, + interval: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn count(mut self, value: i64) -> Self { + self.count = Some(value); + self + } + + pub fn interval(mut self, value: String) -> Self { + self.interval = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for TriggerRateLimit { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for TriggerRateLimit { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct TriggerRateLimitVisitor; + impl<'a> Visitor<'a> for TriggerRateLimitVisitor { + type Value = TriggerRateLimit; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut count: Option = None; + let mut interval: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "count" => { + if v.is_null() { + continue; + } + count = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "interval" => { + if v.is_null() { + continue; + } + interval = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = TriggerRateLimit { + count, + interval, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(TriggerRateLimitVisitor) + } +} diff --git a/src/datadogV2/model/model_update_workflow_request.rs b/src/datadogV2/model/model_update_workflow_request.rs new file mode 100644 index 000000000..18d305352 --- /dev/null +++ b/src/datadogV2/model/model_update_workflow_request.rs @@ -0,0 +1,92 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// A request object for updating an existing workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct UpdateWorkflowRequest { + /// Data related to the workflow being updated. 
+ #[serde(rename = "data")] + pub data: crate::datadogV2::model::WorkflowDataUpdate, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl UpdateWorkflowRequest { + pub fn new(data: crate::datadogV2::model::WorkflowDataUpdate) -> UpdateWorkflowRequest { + UpdateWorkflowRequest { + data, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for UpdateWorkflowRequest { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct UpdateWorkflowRequestVisitor; + impl<'a> Visitor<'a> for UpdateWorkflowRequestVisitor { + type Value = UpdateWorkflowRequest; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "data" => { + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let data = data.ok_or_else(|| M::Error::missing_field("data"))?; + + let content = UpdateWorkflowRequest { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(UpdateWorkflowRequestVisitor) + } +} diff --git a/src/datadogV2/model/model_update_workflow_response.rs b/src/datadogV2/model/model_update_workflow_response.rs new file mode 100644 index 000000000..a9453b0e9 --- /dev/null +++ b/src/datadogV2/model/model_update_workflow_response.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The response object after updating a workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct UpdateWorkflowResponse { + /// Data related to the workflow being updated. 
+ #[serde(rename = "data")] + pub data: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl UpdateWorkflowResponse { + pub fn new() -> UpdateWorkflowResponse { + UpdateWorkflowResponse { + data: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn data(mut self, value: crate::datadogV2::model::WorkflowDataUpdate) -> Self { + self.data = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for UpdateWorkflowResponse { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for UpdateWorkflowResponse { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct UpdateWorkflowResponseVisitor; + impl<'a> Visitor<'a> for UpdateWorkflowResponseVisitor { + type Value = UpdateWorkflowResponse; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "data" => { + if v.is_null() { + continue; + } + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = UpdateWorkflowResponse { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(UpdateWorkflowResponseVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_data.rs b/src/datadogV2/model/model_workflow_data.rs new file mode 100644 index 000000000..e6558a327 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data.rs @@ -0,0 +1,154 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Data related to the workflow. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowData { + /// The definition of `WorkflowDataAttributes` object. + #[serde(rename = "attributes")] + pub attributes: crate::datadogV2::model::WorkflowDataAttributes, + /// The workflow identifier + #[serde(rename = "id")] + pub id: Option, + /// The definition of `WorkflowDataRelationships` object. + #[serde(rename = "relationships")] + pub relationships: Option, + /// The definition of `WorkflowDataType` object. 
+ #[serde(rename = "type")] + pub type_: crate::datadogV2::model::WorkflowDataType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowData { + pub fn new( + attributes: crate::datadogV2::model::WorkflowDataAttributes, + type_: crate::datadogV2::model::WorkflowDataType, + ) -> WorkflowData { + WorkflowData { + attributes, + id: None, + relationships: None, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn id(mut self, value: String) -> Self { + self.id = Some(value); + self + } + + pub fn relationships( + mut self, + value: crate::datadogV2::model::WorkflowDataRelationships, + ) -> Self { + self.relationships = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for WorkflowData { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowDataVisitor; + impl<'a> Visitor<'a> for WorkflowDataVisitor { + type Value = WorkflowData; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut attributes: Option = None; + let mut id: Option = None; + let mut relationships: Option = + None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "attributes" => { + attributes = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "id" => { + if v.is_null() { + continue; + } + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "relationships" => { + if v.is_null() { + continue; + } + relationships = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::WorkflowDataType::UnparsedObject( + _type_, + ) => { + _unparsed = true; + } + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let attributes = attributes.ok_or_else(|| M::Error::missing_field("attributes"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = WorkflowData { + attributes, + id, + relationships, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowDataVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_data_attributes.rs b/src/datadogV2/model/model_workflow_data_attributes.rs new file mode 100644 index 000000000..b69250552 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data_attributes.rs @@ -0,0 +1,206 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
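(Aside, not part of the generated patch: a minimal sketch of how the builder-style constructors in `WorkflowData` and the `WorkflowDataAttributes` type defined just below might be combined into a workflow payload. The external crate name `datadog_api_client` is an assumption, and `Spec` is taken as a parameter because its own builder lives elsewhere in the generated model module.)

use datadog_api_client::datadogV2::model::{
    Spec, WorkflowData, WorkflowDataAttributes, WorkflowDataType,
};

// Assemble the `data` object for a create-workflow request, mirroring the
// "Example Workflow" payload recorded in the test cassettes further down.
fn build_workflow_data(spec: Spec) -> WorkflowData {
    let attributes = WorkflowDataAttributes::new("Example Workflow".to_string(), spec)
        .description("A sample workflow.".to_string())
        .published(true)
        .tags(vec!["team:infra".to_string(), "service:monitoring".to_string()]);
    WorkflowData::new(attributes, WorkflowDataType::WORKFLOWS)
}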
+use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `WorkflowDataAttributes` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowDataAttributes { + /// When the workflow was created. + #[serde(rename = "createdAt")] + pub created_at: Option>, + /// Description of the workflow. + #[serde(rename = "description")] + pub description: Option, + /// Name of the workflow. + #[serde(rename = "name")] + pub name: String, + /// Set the workflow to published or unpublished. Workflows in an unpublished state will only be executable via manual runs. Automatic triggers such as Schedule will not execute the workflow until it is published. + #[serde(rename = "published")] + pub published: Option, + /// The spec defines what the workflow does. + #[serde(rename = "spec")] + pub spec: crate::datadogV2::model::Spec, + /// Tags of the workflow. + #[serde(rename = "tags")] + pub tags: Option>, + /// When the workflow was last updated. + #[serde(rename = "updatedAt")] + pub updated_at: Option>, + /// If a Webhook trigger is defined on this workflow, a webhookSecret is required and should be provided here. + #[serde(rename = "webhookSecret")] + pub webhook_secret: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowDataAttributes { + pub fn new(name: String, spec: crate::datadogV2::model::Spec) -> WorkflowDataAttributes { + WorkflowDataAttributes { + created_at: None, + description: None, + name, + published: None, + spec, + tags: None, + updated_at: None, + webhook_secret: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn created_at(mut self, value: chrono::DateTime) -> Self { + self.created_at = Some(value); + self + } + + pub fn description(mut self, value: String) -> Self { + self.description = Some(value); + self + } + + pub fn published(mut self, value: bool) -> Self { + self.published = Some(value); + self + } + + pub fn tags(mut self, value: Vec) -> Self { + self.tags = Some(value); + self + } + + pub fn updated_at(mut self, value: chrono::DateTime) -> Self { + self.updated_at = Some(value); + self + } + + pub fn webhook_secret(mut self, value: String) -> Self { + self.webhook_secret = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for WorkflowDataAttributes { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowDataAttributesVisitor; + impl<'a> Visitor<'a> for WorkflowDataAttributesVisitor { + type Value = WorkflowDataAttributes; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut created_at: Option> = None; + let mut description: Option = None; + let mut name: Option = None; + let mut published: Option = None; + let mut spec: Option = None; + let mut tags: Option> = None; + let mut updated_at: Option> = None; + let mut webhook_secret: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = 
false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "createdAt" => { + if v.is_null() { + continue; + } + created_at = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "description" => { + if v.is_null() { + continue; + } + description = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "name" => { + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "published" => { + if v.is_null() { + continue; + } + published = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "spec" => { + spec = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "tags" => { + if v.is_null() { + continue; + } + tags = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "updatedAt" => { + if v.is_null() { + continue; + } + updated_at = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "webhookSecret" => { + if v.is_null() { + continue; + } + webhook_secret = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let name = name.ok_or_else(|| M::Error::missing_field("name"))?; + let spec = spec.ok_or_else(|| M::Error::missing_field("spec"))?; + + let content = WorkflowDataAttributes { + created_at, + description, + name, + published, + spec, + tags, + updated_at, + webhook_secret, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowDataAttributesVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_data_relationships.rs b/src/datadogV2/model/model_workflow_data_relationships.rs new file mode 100644 index 000000000..77f233ea9 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data_relationships.rs @@ -0,0 +1,122 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `WorkflowDataRelationships` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowDataRelationships { + /// The definition of `WorkflowUserRelationship` object. + #[serde(rename = "creator")] + pub creator: Option, + /// The definition of `WorkflowUserRelationship` object. 
+ #[serde(rename = "owner")] + pub owner: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowDataRelationships { + pub fn new() -> WorkflowDataRelationships { + WorkflowDataRelationships { + creator: None, + owner: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn creator(mut self, value: crate::datadogV2::model::WorkflowUserRelationship) -> Self { + self.creator = Some(value); + self + } + + pub fn owner(mut self, value: crate::datadogV2::model::WorkflowUserRelationship) -> Self { + self.owner = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for WorkflowDataRelationships { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for WorkflowDataRelationships { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowDataRelationshipsVisitor; + impl<'a> Visitor<'a> for WorkflowDataRelationshipsVisitor { + type Value = WorkflowDataRelationships; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut creator: Option = None; + let mut owner: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "creator" => { + if v.is_null() { + continue; + } + creator = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "owner" => { + if v.is_null() { + continue; + } + owner = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = WorkflowDataRelationships { + creator, + owner, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowDataRelationshipsVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_data_type.rs b/src/datadogV2/model/model_workflow_data_type.rs new file mode 100644 index 000000000..912d20e97 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data_type.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
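(Aside, assumed user-side code rather than part of this patch: a small sketch of the round-trip behavior implemented by the `WorkflowDataType` enum introduced just below — known strings map onto named variants, while unknown strings are retained as `UnparsedObject` instead of failing deserialization. The crate name `datadog_api_client` is an assumption.)

use datadog_api_client::datadogV2::model::WorkflowDataType;

fn workflow_type_round_trip() -> Result<(), serde_json::Error> {
    // "workflows" resolves to the known variant and serializes back to the same string.
    let known: WorkflowDataType = serde_json::from_str(r#""workflows""#)?;
    assert_eq!(known.to_string(), "workflows");

    // Any other string is preserved as UnparsedObject rather than producing an error.
    let unknown: WorkflowDataType = serde_json::from_str(r#""not-a-workflow-type""#)?;
    assert!(matches!(unknown, WorkflowDataType::UnparsedObject(_)));
    Ok(())
}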
+ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum WorkflowDataType { + WORKFLOWS, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for WorkflowDataType { + fn to_string(&self) -> String { + match self { + Self::WORKFLOWS => String::from("workflows"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for WorkflowDataType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for WorkflowDataType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "workflows" => Self::WORKFLOWS, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_workflow_data_update.rs b/src/datadogV2/model/model_workflow_data_update.rs new file mode 100644 index 000000000..52d3b73a0 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data_update.rs @@ -0,0 +1,155 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Data related to the workflow being updated. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowDataUpdate { + /// The definition of `WorkflowDataUpdateAttributes` object. + #[serde(rename = "attributes")] + pub attributes: crate::datadogV2::model::WorkflowDataUpdateAttributes, + /// The workflow identifier + #[serde(rename = "id")] + pub id: Option, + /// The definition of `WorkflowDataRelationships` object. + #[serde(rename = "relationships")] + pub relationships: Option, + /// The definition of `WorkflowDataType` object. 
+ #[serde(rename = "type")] + pub type_: crate::datadogV2::model::WorkflowDataType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowDataUpdate { + pub fn new( + attributes: crate::datadogV2::model::WorkflowDataUpdateAttributes, + type_: crate::datadogV2::model::WorkflowDataType, + ) -> WorkflowDataUpdate { + WorkflowDataUpdate { + attributes, + id: None, + relationships: None, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn id(mut self, value: String) -> Self { + self.id = Some(value); + self + } + + pub fn relationships( + mut self, + value: crate::datadogV2::model::WorkflowDataRelationships, + ) -> Self { + self.relationships = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for WorkflowDataUpdate { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowDataUpdateVisitor; + impl<'a> Visitor<'a> for WorkflowDataUpdateVisitor { + type Value = WorkflowDataUpdate; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut attributes: Option = + None; + let mut id: Option = None; + let mut relationships: Option = + None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "attributes" => { + attributes = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "id" => { + if v.is_null() { + continue; + } + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "relationships" => { + if v.is_null() { + continue; + } + relationships = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::WorkflowDataType::UnparsedObject( + _type_, + ) => { + _unparsed = true; + } + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let attributes = attributes.ok_or_else(|| M::Error::missing_field("attributes"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = WorkflowDataUpdate { + attributes, + id, + relationships, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowDataUpdateVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_data_update_attributes.rs b/src/datadogV2/model/model_workflow_data_update_attributes.rs new file mode 100644 index 000000000..ae8ce0164 --- /dev/null +++ b/src/datadogV2/model/model_workflow_data_update_attributes.rs @@ -0,0 +1,226 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
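(Usage note, illustrative only and not part of the patch: every field on the `WorkflowDataUpdateAttributes` builder defined just below is optional, so a partial update request can be assembled as follows; the crate name `datadog_api_client` is assumed.)

use datadog_api_client::datadogV2::model::{
    UpdateWorkflowRequest, WorkflowDataType, WorkflowDataUpdate, WorkflowDataUpdateAttributes,
};

// Rename a workflow and unpublish it; attributes that are not set are simply omitted.
fn build_update_request() -> UpdateWorkflowRequest {
    let attributes = WorkflowDataUpdateAttributes::new()
        .name("Example Workflow (renamed)".to_string())
        .published(false);
    let data = WorkflowDataUpdate::new(attributes, WorkflowDataType::WORKFLOWS);
    UpdateWorkflowRequest::new(data)
}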
+use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `WorkflowDataUpdateAttributes` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowDataUpdateAttributes { + /// When the workflow was created. + #[serde(rename = "createdAt")] + pub created_at: Option>, + /// Description of the workflow. + #[serde(rename = "description")] + pub description: Option, + /// Name of the workflow. + #[serde(rename = "name")] + pub name: Option, + /// Set the workflow to published or unpublished. Workflows in an unpublished state will only be executable via manual runs. Automatic triggers such as Schedule will not execute the workflow until it is published. + #[serde(rename = "published")] + pub published: Option, + /// The spec defines what the workflow does. + #[serde(rename = "spec")] + pub spec: Option, + /// Tags of the workflow. + #[serde(rename = "tags")] + pub tags: Option>, + /// When the workflow was last updated. + #[serde(rename = "updatedAt")] + pub updated_at: Option>, + /// If a Webhook trigger is defined on this workflow, a webhookSecret is required and should be provided here. + #[serde(rename = "webhookSecret")] + pub webhook_secret: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowDataUpdateAttributes { + pub fn new() -> WorkflowDataUpdateAttributes { + WorkflowDataUpdateAttributes { + created_at: None, + description: None, + name: None, + published: None, + spec: None, + tags: None, + updated_at: None, + webhook_secret: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn created_at(mut self, value: chrono::DateTime) -> Self { + self.created_at = Some(value); + self + } + + pub fn description(mut self, value: String) -> Self { + self.description = Some(value); + self + } + + pub fn name(mut self, value: String) -> Self { + self.name = Some(value); + self + } + + pub fn published(mut self, value: bool) -> Self { + self.published = Some(value); + self + } + + pub fn spec(mut self, value: crate::datadogV2::model::Spec) -> Self { + self.spec = Some(value); + self + } + + pub fn tags(mut self, value: Vec) -> Self { + self.tags = Some(value); + self + } + + pub fn updated_at(mut self, value: chrono::DateTime) -> Self { + self.updated_at = Some(value); + self + } + + pub fn webhook_secret(mut self, value: String) -> Self { + self.webhook_secret = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for WorkflowDataUpdateAttributes { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for WorkflowDataUpdateAttributes { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowDataUpdateAttributesVisitor; + impl<'a> Visitor<'a> for WorkflowDataUpdateAttributesVisitor { + type Value = WorkflowDataUpdateAttributes; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut created_at: Option> = None; + let mut description: Option = None; + let mut name: Option = None; + let mut published: Option = None; + 
let mut spec: Option = None; + let mut tags: Option> = None; + let mut updated_at: Option> = None; + let mut webhook_secret: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "createdAt" => { + if v.is_null() { + continue; + } + created_at = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "description" => { + if v.is_null() { + continue; + } + description = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "name" => { + if v.is_null() { + continue; + } + name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "published" => { + if v.is_null() { + continue; + } + published = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "spec" => { + if v.is_null() { + continue; + } + spec = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "tags" => { + if v.is_null() { + continue; + } + tags = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "updatedAt" => { + if v.is_null() { + continue; + } + updated_at = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "webhookSecret" => { + if v.is_null() { + continue; + } + webhook_secret = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = WorkflowDataUpdateAttributes { + created_at, + description, + name, + published, + spec, + tags, + updated_at, + webhook_secret, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowDataUpdateAttributesVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_trigger_wrapper.rs b/src/datadogV2/model/model_workflow_trigger_wrapper.rs new file mode 100644 index 000000000..2cc5488d5 --- /dev/null +++ b/src/datadogV2/model/model_workflow_trigger_wrapper.rs @@ -0,0 +1,116 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Schema for a Workflow-based trigger. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowTriggerWrapper { + /// A list of steps that run first after a trigger fires. + #[serde(rename = "startStepNames")] + pub start_step_names: Option>, + /// Trigger a workflow VIA the Datadog UI. Only required if no other trigger exists. 
+ #[serde(rename = "workflowTrigger")] + pub workflow_trigger: std::collections::BTreeMap, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowTriggerWrapper { + pub fn new( + workflow_trigger: std::collections::BTreeMap, + ) -> WorkflowTriggerWrapper { + WorkflowTriggerWrapper { + start_step_names: None, + workflow_trigger, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn start_step_names(mut self, value: Vec) -> Self { + self.start_step_names = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for WorkflowTriggerWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowTriggerWrapperVisitor; + impl<'a> Visitor<'a> for WorkflowTriggerWrapperVisitor { + type Value = WorkflowTriggerWrapper; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut start_step_names: Option> = None; + let mut workflow_trigger: Option< + std::collections::BTreeMap, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "startStepNames" => { + if v.is_null() { + continue; + } + start_step_names = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "workflowTrigger" => { + workflow_trigger = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let workflow_trigger = + workflow_trigger.ok_or_else(|| M::Error::missing_field("workflow_trigger"))?; + + let content = WorkflowTriggerWrapper { + start_step_names, + workflow_trigger, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowTriggerWrapperVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_user_relationship.rs b/src/datadogV2/model/model_workflow_user_relationship.rs new file mode 100644 index 000000000..a43c84d3f --- /dev/null +++ b/src/datadogV2/model/model_workflow_user_relationship.rs @@ -0,0 +1,105 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `WorkflowUserRelationship` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowUserRelationship { + /// The definition of `WorkflowUserRelationshipData` object. 
+ #[serde(rename = "data")] + pub data: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowUserRelationship { + pub fn new() -> WorkflowUserRelationship { + WorkflowUserRelationship { + data: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn data(mut self, value: crate::datadogV2::model::WorkflowUserRelationshipData) -> Self { + self.data = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for WorkflowUserRelationship { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for WorkflowUserRelationship { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowUserRelationshipVisitor; + impl<'a> Visitor<'a> for WorkflowUserRelationshipVisitor { + type Value = WorkflowUserRelationship; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "data" => { + if v.is_null() { + continue; + } + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = WorkflowUserRelationship { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowUserRelationshipVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_user_relationship_data.rs b/src/datadogV2/model/model_workflow_user_relationship_data.rs new file mode 100644 index 000000000..d603d49fc --- /dev/null +++ b/src/datadogV2/model/model_workflow_user_relationship_data.rs @@ -0,0 +1,113 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The definition of `WorkflowUserRelationshipData` object. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct WorkflowUserRelationshipData { + /// The user identifier + #[serde(rename = "id")] + pub id: String, + /// The definition of `WorkflowUserRelationshipType` object. 
+ #[serde(rename = "type")] + pub type_: crate::datadogV2::model::WorkflowUserRelationshipType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl WorkflowUserRelationshipData { + pub fn new( + id: String, + type_: crate::datadogV2::model::WorkflowUserRelationshipType, + ) -> WorkflowUserRelationshipData { + WorkflowUserRelationshipData { + id, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for WorkflowUserRelationshipData { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct WorkflowUserRelationshipDataVisitor; + impl<'a> Visitor<'a> for WorkflowUserRelationshipDataVisitor { + type Value = WorkflowUserRelationshipData; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut id: Option = None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? { + match k.as_str() { + "id" => { + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::WorkflowUserRelationshipType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let id = id.ok_or_else(|| M::Error::missing_field("id"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = WorkflowUserRelationshipData { + id, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(WorkflowUserRelationshipDataVisitor) + } +} diff --git a/src/datadogV2/model/model_workflow_user_relationship_type.rs b/src/datadogV2/model/model_workflow_user_relationship_type.rs new file mode 100644 index 000000000..5d3b7d2f0 --- /dev/null +++ b/src/datadogV2/model/model_workflow_user_relationship_type.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
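(Aside, assumed user-side code with the crate name `datadog_api_client` taken as an assumption: a brief sketch of how `WorkflowUserRelationshipData` combines with the `USERS` variant introduced just below and the `WorkflowDataRelationships` builder from earlier in the patch.)

use datadog_api_client::datadogV2::model::{
    WorkflowDataRelationships, WorkflowUserRelationship, WorkflowUserRelationshipData,
    WorkflowUserRelationshipType,
};

// Point both the creator and the owner relationship at the same user ID.
fn relationships_for_user(user_id: String) -> WorkflowDataRelationships {
    let user = WorkflowUserRelationship::new().data(WorkflowUserRelationshipData::new(
        user_id,
        WorkflowUserRelationshipType::USERS,
    ));
    WorkflowDataRelationships::new()
        .creator(user.clone())
        .owner(user)
}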
+ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum WorkflowUserRelationshipType { + USERS, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for WorkflowUserRelationshipType { + fn to_string(&self) -> String { + match self { + Self::USERS => String::from("users"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for WorkflowUserRelationshipType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for WorkflowUserRelationshipType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "users" => Self::USERS, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.frozen new file mode 100644 index 000000000..ad0605b17 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:34:39.241Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.json new file mode 100644 index 000000000..47011ee31 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Bad-request-response.json @@ -0,0 +1,39 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"name\":\"Too many characters in description\",\"spec\":{}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow description exceeds size limit of [300] characters\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 400, + "message": "Bad Request" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:34:39 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.frozen new file mode 100644 index 000000000..8b2b65b83 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:34:39.387Z \ No 
newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.json new file mode 100644 index 000000000..ee7595a51 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-Workflow-returns-Successfully-created-a-workflow-response.json @@ -0,0 +1,63 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"A sample workflow.\",\"name\":\"Example Workflow\",\"published\":true,\"spec\":{\"connectionEnvs\":[{\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}],\"env\":\"default\"}],\"handle\":\"my-handle\",\"inputSchema\":{\"parameters\":[{\"defaultValue\":\"default\",\"name\":\"input\",\"type\":\"STRING\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]},\"steps\":[{\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"name\":\"Step1\",\"outboundEdges\":[{\"branchName\":\"main\",\"nextStepName\":\"Step2\"}],\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}]},{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"Step2\"}],\"triggers\":[{\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}},\"startStepNames\":[\"Step1\"]},{\"githubWebhookTrigger\":{},\"startStepNames\":[\"Step1\"]}]},\"tags\":[\"team:infra\",\"service:monitoring\",\"foo:bar\"]},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"9c250b4e-3d0c-433d-84e6-60a6bde81adb\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:34:39.501028Z\",\"description\":\"A sample workflow.\",\"name\":\"Example Workflow\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"Step1\"],\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}}},{\"startStepNames\":[\"Step1\"],\"githubWebhookTrigger\":{}}],\"steps\":[{\"name\":\"Step1\",\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}],\"outboundEdges\":[{\"nextStepName\":\"Step2\",\"branchName\":\"main\"}]},{\"name\":\"Step2\",\"actionId\":\"com.datadoghq.core.noop\"}],\"handle\":\"my-handle\",\"connectionEnvs\":[{\"env\":\"default\",\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}]}],\"inputSchema\":{\"parameters\":[{\"name\":\"input\",\"type\":\"STRING\",\"defaultValue\":\"default\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]}},\"tags\":[\"foo:bar\",\"team:infra\",\"service:monitoring\"],\"updatedAt\":\"2025-02-04T04:34:39.501028Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + "message": "Created" + } + 
}, + "recorded_at": "Tue, 04 Feb 2025 04:34:39 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/9c250b4e-3d0c-433d-84e6-60a6bde81adb" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:34:39 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.frozen new file mode 100644 index 000000000..8fc4fe650 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:32.931Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.json new file mode 100644 index 000000000..a2043d831 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Bad-request-response.json @@ -0,0 +1,39 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"name\":\"Too many characters in description\",\"spec\":{}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow description exceeds size limit of [300] characters\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 400, + "message": "Bad Request" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:32 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.frozen new file mode 100644 index 000000000..5a61727b7 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.114Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.json new file mode 100644 index 000000000..12a07af58 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Create-a-Workflow-returns-Successfully-created-a-workflow-response.json @@ -0,0 +1,63 @@ +{ + "http_interactions": [ + { + "request": { + 
"body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"A sample workflow.\",\"name\":\"Example Workflow\",\"published\":true,\"spec\":{\"connectionEnvs\":[{\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}],\"env\":\"default\"}],\"handle\":\"my-handle\",\"inputSchema\":{\"parameters\":[{\"defaultValue\":\"default\",\"name\":\"input\",\"type\":\"STRING\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]},\"steps\":[{\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"name\":\"Step1\",\"outboundEdges\":[{\"branchName\":\"main\",\"nextStepName\":\"Step2\"}],\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}]},{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"Step2\"}],\"triggers\":[{\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}},\"startStepNames\":[\"Step1\"]},{\"githubWebhookTrigger\":{},\"startStepNames\":[\"Step1\"]}]},\"tags\":[\"team:infra\",\"service:monitoring\",\"foo:bar\"]},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"cb8dd95a-1463-495d-99d6-95efcd86e74e\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:33.224986Z\",\"description\":\"A sample workflow.\",\"name\":\"Example Workflow\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"Step1\"],\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}}},{\"startStepNames\":[\"Step1\"],\"githubWebhookTrigger\":{}}],\"steps\":[{\"name\":\"Step1\",\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}],\"outboundEdges\":[{\"nextStepName\":\"Step2\",\"branchName\":\"main\"}]},{\"name\":\"Step2\",\"actionId\":\"com.datadoghq.core.noop\"}],\"handle\":\"my-handle\",\"connectionEnvs\":[{\"env\":\"default\",\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}]}],\"inputSchema\":{\"parameters\":[{\"name\":\"input\",\"type\":\"STRING\",\"defaultValue\":\"default\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]}},\"tags\":[\"foo:bar\",\"team:infra\",\"service:monitoring\"],\"updatedAt\":\"2025-02-04T04:58:33.224986Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + "message": "Created" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/cb8dd95a-1463-495d-99d6-95efcd86e74e" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff 
--git a/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.frozen new file mode 100644 index 000000000..8ffae6cef --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.338Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.json new file mode 100644 index 000000000..e39889918 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Not-found-response.json @@ -0,0 +1,33 @@ +{ + "http_interactions": [ + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/aaa11111-aa11-aa11-aaaa-aaaaaa111111" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow not found\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 404, + "message": "Not Found" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.frozen new file mode 100644 index 000000000..eb026ef17 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.426Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.json new file mode 100644 index 000000000..165e4aa9e --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Delete-an-existing-Workflow-returns-Successfully-deleted-a-workflow-response.json @@ -0,0 +1,91 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"name\":\"Cassette Workflow x-given\",\"spec\":{\"steps\":[{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"No_op\"}],\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}]}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"94300e37-bf51-498b-b56b-83d17ef45e6b\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:33.538848Z\",\"description\":\"\",\"name\":\"Cassette Workflow 
x-given\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}],\"steps\":[{\"name\":\"No_op\",\"actionId\":\"com.datadoghq.core.noop\"}]},\"tags\":[],\"updatedAt\":\"2025-02-04T04:58:33.538848Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + "message": "Created" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/94300e37-bf51-498b-b56b-83d17ef45e6b" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/94300e37-bf51-498b-b56b-83d17ef45e6b" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow not found\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 404, + "message": "Not Found" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.frozen new file mode 100644 index 000000000..728a4fc6d --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.693Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.json new file mode 100644 index 000000000..be46e4891 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Bad-request-response.json @@ -0,0 +1,33 @@ +{ + "http_interactions": [ + { + "request": { + "body": "", + "headers": { + "Accept": [ + "application/json" + ] + }, + "method": "get", + "uri": "https://api.datadoghq.com/api/v2/workflows/bad-format" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"status\":\"400\",\"title\":\"Invalid Parameter\",\"detail\":\"invalid parameter \\\"workflowId\\\" in \\\"path\\\"; expected type \\\"uuid\\\"\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 400, + "message": "Bad Request" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.frozen new file mode 100644 index 000000000..1192c2758 --- /dev/null +++ 
b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.759Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.json new file mode 100644 index 000000000..3c7292428 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Not-found-response.json @@ -0,0 +1,33 @@ +{ + "http_interactions": [ + { + "request": { + "body": "", + "headers": { + "Accept": [ + "application/json" + ] + }, + "method": "get", + "uri": "https://api.datadoghq.com/api/v2/workflows/aaa11111-aa11-aa11-aaaa-aaaaaa111111" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow not found\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 404, + "message": "Not Found" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.frozen new file mode 100644 index 000000000..c1bb2bc7d --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:33.829Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.json new file mode 100644 index 000000000..eee12b3cb --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Get-an-existing-Workflow-returns-Successfully-got-a-workflow-response.json @@ -0,0 +1,91 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"name\":\"Cassette Workflow x-given\",\"spec\":{\"steps\":[{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"No_op\"}],\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}]}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"6ffa14c3-816b-4463-9386-cb8eb5b73438\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:33.952654Z\",\"description\":\"\",\"name\":\"Cassette Workflow x-given\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}],\"steps\":[{\"name\":\"No_op\",\"actionId\":\"com.datadoghq.core.noop\"}]},\"tags\":[],\"updatedAt\":\"2025-02-04T04:58:33.952654Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + 
"message": "Created" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "application/json" + ] + }, + "method": "get", + "uri": "https://api.datadoghq.com/api/v2/workflows/6ffa14c3-816b-4463-9386-cb8eb5b73438" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"6ffa14c3-816b-4463-9386-cb8eb5b73438\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:33.952654Z\",\"description\":\"\",\"name\":\"Cassette Workflow x-given\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}],\"steps\":[{\"name\":\"No_op\",\"actionId\":\"com.datadoghq.core.noop\"}]},\"tags\":[],\"updatedAt\":\"2025-02-04T04:58:33.952654Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 200, + "message": "OK" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/6ffa14c3-816b-4463-9386-cb8eb5b73438" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:33 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.frozen new file mode 100644 index 000000000..d1b627bbd --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:34.162Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.json new file mode 100644 index 000000000..1c3854f00 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Bad-request-response.json @@ -0,0 +1,97 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"name\":\"Cassette Workflow x-given\",\"spec\":{\"steps\":[{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"No_op\"}],\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}]}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"748a9d1b-a9a5-415b-9360-3d8374329957\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:34.263325Z\",\"description\":\"\",\"name\":\"Cassette Workflow 
x-given\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}],\"steps\":[{\"name\":\"No_op\",\"actionId\":\"com.datadoghq.core.noop\"}]},\"tags\":[],\"updatedAt\":\"2025-02-04T04:58:34.263325Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + "message": "Created" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + }, + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"name\":\"Too many characters in description\",\"spec\":{}},\"id\":\"22222222-2222-2222-2222-222222222222\",\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "patch", + "uri": "https://api.datadoghq.com/api/v2/workflows/748a9d1b-a9a5-415b-9360-3d8374329957" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow description exceeds size limit of [300] characters\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 400, + "message": "Bad Request" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/748a9d1b-a9a5-415b-9360-3d8374329957" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.frozen new file mode 100644 index 000000000..41cf38523 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:34.413Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.json new file mode 100644 index 000000000..972f99f25 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Not-found-response.json @@ -0,0 +1,39 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"A sample workflow.\",\"name\":\"Example 
Workflow\",\"published\":true,\"spec\":{\"connectionEnvs\":[{\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}],\"env\":\"default\"}],\"handle\":\"my-handle\",\"inputSchema\":{\"parameters\":[{\"defaultValue\":\"default\",\"name\":\"input\",\"type\":\"STRING\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]},\"steps\":[{\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"name\":\"Step1\",\"outboundEdges\":[{\"branchName\":\"main\",\"nextStepName\":\"Step2\"}],\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}]},{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"Step2\"}],\"triggers\":[{\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}},\"startStepNames\":[\"Step1\"]},{\"githubWebhookTrigger\":{},\"startStepNames\":[\"Step1\"]}]},\"tags\":[\"team:infra\",\"service:monitoring\",\"foo:bar\"]},\"id\":\"22222222-2222-2222-2222-222222222222\",\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "patch", + "uri": "https://api.datadoghq.com/api/v2/workflows/aaa11111-aa11-aa11-aaaa-aaaaaa111111" + }, + "response": { + "body": { + "string": "{\"errors\":[{\"detail\":\"workflow not found\"}]}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 404, + "message": "Not Found" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.frozen b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.frozen new file mode 100644 index 000000000..e1127f3d4 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.frozen @@ -0,0 +1 @@ +2025-02-04T04:58:34.496Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.json b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.json new file mode 100644 index 000000000..087f42767 --- /dev/null +++ b/tests/scenarios/cassettes/v2/workflow_automation/Update-an-existing-Workflow-returns-Successfully-updated-a-workflow-response.json @@ -0,0 +1,97 @@ +{ + "http_interactions": [ + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"name\":\"Cassette Workflow x-given\",\"spec\":{\"steps\":[{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"No_op\"}],\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}]}},\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "post", + "uri": "https://api.datadoghq.com/api/v2/workflows" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"b7a23134-f18e-4896-acd6-73c9b8b813a5\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:34.597357Z\",\"description\":\"\",\"name\":\"Cassette Workflow 
x-given\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"No_op\"],\"workflowTrigger\":{}}],\"steps\":[{\"name\":\"No_op\",\"actionId\":\"com.datadoghq.core.noop\"}]},\"tags\":[],\"updatedAt\":\"2025-02-04T04:58:34.597357Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 201, + "message": "Created" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + }, + { + "request": { + "body": { + "string": "{\"data\":{\"attributes\":{\"description\":\"A sample workflow.\",\"name\":\"Example Workflow\",\"published\":true,\"spec\":{\"connectionEnvs\":[{\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}],\"env\":\"default\"}],\"handle\":\"my-handle\",\"inputSchema\":{\"parameters\":[{\"defaultValue\":\"default\",\"name\":\"input\",\"type\":\"STRING\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]},\"steps\":[{\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"name\":\"Step1\",\"outboundEdges\":[{\"branchName\":\"main\",\"nextStepName\":\"Step2\"}],\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}]},{\"actionId\":\"com.datadoghq.core.noop\",\"name\":\"Step2\"}],\"triggers\":[{\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}},\"startStepNames\":[\"Step1\"]},{\"githubWebhookTrigger\":{},\"startStepNames\":[\"Step1\"]}]},\"tags\":[\"team:infra\",\"service:monitoring\",\"foo:bar\"]},\"id\":\"22222222-2222-2222-2222-222222222222\",\"type\":\"workflows\"}}", + "encoding": null + }, + "headers": { + "Accept": [ + "application/json" + ], + "Content-Type": [ + "application/json" + ] + }, + "method": "patch", + "uri": "https://api.datadoghq.com/api/v2/workflows/b7a23134-f18e-4896-acd6-73c9b8b813a5" + }, + "response": { + "body": { + "string": "{\"data\":{\"id\":\"b7a23134-f18e-4896-acd6-73c9b8b813a5\",\"type\":\"workflows\",\"attributes\":{\"createdAt\":\"2025-02-04T04:58:34.597357Z\",\"description\":\"A sample workflow.\",\"name\":\"Example 
Workflow\",\"published\":true,\"spec\":{\"triggers\":[{\"startStepNames\":[\"Step1\"],\"monitorTrigger\":{\"rateLimit\":{\"count\":1,\"interval\":\"3600s\"}}},{\"startStepNames\":[\"Step1\"],\"githubWebhookTrigger\":{}}],\"steps\":[{\"name\":\"Step1\",\"actionId\":\"com.datadoghq.dd.monitor.listMonitors\",\"connectionLabel\":\"INTEGRATION_DATADOG\",\"parameters\":[{\"name\":\"tags\",\"value\":\"service:monitoring\"}],\"outboundEdges\":[{\"nextStepName\":\"Step2\",\"branchName\":\"main\"}]},{\"name\":\"Step2\",\"actionId\":\"com.datadoghq.core.noop\"}],\"handle\":\"my-handle\",\"connectionEnvs\":[{\"env\":\"default\",\"connections\":[{\"connectionId\":\"11111111-1111-1111-1111-111111111111\",\"label\":\"INTEGRATION_DATADOG\"}]}],\"inputSchema\":{\"parameters\":[{\"name\":\"input\",\"type\":\"STRING\",\"defaultValue\":\"default\"}]},\"outputSchema\":{\"parameters\":[{\"name\":\"output\",\"type\":\"ARRAY_OBJECT\",\"value\":\"outputValue\"}]}},\"tags\":[\"foo:bar\",\"team:infra\",\"service:monitoring\"],\"updatedAt\":\"2025-02-04T04:58:34.728719Z\"},\"relationships\":{\"creator\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}},\"owner\":{\"data\":{\"id\":\"3ad549bf-eba0-11e9-a77a-0705486660d0\",\"type\":\"users\"}}}}}", + "encoding": null + }, + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "status": { + "code": 200, + "message": "OK" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + }, + { + "request": { + "body": "", + "headers": { + "Accept": [ + "*/*" + ] + }, + "method": "delete", + "uri": "https://api.datadoghq.com/api/v2/workflows/b7a23134-f18e-4896-acd6-73c9b8b813a5" + }, + "response": { + "body": { + "string": "", + "encoding": null + }, + "headers": {}, + "status": { + "code": 204, + "message": "No Content" + } + }, + "recorded_at": "Tue, 04 Feb 2025 04:58:34 GMT" + } + ], + "recorded_with": "VCR 6.0.0" +} \ No newline at end of file diff --git a/tests/scenarios/features/v2/given.json b/tests/scenarios/features/v2/given.json index b33ac88a3..57348eb2a 100644 --- a/tests/scenarios/features/v2/given.json +++ b/tests/scenarios/features/v2/given.json @@ -837,5 +837,17 @@ "key": "user", "tag": "Users", "operationId": "CreateUser" + }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\": {\n \"type\": \"workflows\",\n \"attributes\": {\n \"name\": \"Cassette Workflow x-given\",\n \"spec\": {\n \"triggers\": [\n {\n \"startStepNames\": [\"No_op\"],\n \"workflowTrigger\": {}\n }\n ],\n \"steps\": [\n {\n \"name\": \"No_op\",\n \"actionId\": \"com.datadoghq.core.noop\"\n }\n ]\n }\n }\n }\n}" + } + ], + "step": "there is a valid \"workflow\" in the system", + "key": "workflow", + "tag": "Workflow Automation", + "operationId": "CreateWorkflow" } ] diff --git a/tests/scenarios/features/v2/undo.json b/tests/scenarios/features/v2/undo.json index 03717048e..288ca835f 100644 --- a/tests/scenarios/features/v2/undo.json +++ b/tests/scenarios/features/v2/undo.json @@ -2970,6 +2970,37 @@ "type": "safe" } }, + "CreateWorkflow": { + "tag": "Workflow Automation", + "undo": { + "operationId": "DeleteWorkflow", + "parameters": [ + { + "name": "workflow_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "DeleteWorkflow": { + "tag": "Workflow Automation", + "undo": { + "type": "idempotent" + } + }, + "GetWorkflow": { + "tag": "Workflow Automation", + "undo": { + "type": "safe" + } + }, + "UpdateWorkflow": { + "tag": "Workflow Automation", + "undo": { + "type": "idempotent" + } + }, "ListWorkflowInstances": { 
"tag": "Workflow Automation", "undo": { diff --git a/tests/scenarios/features/v2/workflow_automation.feature b/tests/scenarios/features/v2/workflow_automation.feature index fe979f72d..6db797feb 100644 --- a/tests/scenarios/features/v2/workflow_automation.feature +++ b/tests/scenarios/features/v2/workflow_automation.feature @@ -1,7 +1,12 @@ @endpoint(workflow-automation) @endpoint(workflow-automation-v2) Feature: Workflow Automation - Automate your teams operational processes with Datadog Workflow - Automation. + Datadog Workflow Automation allows you to automate your end-to-end + processes by connecting Datadog with the rest of your tech stack. Build + workflows to auto-remediate your alerts, streamline your incident and + security processes, and reduce manual toil. Workflow Automation supports + over 1,000+ OOTB actions, including AWS, JIRA, ServiceNow, GitHub, and + OpenAI. Learn more in our Workflow Automation docs + [here](https://docs.datadoghq.com/service_management/workflows/). Background: Given a valid "apiKeyAuth" key in the system @@ -32,6 +37,35 @@ Feature: Workflow Automation When the request is sent Then the response status is 200 OK + @team:DataDog/workflow-automation-dev + Scenario: Create a Workflow returns "Bad request" response + Given new "CreateWorkflow" request + And body with value {"data": {"attributes": {"name": "Too many characters in description", "description": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "spec": {}}, "type": "workflows"}} + When the request is sent + Then the response status is 400 Bad request + + @team:DataDog/workflow-automation-dev + Scenario: Create a Workflow returns "Successfully created a workflow." response + Given new "CreateWorkflow" request + And body with value {"data": {"attributes": {"description": "A sample workflow.", "name": "Example Workflow", "published": true, "spec": {"connectionEnvs": [{"connections": [{"connectionId": "11111111-1111-1111-1111-111111111111", "label": "INTEGRATION_DATADOG"}], "env": "default"}], "handle": "my-handle", "inputSchema": {"parameters": [{"defaultValue": "default", "name": "input", "type": "STRING"}]}, "outputSchema": {"parameters": [{"name": "output", "type": "ARRAY_OBJECT", "value": "outputValue"}]}, "steps": [{"actionId": "com.datadoghq.dd.monitor.listMonitors", "connectionLabel": "INTEGRATION_DATADOG", "name": "Step1", "outboundEdges": [{"branchName": "main", "nextStepName": "Step2"}], "parameters": [{"name": "tags", "value": "service:monitoring"}]}, {"actionId": "com.datadoghq.core.noop", "name": "Step2"}], "triggers": [{"monitorTrigger": {"rateLimit": {"count": 1, "interval": "3600s"}}, "startStepNames": ["Step1"]}, {"startStepNames": ["Step1"], "githubWebhookTrigger": {}}]}, "tags": ["team:infra", "service:monitoring", "foo:bar"]}, "type": "workflows"}} + When the request is sent + Then the response status is 201 Successfully created a workflow. 
+ + @team:DataDog/workflow-automation-dev + Scenario: Delete an existing Workflow returns "Not found" response + Given new "DeleteWorkflow" request + And request contains "workflow_id" parameter with value "aaa11111-aa11-aa11-aaaa-aaaaaa111111" + When the request is sent + Then the response status is 404 Not found + + @team:DataDog/workflow-automation-dev + Scenario: Delete an existing Workflow returns "Successfully deleted a workflow." response + Given there is a valid "workflow" in the system + And new "DeleteWorkflow" request + And request contains "workflow_id" parameter from "workflow.data.id" + When the request is sent + Then the response status is 204 Successfully deleted a workflow. + @replay-only @team:DataDog/workflow-automation-dev Scenario: Execute a workflow returns "Bad Request" response Given new "CreateWorkflowInstance" request @@ -72,6 +106,28 @@ Feature: Workflow Automation When the request is sent Then the response status is 200 OK + @team:DataDog/workflow-automation-dev + Scenario: Get an existing Workflow returns "Bad request" response + Given new "GetWorkflow" request + And request contains "workflow_id" parameter with value "bad-format" + When the request is sent + Then the response status is 400 Bad request + + @team:DataDog/workflow-automation-dev + Scenario: Get an existing Workflow returns "Not found" response + Given new "GetWorkflow" request + And request contains "workflow_id" parameter with value "aaa11111-aa11-aa11-aaaa-aaaaaa111111" + When the request is sent + Then the response status is 404 Not found + + @team:DataDog/workflow-automation-dev + Scenario: Get an existing Workflow returns "Successfully got a workflow." response + Given there is a valid "workflow" in the system + And new "GetWorkflow" request + And request contains "workflow_id" parameter from "workflow.data.id" + When the request is sent + Then the response status is 200 Successfully got a workflow. 
+ @replay-only @team:DataDog/workflow-automation-dev Scenario: List workflow instances returns "Bad Request" response Given new "ListWorkflowInstances" request @@ -85,3 +141,29 @@ Feature: Workflow Automation And request contains "workflow_id" parameter with value "ccf73164-1998-4785-a7a3-8d06c7e5f558" When the request is sent Then the response status is 200 OK + + @team:DataDog/workflow-automation-dev + Scenario: Update an existing Workflow returns "Bad request" response + Given there is a valid "workflow" in the system + And new "UpdateWorkflow" request + And request contains "workflow_id" parameter from "workflow.data.id" + And body with value {"data": {"attributes": {"name": "Too many characters in description", "description": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "spec": {}}, "id": "22222222-2222-2222-2222-222222222222", "type": "workflows"}} + When the request is sent + Then the response status is 400 Bad request + + @team:DataDog/workflow-automation-dev + Scenario: Update an existing Workflow returns "Not found" response + Given new "UpdateWorkflow" request + And request contains "workflow_id" parameter with value "aaa11111-aa11-aa11-aaaa-aaaaaa111111" + And body with value {"data": {"attributes": {"description": "A sample workflow.", "name": "Example Workflow", "published": true, "spec": {"connectionEnvs": [{"connections": [{"connectionId": "11111111-1111-1111-1111-111111111111", "label": "INTEGRATION_DATADOG"}], "env": "default"}], "handle": "my-handle", "inputSchema": {"parameters": [{"defaultValue": "default", "name": "input", "type": "STRING"}]}, "outputSchema": {"parameters": [{"name": "output", "type": "ARRAY_OBJECT", "value": "outputValue"}]}, "steps": [{"actionId": "com.datadoghq.dd.monitor.listMonitors", "connectionLabel": "INTEGRATION_DATADOG", "name": "Step1", "outboundEdges": [{"branchName": "main", "nextStepName": "Step2"}], "parameters": [{"name": "tags", "value": "service:monitoring"}]}, {"actionId": "com.datadoghq.core.noop", "name": "Step2"}], "triggers": [{"monitorTrigger": {"rateLimit": {"count": 1, "interval": "3600s"}}, "startStepNames": ["Step1"]}, {"startStepNames": ["Step1"], "githubWebhookTrigger": {}}]}, "tags": ["team:infra", "service:monitoring", "foo:bar"]}, "id": "22222222-2222-2222-2222-222222222222", "type": "workflows"}} + When the request is sent + Then the response status is 404 Not found + + @team:DataDog/workflow-automation-dev + Scenario: Update an existing Workflow returns "Successfully updated a workflow." 
response + Given there is a valid "workflow" in the system + And new "UpdateWorkflow" request + And request contains "workflow_id" parameter from "workflow.data.id" + And body with value {"data": {"attributes": {"description": "A sample workflow.", "name": "Example Workflow", "published": true, "spec": {"connectionEnvs": [{"connections": [{"connectionId": "11111111-1111-1111-1111-111111111111", "label": "INTEGRATION_DATADOG"}], "env": "default"}], "handle": "my-handle", "inputSchema": {"parameters": [{"defaultValue": "default", "name": "input", "type": "STRING"}]}, "outputSchema": {"parameters": [{"name": "output", "type": "ARRAY_OBJECT", "value": "outputValue"}]}, "steps": [{"actionId": "com.datadoghq.dd.monitor.listMonitors", "connectionLabel": "INTEGRATION_DATADOG", "name": "Step1", "outboundEdges": [{"branchName": "main", "nextStepName": "Step2"}], "parameters": [{"name": "tags", "value": "service:monitoring"}]}, {"actionId": "com.datadoghq.core.noop", "name": "Step2"}], "triggers": [{"monitorTrigger": {"rateLimit": {"count": 1, "interval": "3600s"}}, "startStepNames": ["Step1"]}, {"startStepNames": ["Step1"], "githubWebhookTrigger": {}}]}, "tags": ["team:infra", "service:monitoring", "foo:bar"]}, "id": "22222222-2222-2222-2222-222222222222", "type": "workflows"}} + When the request is sent + Then the response status is 200 Successfully updated a workflow. diff --git a/tests/scenarios/function_mappings.rs b/tests/scenarios/function_mappings.rs index 745832d86..da5c3c01b 100644 --- a/tests/scenarios/function_mappings.rs +++ b/tests/scenarios/function_mappings.rs @@ -3300,6 +3300,18 @@ pub fn collect_function_calls(world: &mut DatadogWorld) { "v2.ListUserPermissions".into(), test_v2_list_user_permissions, ); + world + .function_mappings + .insert("v2.CreateWorkflow".into(), test_v2_create_workflow); + world + .function_mappings + .insert("v2.DeleteWorkflow".into(), test_v2_delete_workflow); + world + .function_mappings + .insert("v2.GetWorkflow".into(), test_v2_get_workflow); + world + .function_mappings + .insert("v2.UpdateWorkflow".into(), test_v2_update_workflow); world.function_mappings.insert( "v2.ListWorkflowInstances".into(), test_v2_list_workflow_instances, @@ -25426,6 +25438,110 @@ fn test_v2_list_user_permissions(world: &mut DatadogWorld, _parameters: &HashMap world.response.code = response.status.as_u16(); } +fn test_v2_create_workflow(world: &mut DatadogWorld, _parameters: &HashMap) { + let api = world + .api_instances + .v2_api_workflow_automation + .as_ref() + .expect("api instance not found"); + let body = serde_json::from_value(_parameters.get("body").unwrap().clone()).unwrap(); + let response = match block_on(api.create_workflow_with_http_info(body)) { + Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + +fn test_v2_delete_workflow(world: &mut DatadogWorld, _parameters: &HashMap) { + let api = world + .api_instances + .v2_api_workflow_automation + .as_ref() + .expect("api instance not found"); + let workflow_id = + serde_json::from_value(_parameters.get("workflow_id").unwrap().clone()).unwrap(); + let response = match block_on(api.delete_workflow_with_http_info(workflow_id)) { + 
Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + +fn test_v2_get_workflow(world: &mut DatadogWorld, _parameters: &HashMap) { + let api = world + .api_instances + .v2_api_workflow_automation + .as_ref() + .expect("api instance not found"); + let workflow_id = + serde_json::from_value(_parameters.get("workflow_id").unwrap().clone()).unwrap(); + let response = match block_on(api.get_workflow_with_http_info(workflow_id)) { + Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + +fn test_v2_update_workflow(world: &mut DatadogWorld, _parameters: &HashMap) { + let api = world + .api_instances + .v2_api_workflow_automation + .as_ref() + .expect("api instance not found"); + let workflow_id = + serde_json::from_value(_parameters.get("workflow_id").unwrap().clone()).unwrap(); + let body = serde_json::from_value(_parameters.get("body").unwrap().clone()).unwrap(); + let response = match block_on(api.update_workflow_with_http_info(workflow_id, body)) { + Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + fn test_v2_list_workflow_instances(world: &mut DatadogWorld, _parameters: &HashMap) { let api = world .api_instances