interface CfnFlowProps
Language | Type name
---|---
.NET | Amazon.CDK.AWS.AppFlow.CfnFlowProps |
Go | github.com/aws/aws-cdk-go/awscdk/v2/awsappflow#CfnFlowProps |
Java | software.amazon.awscdk.services.appflow.CfnFlowProps |
Python | aws_cdk.aws_appflow.CfnFlowProps |
TypeScript | aws-cdk-lib » aws_appflow » CfnFlowProps |
Properties for defining a CfnFlow.
See also: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-appflow-flow.html
Example

```ts
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import { aws_appflow as appflow } from 'aws-cdk-lib';
const cfnFlowProps: appflow.CfnFlowProps = {
destinationFlowConfigList: [{
connectorType: 'connectorType',
destinationConnectorProperties: {
customConnector: {
entityName: 'entityName',
// the properties below are optional
customProperties: {
customPropertiesKey: 'customProperties',
},
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
idFieldNames: ['idFieldNames'],
writeOperationType: 'writeOperationType',
},
eventBridge: {
object: 'object',
// the properties below are optional
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
},
lookoutMetrics: {
object: 'object',
},
marketo: {
object: 'object',
// the properties below are optional
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
},
redshift: {
intermediateBucketName: 'intermediateBucketName',
object: 'object',
// the properties below are optional
bucketPrefix: 'bucketPrefix',
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
},
s3: {
bucketName: 'bucketName',
// the properties below are optional
bucketPrefix: 'bucketPrefix',
s3OutputFormatConfig: {
aggregationConfig: {
aggregationType: 'aggregationType',
targetFileSize: 123,
},
fileType: 'fileType',
prefixConfig: {
pathPrefixHierarchy: ['pathPrefixHierarchy'],
prefixFormat: 'prefixFormat',
prefixType: 'prefixType',
},
preserveSourceDataTyping: false,
},
},
salesforce: {
object: 'object',
// the properties below are optional
dataTransferApi: 'dataTransferApi',
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
idFieldNames: ['idFieldNames'],
writeOperationType: 'writeOperationType',
},
sapoData: {
objectPath: 'objectPath',
// the properties below are optional
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
idFieldNames: ['idFieldNames'],
successResponseHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
},
writeOperationType: 'writeOperationType',
},
snowflake: {
intermediateBucketName: 'intermediateBucketName',
object: 'object',
// the properties below are optional
bucketPrefix: 'bucketPrefix',
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
},
upsolver: {
bucketName: 'bucketName',
s3OutputFormatConfig: {
prefixConfig: {
pathPrefixHierarchy: ['pathPrefixHierarchy'],
prefixFormat: 'prefixFormat',
prefixType: 'prefixType',
},
// the properties below are optional
aggregationConfig: {
aggregationType: 'aggregationType',
targetFileSize: 123,
},
fileType: 'fileType',
},
// the properties below are optional
bucketPrefix: 'bucketPrefix',
},
zendesk: {
object: 'object',
// the properties below are optional
errorHandlingConfig: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
failOnFirstError: false,
},
idFieldNames: ['idFieldNames'],
writeOperationType: 'writeOperationType',
},
},
// the properties below are optional
apiVersion: 'apiVersion',
connectorProfileName: 'connectorProfileName',
}],
flowName: 'flowName',
sourceFlowConfig: {
connectorType: 'connectorType',
sourceConnectorProperties: {
amplitude: {
object: 'object',
},
customConnector: {
entityName: 'entityName',
// the properties below are optional
customProperties: {
customPropertiesKey: 'customProperties',
},
dataTransferApi: {
name: 'name',
type: 'type',
},
},
datadog: {
object: 'object',
},
dynatrace: {
object: 'object',
},
googleAnalytics: {
object: 'object',
},
inforNexus: {
object: 'object',
},
marketo: {
object: 'object',
},
pardot: {
object: 'object',
},
s3: {
bucketName: 'bucketName',
bucketPrefix: 'bucketPrefix',
// the properties below are optional
s3InputFormatConfig: {
s3InputFileType: 's3InputFileType',
},
},
salesforce: {
object: 'object',
// the properties below are optional
dataTransferApi: 'dataTransferApi',
enableDynamicFieldUpdate: false,
includeDeletedRecords: false,
},
sapoData: {
objectPath: 'objectPath',
// the properties below are optional
paginationConfig: {
maxPageSize: 123,
},
parallelismConfig: {
maxParallelism: 123,
},
},
serviceNow: {
object: 'object',
},
singular: {
object: 'object',
},
slack: {
object: 'object',
},
trendmicro: {
object: 'object',
},
veeva: {
object: 'object',
// the properties below are optional
documentType: 'documentType',
includeAllVersions: false,
includeRenditions: false,
includeSourceFiles: false,
},
zendesk: {
object: 'object',
},
},
// the properties below are optional
apiVersion: 'apiVersion',
connectorProfileName: 'connectorProfileName',
incrementalPullConfig: {
datetimeTypeFieldName: 'datetimeTypeFieldName',
},
},
tasks: [{
sourceFields: ['sourceFields'],
taskType: 'taskType',
// the properties below are optional
connectorOperator: {
amplitude: 'amplitude',
customConnector: 'customConnector',
datadog: 'datadog',
dynatrace: 'dynatrace',
googleAnalytics: 'googleAnalytics',
inforNexus: 'inforNexus',
marketo: 'marketo',
pardot: 'pardot',
s3: 's3',
salesforce: 'salesforce',
sapoData: 'sapoData',
serviceNow: 'serviceNow',
singular: 'singular',
slack: 'slack',
trendmicro: 'trendmicro',
veeva: 'veeva',
zendesk: 'zendesk',
},
destinationField: 'destinationField',
taskProperties: [{
key: 'key',
value: 'value',
}],
}],
triggerConfig: {
triggerType: 'triggerType',
// the properties below are optional
triggerProperties: {
scheduleExpression: 'scheduleExpression',
// the properties below are optional
dataPullMode: 'dataPullMode',
firstExecutionFrom: 123,
flowErrorDeactivationThreshold: 123,
scheduleEndTime: 123,
scheduleOffset: 123,
scheduleStartTime: 123,
timeZone: 'timeZone',
},
},
// the properties below are optional
description: 'description',
flowStatus: 'flowStatus',
kmsArn: 'kmsArn',
metadataCatalogConfig: {
glueDataCatalog: {
databaseName: 'databaseName',
roleArn: 'roleArn',
tablePrefix: 'tablePrefix',
},
},
tags: [{
key: 'key',
value: 'value',
}],
};
```
Properties
Name | Type | Description
---|---|---
destinationFlowConfigList | IResolvable \| IResolvable \| DestinationFlowConfigProperty[] | The configuration that controls how Amazon AppFlow places data in the destination connector.
flowName | string | The specified name of the flow.
sourceFlowConfig | IResolvable \| SourceFlowConfigProperty | Contains information about the configuration of the source connector used in the flow.
tasks | IResolvable \| IResolvable \| TaskProperty[] | A list of tasks that Amazon AppFlow performs while transferring the data in the flow run.
triggerConfig | IResolvable \| TriggerConfigProperty | The trigger settings that determine how and when Amazon AppFlow runs the specified flow.
description? | string | A user-entered description of the flow.
flowStatus? | string | Sets the status of the flow.
kmsArn? | string | The ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption.
metadataCatalogConfig? | IResolvable \| MetadataCatalogConfigProperty | Specifies the configuration that Amazon AppFlow uses when it catalogs your data.
tags? | CfnTag[] | The tags used to organize, track, or control access for your flow.
destinationFlowConfigList
Type: IResolvable | IResolvable | DestinationFlowConfigProperty[]
The configuration that controls how Amazon AppFlow places data in the destination connector.
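For instance, a minimal destination entry that writes flow output to an existing S3 bucket could look like the following sketch (the bucket name is a placeholder you would replace):

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Illustrative S3 destination that writes flow output as Parquet files.
const s3Destination: appflow.CfnFlow.DestinationFlowConfigProperty = {
  connectorType: 'S3',
  destinationConnectorProperties: {
    s3: {
      bucketName: 'my-appflow-output-bucket', // placeholder; the bucket must already exist
      s3OutputFormatConfig: {
        fileType: 'PARQUET',
      },
    },
  },
};
```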
flowName
Type: string
The specified name of the flow.
Spaces are not allowed. Use underscores (_) or hyphens (-) only.
sourceFlowConfig
Type: IResolvable | SourceFlowConfigProperty
Contains information about the configuration of the source connector used in the flow.
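As an illustration, a Salesforce source that reads the standard Account object through an existing connector profile might be declared like this (the profile name is a placeholder; the connector profile must be created before the flow):

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Illustrative Salesforce source configuration.
const salesforceSource: appflow.CfnFlow.SourceFlowConfigProperty = {
  connectorType: 'Salesforce',
  connectorProfileName: 'my-salesforce-profile', // placeholder
  sourceConnectorProperties: {
    salesforce: {
      object: 'Account',
      includeDeletedRecords: false,
    },
  },
};
```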
tasks
Type: IResolvable | IResolvable | TaskProperty[]
A list of tasks that Amazon AppFlow performs while transferring the data in the flow run.
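A common pattern is a Filter task with the PROJECTION operator that selects the source fields, followed by one Map task per field. A minimal sketch for a single Email field pulled from Salesforce (field names are illustrative):

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Illustrative task list: project the fields to transfer, then map one field through unchanged.
const tasks: appflow.CfnFlow.TaskProperty[] = [
  {
    taskType: 'Filter',
    sourceFields: ['Email'],
    connectorOperator: { salesforce: 'PROJECTION' },
  },
  {
    taskType: 'Map',
    sourceFields: ['Email'],
    destinationField: 'Email',
    taskProperties: [
      { key: 'SOURCE_DATA_TYPE', value: 'string' },
      { key: 'DESTINATION_DATA_TYPE', value: 'string' },
    ],
  },
];
```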
triggerConfig
Type: IResolvable | TriggerConfigProperty
The trigger settings that determine how and when Amazon AppFlow runs the specified flow.
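For example, a scheduled trigger that performs an incremental pull once per hour could be expressed as follows (the rate expression and pull mode are illustrative):

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Illustrative scheduled trigger: pull new and changed records every hour.
const triggerConfig: appflow.CfnFlow.TriggerConfigProperty = {
  triggerType: 'Scheduled',
  triggerProperties: {
    scheduleExpression: 'rate(1 hour)',
    dataPullMode: 'Incremental',
  },
};
```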
description?
Type: string (optional)
A user-entered description of the flow.
flowStatus?
Type: string (optional)
Sets the status of the flow. You can specify one of the following values:
- Active - The flow runs based on the trigger settings that you defined. Active scheduled flows run as scheduled, and active event-triggered flows run when the specified change event occurs. However, active on-demand flows run only when you manually start them by using Amazon AppFlow.
- Suspended - You can use this option to deactivate an active flow. Scheduled and event-triggered flows will cease to run until you reactivate them. This value only affects scheduled and event-triggered flows. It has no effect for on-demand flows.
If you omit the FlowStatus parameter, Amazon AppFlow creates the flow with a default status. The default status for on-demand flows is Active. The default status for scheduled and event-triggered flows is Draft, which means they’re not yet active.
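For instance, to create a scheduled flow that stays inactive until you explicitly activate it, you could set the status to Suspended; a sketch, assuming cfnFlowProps is the props object from the example above:

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Create the flow in a suspended state; switch flowStatus to 'Active' later to start it.
const suspendedFlowProps: appflow.CfnFlowProps = {
  ...cfnFlowProps, // assumed to be defined as in the example above
  flowStatus: 'Suspended',
};
```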
kmsArn?
Type: string (optional)
The ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption.
This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
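One way to supply a customer managed key from the same CDK app is to create it and pass its ARN; a sketch, assuming an existing Stack named stack and the cfnFlowProps object from the example above:

```ts
import { aws_appflow as appflow, aws_kms as kms } from 'aws-cdk-lib';

// Customer managed KMS key for AppFlow encryption (assumes `stack` is an existing Stack).
const flowKey = new kms.Key(stack, 'FlowEncryptionKey');

const encryptedFlowProps: appflow.CfnFlowProps = {
  ...cfnFlowProps, // assumed to be defined as in the example above
  kmsArn: flowKey.keyArn, // resolves to the key's ARN at deployment time
};
```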
metadataCatalogConfig?
Type: IResolvable | MetadataCatalogConfigProperty (optional)
Specifies the configuration that Amazon AppFlow uses when it catalogs your data.
When Amazon AppFlow catalogs your data, it stores metadata in a data catalog.
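For instance, to register flow output in an AWS Glue Data Catalog database, you might supply a configuration like the following sketch (the database name and role ARN are placeholders, and the role must allow Amazon AppFlow to write to the catalog):

```ts
import { aws_appflow as appflow } from 'aws-cdk-lib';

// Hypothetical Glue Data Catalog configuration; all values are placeholders.
const metadataCatalogConfig: appflow.CfnFlow.MetadataCatalogConfigProperty = {
  glueDataCatalog: {
    databaseName: 'appflow_catalog_db',
    roleArn: 'arn:aws:iam::111122223333:role/AppFlowGlueRole',
    tablePrefix: 'flow_',
  },
};
```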
tags?
Type: CfnTag[] (optional)
The tags used to organize, track, or control access for your flow.