Interface CfnDeliveryStream.IExtendedS3DestinationConfigurationProperty
The ExtendedS3DestinationConfiguration property type configures an Amazon S3 destination for an Amazon Kinesis Data Firehose delivery stream.
Namespace: Amazon.CDK.AWS.KinesisFirehose
Assembly: Amazon.CDK.Lib.dll
Syntax (csharp)
public interface IExtendedS3DestinationConfigurationProperty
Syntax (vb)
Public Interface IExtendedS3DestinationConfigurationProperty
Remarks
ExampleMetadata: fixture=_generated
Examples
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
using System.Collections.Generic;
using Amazon.CDK.AWS.KinesisFirehose;

var extendedS3DestinationConfigurationProperty = new ExtendedS3DestinationConfigurationProperty {
    BucketArn = "bucketArn",
    RoleArn = "roleArn",
    // the properties below are optional
    BufferingHints = new BufferingHintsProperty {
        IntervalInSeconds = 123,
        SizeInMBs = 123
    },
    CloudWatchLoggingOptions = new CloudWatchLoggingOptionsProperty {
        Enabled = false,
        LogGroupName = "logGroupName",
        LogStreamName = "logStreamName"
    },
    CompressionFormat = "compressionFormat",
    CustomTimeZone = "customTimeZone",
    DataFormatConversionConfiguration = new DataFormatConversionConfigurationProperty {
        Enabled = false,
        InputFormatConfiguration = new InputFormatConfigurationProperty {
            Deserializer = new DeserializerProperty {
                HiveJsonSerDe = new HiveJsonSerDeProperty {
                    TimestampFormats = new [] { "timestampFormats" }
                },
                OpenXJsonSerDe = new OpenXJsonSerDeProperty {
                    CaseInsensitive = false,
                    ColumnToJsonKeyMappings = new Dictionary<string, string> {
                        { "columnToJsonKeyMappingsKey", "columnToJsonKeyMappings" }
                    },
                    ConvertDotsInJsonKeysToUnderscores = false
                }
            }
        },
        OutputFormatConfiguration = new OutputFormatConfigurationProperty {
            Serializer = new SerializerProperty {
                OrcSerDe = new OrcSerDeProperty {
                    BlockSizeBytes = 123,
                    BloomFilterColumns = new [] { "bloomFilterColumns" },
                    BloomFilterFalsePositiveProbability = 123,
                    Compression = "compression",
                    DictionaryKeyThreshold = 123,
                    EnablePadding = false,
                    FormatVersion = "formatVersion",
                    PaddingTolerance = 123,
                    RowIndexStride = 123,
                    StripeSizeBytes = 123
                },
                ParquetSerDe = new ParquetSerDeProperty {
                    BlockSizeBytes = 123,
                    Compression = "compression",
                    EnableDictionaryCompression = false,
                    MaxPaddingBytes = 123,
                    PageSizeBytes = 123,
                    WriterVersion = "writerVersion"
                }
            }
        },
        SchemaConfiguration = new SchemaConfigurationProperty {
            CatalogId = "catalogId",
            DatabaseName = "databaseName",
            Region = "region",
            RoleArn = "roleArn",
            TableName = "tableName",
            VersionId = "versionId"
        }
    },
    DynamicPartitioningConfiguration = new DynamicPartitioningConfigurationProperty {
        Enabled = false,
        RetryOptions = new RetryOptionsProperty {
            DurationInSeconds = 123
        }
    },
    EncryptionConfiguration = new EncryptionConfigurationProperty {
        KmsEncryptionConfig = new KMSEncryptionConfigProperty {
            AwskmsKeyArn = "awskmsKeyArn"
        },
        NoEncryptionConfig = "noEncryptionConfig"
    },
    ErrorOutputPrefix = "errorOutputPrefix",
    FileExtension = "fileExtension",
    Prefix = "prefix",
    ProcessingConfiguration = new ProcessingConfigurationProperty {
        Enabled = false,
        Processors = new [] { new ProcessorProperty {
            Type = "type",
            // the properties below are optional
            Parameters = new [] { new ProcessorParameterProperty {
                ParameterName = "parameterName",
                ParameterValue = "parameterValue"
            } }
        } }
    },
    S3BackupConfiguration = new S3DestinationConfigurationProperty {
        BucketArn = "bucketArn",
        RoleArn = "roleArn",
        // the properties below are optional
        BufferingHints = new BufferingHintsProperty {
            IntervalInSeconds = 123,
            SizeInMBs = 123
        },
        CloudWatchLoggingOptions = new CloudWatchLoggingOptionsProperty {
            Enabled = false,
            LogGroupName = "logGroupName",
            LogStreamName = "logStreamName"
        },
        CompressionFormat = "compressionFormat",
        EncryptionConfiguration = new EncryptionConfigurationProperty {
            KmsEncryptionConfig = new KMSEncryptionConfigProperty {
                AwskmsKeyArn = "awskmsKeyArn"
            },
            NoEncryptionConfig = "noEncryptionConfig"
        },
        ErrorOutputPrefix = "errorOutputPrefix",
        Prefix = "prefix"
    },
    S3BackupMode = "s3BackupMode"
};
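The property type above is typically supplied to a CfnDeliveryStream through its props. The following is a minimal sketch of that wiring, assuming a CDK v2 stack; the stack class name, construct id, delivery stream type, and the bucket and role ARNs are illustrative placeholders rather than values taken from this reference.
using Amazon.CDK;
using Amazon.CDK.AWS.KinesisFirehose;
using Constructs;

public class FirehoseExampleStack : Stack
{
    public FirehoseExampleStack(Construct scope, string id, IStackProps props = null)
        : base(scope, id, props)
    {
        // Minimal extended S3 destination: only the required BucketArn and RoleArn are set.
        var destination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
        {
            BucketArn = "arn:aws:s3:::example-bucket",           // placeholder bucket ARN
            RoleArn = "arn:aws:iam::123456789012:role/firehose"  // placeholder role ARN
        };

        // Wire the destination into the L1 delivery stream construct.
        new CfnDeliveryStream(this, "DeliveryStream", new CfnDeliveryStreamProps
        {
            DeliveryStreamType = "DirectPut",
            ExtendedS3DestinationConfiguration = destination
        });
    }
}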
Synopsis
Properties
BucketArn | The Amazon Resource Name (ARN) of the Amazon S3 bucket.
BufferingHints | The buffering option.
CloudWatchLoggingOptions | The Amazon CloudWatch logging options for your Firehose stream.
CompressionFormat | The compression format.
CustomTimeZone | The time zone you prefer.
DataFormatConversionConfiguration | The serializer, deserializer, and schema for converting data from the JSON format to the Parquet or ORC format before writing it to Amazon S3.
DynamicPartitioningConfiguration | The configuration of the dynamic partitioning mechanism that creates targeted data sets from the streaming data by partitioning it based on partition keys.
EncryptionConfiguration | The encryption configuration for the Kinesis Data Firehose delivery stream.
ErrorOutputPrefix | A prefix that Kinesis Data Firehose evaluates and adds to failed records before writing them to S3.
FileExtension | Specify a file extension.
Prefix | The YYYY/MM/DD/HH time format prefix is automatically used for delivered Amazon S3 files.
ProcessingConfiguration | The data processing configuration for the Kinesis Data Firehose delivery stream.
RoleArn | The Amazon Resource Name (ARN) of the AWS credentials.
S3BackupConfiguration | The configuration for backup in Amazon S3.
S3BackupMode | The Amazon S3 backup mode.
Properties
BucketArn
The Amazon Resource Name (ARN) of the Amazon S3 bucket.
string BucketArn { get; }
Property Value
System.String
Remarks
For constraints, see ExtendedS3DestinationConfiguration in the Amazon Kinesis Data Firehose API Reference.
BufferingHints
The buffering option.
virtual object BufferingHints { get; }
Property Value
System.Object
Remarks
CloudWatchLoggingOptions
The Amazon CloudWatch logging options for your Firehose stream.
virtual object CloudWatchLoggingOptions { get; }
Property Value
System.Object
Remarks
CompressionFormat
The compression format.
virtual string CompressionFormat { get; }
Property Value
System.String
Remarks
If no value is specified, the default is UNCOMPRESSED.
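As a hedged illustration that is not part of the generated example above, the snippet below switches the destination from the UNCOMPRESSED default to GZIP; the bucket and role ARNs are placeholders, and the full set of accepted values should be confirmed against the CloudFormation documentation for this property.
using Amazon.CDK.AWS.KinesisFirehose;

// Sketch: write delivered objects to S3 compressed with GZIP instead of the
// UNCOMPRESSED default. BucketArn and RoleArn are placeholder values.
var gzipDestination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
{
    BucketArn = "arn:aws:s3:::example-bucket",
    RoleArn = "arn:aws:iam::123456789012:role/firehose",
    CompressionFormat = "GZIP"
};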
CustomTimeZone
The time zone you prefer.
virtual string CustomTimeZone { get; }
Property Value
System.String
Remarks
DataFormatConversionConfiguration
The serializer, deserializer, and schema for converting data from the JSON format to the Parquet or ORC format before writing it to Amazon S3.
virtual object DataFormatConversionConfiguration { get; }
Property Value
System.Object
Remarks
DynamicPartitioningConfiguration
The configuration of the dynamic partitioning mechanism that creates targeted data sets from the streaming data by partitioning it based on partition keys.
virtual object DynamicPartitioningConfiguration { get; }
Property Value
System.Object
Remarks
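The sketch below goes beyond this reference: it pairs dynamic partitioning with a MetadataExtraction processor and a partition-key expression in Prefix, following the Firehose dynamic partitioning documentation. The JQ query, key names, and ARNs are illustrative assumptions, not values defined by this interface.
using Amazon.CDK.AWS.KinesisFirehose;

// Sketch: enable dynamic partitioning and partition delivered objects by a
// "customer_id" key extracted from each incoming JSON record.
var partitionedDestination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
{
    BucketArn = "arn:aws:s3:::example-bucket",           // placeholder
    RoleArn = "arn:aws:iam::123456789012:role/firehose", // placeholder
    DynamicPartitioningConfiguration = new CfnDeliveryStream.DynamicPartitioningConfigurationProperty
    {
        Enabled = true,
        RetryOptions = new CfnDeliveryStream.RetryOptionsProperty { DurationInSeconds = 300 }
    },
    ProcessingConfiguration = new CfnDeliveryStream.ProcessingConfigurationProperty
    {
        Enabled = true,
        Processors = new []
        {
            new CfnDeliveryStream.ProcessorProperty
            {
                Type = "MetadataExtraction",
                Parameters = new []
                {
                    new CfnDeliveryStream.ProcessorParameterProperty
                    {
                        ParameterName = "MetadataExtractionQuery",
                        ParameterValue = "{customer_id:.customer_id}" // illustrative JQ expression
                    },
                    new CfnDeliveryStream.ProcessorParameterProperty
                    {
                        ParameterName = "JsonParsingEngine",
                        ParameterValue = "JQ-1.6"
                    }
                }
            }
        }
    },
    // Use the extracted key in the object key prefix.
    Prefix = "data/customer_id=!{partitionKeyFromQuery:customer_id}/",
    ErrorOutputPrefix = "errors/!{firehose:error-output-type}/"
};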
EncryptionConfiguration
The encryption configuration for the Kinesis Data Firehose delivery stream.
virtual object EncryptionConfiguration { get; }
Property Value
System.Object
Remarks
ErrorOutputPrefix
A prefix that Kinesis Data Firehose evaluates and adds to failed records before writing them to S3.
virtual string ErrorOutputPrefix { get; }
Property Value
System.String
Remarks
This prefix appears immediately following the bucket name. For information about how to specify this prefix, see Custom Prefixes for Amazon S3 Objects.
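For example, a hedged sketch that routes successful deliveries and failed records to separate, dated key prefixes using the expressions from Custom Prefixes for Amazon S3 Objects; the bucket, role, and key layout are placeholders.
using Amazon.CDK.AWS.KinesisFirehose;

// Sketch: dated prefixes for delivered objects, plus a failures/ prefix that
// records the error output type for failed records.
var prefixedDestination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
{
    BucketArn = "arn:aws:s3:::example-bucket",           // placeholder
    RoleArn = "arn:aws:iam::123456789012:role/firehose", // placeholder
    Prefix = "logs/!{timestamp:yyyy/MM/dd}/",
    ErrorOutputPrefix = "failures/!{firehose:error-output-type}/!{timestamp:yyyy/MM/dd}/"
};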
FileExtension
Specify a file extension.
virtual string FileExtension { get; }
Property Value
System.String
Remarks
Prefix
The YYYY/MM/DD/HH time format prefix is automatically used for delivered Amazon S3 files.
virtual string Prefix { get; }
Property Value
System.String
Remarks
For more information, see ExtendedS3DestinationConfiguration in the Amazon Kinesis Data Firehose API Reference.
ProcessingConfiguration
The data processing configuration for the Kinesis Data Firehose delivery stream.
virtual object ProcessingConfiguration { get; }
Property Value
System.Object
Remarks
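A hedged sketch of record transformation with Lambda, assuming the Lambda processor type and the LambdaArn parameter name from the Firehose data transformation documentation; the function and role ARNs are placeholders.
using Amazon.CDK.AWS.KinesisFirehose;

// Sketch: transform incoming records with a Lambda function before delivery to S3.
var processedDestination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
{
    BucketArn = "arn:aws:s3:::example-bucket",           // placeholder
    RoleArn = "arn:aws:iam::123456789012:role/firehose", // placeholder
    ProcessingConfiguration = new CfnDeliveryStream.ProcessingConfigurationProperty
    {
        Enabled = true,
        Processors = new []
        {
            new CfnDeliveryStream.ProcessorProperty
            {
                Type = "Lambda",
                Parameters = new []
                {
                    new CfnDeliveryStream.ProcessorParameterProperty
                    {
                        ParameterName = "LambdaArn",
                        ParameterValue = "arn:aws:lambda:us-east-1:123456789012:function:transform" // placeholder
                    }
                }
            }
        }
    }
};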
RoleArn
The Amazon Resource Name (ARN) of the AWS credentials.
string RoleArn { get; }
Property Value
System.String
Remarks
For constraints, see ExtendedS3DestinationConfiguration in the Amazon Kinesis Data Firehose API Reference.
S3BackupConfiguration
The configuration for backup in Amazon S3.
virtual object S3BackupConfiguration { get; }
Property Value
System.Object
Remarks
S3BackupMode
The Amazon S3 backup mode.
virtual string S3BackupMode { get; }
Property Value
System.String
Remarks
After you create a Firehose stream, you can update it to enable Amazon S3 backup if it is disabled. If backup is enabled, you can't update the Firehose stream to disable it.
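As a hedged sketch, the snippet below enables source-record backup into a second bucket; "Enabled" is the backup mode value documented by CloudFormation for the extended S3 destination, and the bucket and role ARNs are placeholders.
using Amazon.CDK.AWS.KinesisFirehose;

// Sketch: keep a copy of the source records in a separate backup bucket.
var backedUpDestination = new CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty
{
    BucketArn = "arn:aws:s3:::example-bucket",           // placeholder
    RoleArn = "arn:aws:iam::123456789012:role/firehose", // placeholder
    S3BackupMode = "Enabled",
    S3BackupConfiguration = new CfnDeliveryStream.S3DestinationConfigurationProperty
    {
        BucketArn = "arn:aws:s3:::example-backup-bucket", // placeholder
        RoleArn = "arn:aws:iam::123456789012:role/firehose"
    }
};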