Interface CfnDeliveryStream.IRedshiftDestinationConfigurationProperty
The RedshiftDestinationConfiguration property type specifies an Amazon Redshift cluster to which Amazon Kinesis Data Firehose (Kinesis Data Firehose) delivers data.
Namespace: Amazon.CDK.AWS.KinesisFirehose
Assembly: Amazon.CDK.AWS.KinesisFirehose.dll
Syntax (csharp)
public interface IRedshiftDestinationConfigurationProperty
Syntax (vb)
Public Interface IRedshiftDestinationConfigurationProperty
Remarks
ExampleMetadata: fixture=_generated
Examples
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
using Amazon.CDK.AWS.KinesisFirehose;
var redshiftDestinationConfigurationProperty = new RedshiftDestinationConfigurationProperty {
ClusterJdbcurl = "clusterJdbcurl",
CopyCommand = new CopyCommandProperty {
DataTableName = "dataTableName",
// the properties below are optional
CopyOptions = "copyOptions",
DataTableColumns = "dataTableColumns"
},
Password = "password",
RoleArn = "roleArn",
S3Configuration = new S3DestinationConfigurationProperty {
BucketArn = "bucketArn",
RoleArn = "roleArn",
// the properties below are optional
BufferingHints = new BufferingHintsProperty {
IntervalInSeconds = 123,
SizeInMBs = 123
},
CloudWatchLoggingOptions = new CloudWatchLoggingOptionsProperty {
Enabled = false,
LogGroupName = "logGroupName",
LogStreamName = "logStreamName"
},
CompressionFormat = "compressionFormat",
EncryptionConfiguration = new EncryptionConfigurationProperty {
KmsEncryptionConfig = new KMSEncryptionConfigProperty {
AwskmsKeyArn = "awskmsKeyArn"
},
NoEncryptionConfig = "noEncryptionConfig"
},
ErrorOutputPrefix = "errorOutputPrefix",
Prefix = "prefix"
},
Username = "username",
// the properties below are optional
CloudWatchLoggingOptions = new CloudWatchLoggingOptionsProperty {
Enabled = false,
LogGroupName = "logGroupName",
LogStreamName = "logStreamName"
},
ProcessingConfiguration = new ProcessingConfigurationProperty {
Enabled = false,
Processors = new [] { new ProcessorProperty {
Type = "type",
// the properties below are optional
Parameters = new [] { new ProcessorParameterProperty {
ParameterName = "parameterName",
ParameterValue = "parameterValue"
} }
} }
},
RetryOptions = new RedshiftRetryOptionsProperty {
DurationInSeconds = 123
},
S3BackupConfiguration = new S3DestinationConfigurationProperty {
BucketArn = "bucketArn",
RoleArn = "roleArn",
// the properties below are optional
BufferingHints = new BufferingHintsProperty {
IntervalInSeconds = 123,
SizeInMBs = 123
},
CloudWatchLoggingOptions = new CloudWatchLoggingOptionsProperty {
Enabled = false,
LogGroupName = "logGroupName",
LogStreamName = "logStreamName"
},
CompressionFormat = "compressionFormat",
EncryptionConfiguration = new EncryptionConfigurationProperty {
KmsEncryptionConfig = new KMSEncryptionConfigProperty {
AwskmsKeyArn = "awskmsKeyArn"
},
NoEncryptionConfig = "noEncryptionConfig"
},
ErrorOutputPrefix = "errorOutputPrefix",
Prefix = "prefix"
},
S3BackupMode = "s3BackupMode"
};
Synopsis
Properties
CloudWatchLoggingOptions | The CloudWatch logging options for your delivery stream. |
ClusterJdbcurl | The connection string that Kinesis Data Firehose uses to connect to the Amazon Redshift cluster. |
CopyCommand | Configures the Amazon Redshift COPY command that Kinesis Data Firehose uses to load data into the cluster from the Amazon S3 bucket. |
Password | The password for the Amazon Redshift user that you specified in the Username property. |
ProcessingConfiguration | The data processing configuration for the Kinesis Data Firehose delivery stream. |
RetryOptions | The retry behavior in case Kinesis Data Firehose is unable to deliver documents to Amazon Redshift. |
RoleArn | The ARN of the AWS Identity and Access Management (IAM) role that grants Kinesis Data Firehose access to your Amazon S3 bucket and AWS KMS (if you enable data encryption). |
S3BackupConfiguration | The configuration for backup in Amazon S3. |
S3BackupMode | The Amazon S3 backup mode. |
S3Configuration | The S3 bucket where Kinesis Data Firehose first delivers data. |
Username | The Amazon Redshift user that has permission to access the Amazon Redshift cluster. |
Properties
CloudWatchLoggingOptions
The CloudWatch logging options for your delivery stream.
virtual object CloudWatchLoggingOptions { get; }
Property Value
System.Object
Remarks
ClusterJdbcurl
The connection string that Kinesis Data Firehose uses to connect to the Amazon Redshift cluster.
string ClusterJdbcurl { get; }
Property Value
System.String
Remarks
CopyCommand
Configures the Amazon Redshift COPY command that Kinesis Data Firehose uses to load data into the cluster from the Amazon S3 bucket.
object CopyCommand { get; }
Property Value
System.Object
Remarks
Password
The password for the Amazon Redshift user that you specified in the Username property.
string Password { get; }
Property Value
System.String
Remarks
ProcessingConfiguration
The data processing configuration for the Kinesis Data Firehose delivery stream.
virtual object ProcessingConfiguration { get; }
Property Value
System.Object
Remarks
RetryOptions
The retry behavior in case Kinesis Data Firehose is unable to deliver documents to Amazon Redshift.
virtual object RetryOptions { get; }
Property Value
System.Object
Remarks
RoleArn
The ARN of the AWS Identity and Access Management (IAM) role that grants Kinesis Data Firehose access to your Amazon S3 bucket and AWS KMS (if you enable data encryption).
string RoleArn { get; }
Property Value
System.String
Remarks
For more information, see Grant Kinesis Data Firehose Access to an Amazon Redshift Destination in the Amazon Kinesis Data Firehose Developer Guide.
S3BackupConfiguration
The configuration for backup in Amazon S3.
virtual object S3BackupConfiguration { get; }
Property Value
System.Object
Remarks
S3BackupMode
The Amazon S3 backup mode.
virtual string S3BackupMode { get; }
Property Value
System.String
Remarks
After you create a delivery stream, you can update it to enable Amazon S3 backup if it is disabled. If backup is enabled, you can't update the delivery stream to disable it.
S3Configuration
The S3 bucket where Kinesis Data Firehose first delivers data.
object S3Configuration { get; }
Property Value
System.Object
Remarks
After the data is in the bucket, Kinesis Data Firehose uses the COPY command to load the data into the Amazon Redshift cluster. For the Amazon S3 bucket's compression format, don't specify SNAPPY or ZIP because the Amazon Redshift COPY command doesn't support them.
Username
The Amazon Redshift user that has permission to access the Amazon Redshift cluster.
string Username { get; }
Property Value
System.String
Remarks
This user must have INSERT privileges for copying data from the Amazon S3 bucket to the cluster.