Class CfnPipe.PipeSourceParametersProperty
The parameters required to set up a source for your pipe.
Inheritance
Implements
Namespace: Amazon.CDK.AWS.Pipes
Assembly: Amazon.CDK.Lib.dll
Syntax (csharp)
public class PipeSourceParametersProperty : Object, CfnPipe.IPipeSourceParametersProperty
Syntax (vb)
Public Class PipeSourceParametersProperty
Inherits Object
Implements CfnPipe.IPipeSourceParametersProperty
Remarks
ExampleMetadata: fixture=_generated
Examples
// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
using Amazon.CDK.AWS.Pipes;
var pipeSourceParametersProperty = new PipeSourceParametersProperty {
ActiveMqBrokerParameters = new PipeSourceActiveMQBrokerParametersProperty {
Credentials = new MQBrokerAccessCredentialsProperty {
BasicAuth = "basicAuth"
},
QueueName = "queueName",
// the properties below are optional
BatchSize = 123,
MaximumBatchingWindowInSeconds = 123
},
DynamoDbStreamParameters = new PipeSourceDynamoDBStreamParametersProperty {
StartingPosition = "startingPosition",
// the properties below are optional
BatchSize = 123,
DeadLetterConfig = new DeadLetterConfigProperty {
Arn = "arn"
},
MaximumBatchingWindowInSeconds = 123,
MaximumRecordAgeInSeconds = 123,
MaximumRetryAttempts = 123,
OnPartialBatchItemFailure = "onPartialBatchItemFailure",
ParallelizationFactor = 123
},
FilterCriteria = new FilterCriteriaProperty {
Filters = new [] { new FilterProperty {
Pattern = "pattern"
} }
},
KinesisStreamParameters = new PipeSourceKinesisStreamParametersProperty {
StartingPosition = "startingPosition",
// the properties below are optional
BatchSize = 123,
DeadLetterConfig = new DeadLetterConfigProperty {
Arn = "arn"
},
MaximumBatchingWindowInSeconds = 123,
MaximumRecordAgeInSeconds = 123,
MaximumRetryAttempts = 123,
OnPartialBatchItemFailure = "onPartialBatchItemFailure",
ParallelizationFactor = 123,
StartingPositionTimestamp = "startingPositionTimestamp"
},
ManagedStreamingKafkaParameters = new PipeSourceManagedStreamingKafkaParametersProperty {
TopicName = "topicName",
// the properties below are optional
BatchSize = 123,
ConsumerGroupId = "consumerGroupId",
Credentials = new MSKAccessCredentialsProperty {
ClientCertificateTlsAuth = "clientCertificateTlsAuth",
SaslScram512Auth = "saslScram512Auth"
},
MaximumBatchingWindowInSeconds = 123,
StartingPosition = "startingPosition"
},
RabbitMqBrokerParameters = new PipeSourceRabbitMQBrokerParametersProperty {
Credentials = new MQBrokerAccessCredentialsProperty {
BasicAuth = "basicAuth"
},
QueueName = "queueName",
// the properties below are optional
BatchSize = 123,
MaximumBatchingWindowInSeconds = 123,
VirtualHost = "virtualHost"
},
SelfManagedKafkaParameters = new PipeSourceSelfManagedKafkaParametersProperty {
TopicName = "topicName",
// the properties below are optional
AdditionalBootstrapServers = new [] { "additionalBootstrapServers" },
BatchSize = 123,
ConsumerGroupId = "consumerGroupId",
Credentials = new SelfManagedKafkaAccessConfigurationCredentialsProperty {
BasicAuth = "basicAuth",
ClientCertificateTlsAuth = "clientCertificateTlsAuth",
SaslScram256Auth = "saslScram256Auth",
SaslScram512Auth = "saslScram512Auth"
},
MaximumBatchingWindowInSeconds = 123,
ServerRootCaCertificate = "serverRootCaCertificate",
StartingPosition = "startingPosition",
Vpc = new SelfManagedKafkaAccessConfigurationVpcProperty {
SecurityGroup = new [] { "securityGroup" },
Subnets = new [] { "subnets" }
}
},
SqsQueueParameters = new PipeSourceSqsQueueParametersProperty {
BatchSize = 123,
MaximumBatchingWindowInSeconds = 123
}
};
Synopsis
Constructors
PipeSourceParametersProperty() |
Properties
ActiveMqBrokerParameters | The parameters for using an Active MQ broker as a source. |
DynamoDbStreamParameters | The parameters for using a DynamoDB stream as a source. |
FilterCriteria | The collection of event patterns used to filter events. |
KinesisStreamParameters | The parameters for using a Kinesis stream as a source. |
ManagedStreamingKafkaParameters | The parameters for using an MSK stream as a source. |
RabbitMqBrokerParameters | The parameters for using a Rabbit MQ broker as a source. |
SelfManagedKafkaParameters | The parameters for using a self-managed Apache Kafka stream as a source. |
SqsQueueParameters | The parameters for using an Amazon SQS stream as a source. |
Constructors
PipeSourceParametersProperty()
public PipeSourceParametersProperty()
Properties
ActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
public object ActiveMqBrokerParameters { get; set; }
Property Value
System.Object
Remarks
DynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
public object DynamoDbStreamParameters { get; set; }
Property Value
System.Object
Remarks
FilterCriteria
The collection of event patterns used to filter events.
public object FilterCriteria { get; set; }
Property Value
System.Object
Remarks
To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.
For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
KinesisStreamParameters
The parameters for using a Kinesis stream as a source.
public object KinesisStreamParameters { get; set; }
Property Value
System.Object
Remarks
ManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
public object ManagedStreamingKafkaParameters { get; set; }
Property Value
System.Object
Remarks
RabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
public object RabbitMqBrokerParameters { get; set; }
Property Value
System.Object
Remarks
SelfManagedKafkaParameters
The parameters for using a self-managed Apache Kafka stream as a source.
public object SelfManagedKafkaParameters { get; set; }
Property Value
System.Object
Remarks
A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
SqsQueueParameters
The parameters for using an Amazon SQS stream as a source.
public object SqsQueueParameters { get; set; }
Property Value
System.Object