Sources
PostgreSQL
PostgreSQL
Copy
Ask AI
# PostgreSQL CDC source.
# Captures changes from the listed tables using a logical replication
# slot and publication; the signal collection is used for incremental
# snapshot signalling.
resource "streamkap_source_postgresql" "example" {
  name              = "my-postgres-source"
  database_hostname = var.hostname
  database_port     = 5432
  database_user     = var.username
  database_password = var.password
  database_dbname   = "mydb"
  database_sslmode  = "require"

  # Capture scope: schema plus explicit table list.
  schema_include_list = "public"
  table_include_list  = "public.orders,public.customers"

  signal_data_collection_schema_or_database = "streamkap"

  # Replication slot / publication names on the server.
  slot_name        = "streamkap_slot"
  publication_name = "streamkap_pub"
}
MySQL
MySQL
Copy
Ask AI
# MySQL CDC source.
# Streams changes from the listed tables in database "mydb"; the signal
# collection lives in the same database.
resource "streamkap_source_mysql" "example" {
  name              = "my-mysql-source"
  database_hostname = var.hostname
  database_port     = 3306
  database_user     = var.username
  database_password = var.password

  # Capture scope: database plus explicit table list.
  database_include_list = "mydb"
  table_include_list    = "mydb.orders,mydb.customers"

  signal_data_collection_schema_or_database = "mydb"
}
MongoDB
MongoDB
Copy
Ask AI
# MongoDB CDC source.
# Connects via a single connection string and captures the listed
# collections from database "mydb".
resource "streamkap_source_mongodb" "example" {
  name                      = "my-mongodb-source"
  mongodb_connection_string = var.connection_string

  # Capture scope: database plus explicit collection list.
  database_include_list   = "mydb"
  collection_include_list = "mydb.users,mydb.orders"

  signal_data_collection_schema_or_database = "mydb"
}
DynamoDB
DynamoDB
Copy
Ask AI
# DynamoDB source.
# Reads the listed tables in us-west-2; initial exports are staged
# through the named S3 bucket.
resource "streamkap_source_dynamodb" "example" {
  name              = "my-dynamodb-source"
  aws_region        = "us-west-2"
  aws_access_key_id = var.aws_access_key
  aws_secret_key    = var.aws_secret_key

  # Tables to capture (user-defined list).
  table_include_list_user_defined = "Users,Orders"

  # Bucket used for table exports.
  s3_export_bucket_name = "my-export-bucket"
}
SQL Server
SQL Server
Copy
Ask AI
# SQL Server CDC source.
# Captures the listed dbo tables from database "mydb"; the signal
# collection lives in the "streamkap" schema/database.
resource "streamkap_source_sqlserver" "example" {
  name              = "my-sqlserver-source"
  database_hostname = var.hostname
  database_port     = 1433
  database_user     = var.username
  database_password = var.password
  database_dbname   = "mydb"

  # Capture scope: schema plus explicit table list.
  schema_include_list = "dbo"
  table_include_list  = "dbo.Orders,dbo.Customers"

  signal_data_collection_schema_or_database = "streamkap"
}
Kafka Direct
Kafka Direct
Copy
Ask AI
# Kafka Direct source.
# Ingests the listed externally-produced topics; records are JSON with
# schemas enabled, and topics share the "external_" prefix.
resource "streamkap_source_kafkadirect" "example" {
  name           = "my-kafka-source"
  topic_prefix   = "external_"
  kafka_format   = "json"
  schemas_enable = true

  topic_include_list = "external_topic1,external_topic2"
}
Destinations
Snowflake
Snowflake
Copy
Ask AI
# Snowflake destination.
# Writes into ANALYTICS.PUBLIC using key-pair authentication; upsert
# mode with hard deletes so source deletions remove rows downstream.
resource "streamkap_destination_snowflake" "example" {
  name                    = "my-snowflake-dest"
  snowflake_url_name      = var.snowflake_url
  snowflake_user_name     = var.snowflake_user
  snowflake_private_key   = var.snowflake_private_key
  snowflake_database_name = "ANALYTICS"
  snowflake_schema_name   = "PUBLIC"
  snowflake_role_name     = "STREAMKAP_ROLE"
  sfwarehouse             = "COMPUTE_WH"

  # Write semantics.
  ingestion_mode = "upsert"
  hard_delete    = true
}
Databricks
Databricks
Copy
Ask AI
# Databricks destination.
# Connects over JDBC with a personal access token and writes to the
# hive_metastore catalog in append mode with hard deletes.
resource "streamkap_destination_databricks" "example" {
  name               = "my-databricks-dest"
  connection_url     = var.databricks_jdbc_url
  databricks_token   = var.databricks_token
  databricks_catalog = "hive_metastore"

  # Write semantics.
  ingestion_mode = "append"
  hard_delete    = true
}
ClickHouse
ClickHouse
Copy
Ask AI
# ClickHouse destination.
# Writes to the "analytics" database over SSL (port 8443) in append
# mode.
resource "streamkap_destination_clickhouse" "example" {
  name                = "my-clickhouse-dest"
  hostname            = var.clickhouse_hostname
  port                = 8443
  database            = "analytics"
  connection_username = var.clickhouse_user
  connection_password = var.clickhouse_password
  ssl                 = true

  # Write semantics.
  ingestion_mode = "append"
}
PostgreSQL
PostgreSQL
Copy
Ask AI
# PostgreSQL destination.
# Writes to analytics.public using plain inserts; basic schema
# evolution lets the connector add columns as the source changes.
resource "streamkap_destination_postgresql" "example" {
  name                 = "my-postgres-dest"
  database_hostname    = var.hostname
  database_port        = 5432
  database_dbname      = "analytics"
  database_username    = var.username
  database_password    = var.password
  database_schema_name = "public"

  # Write semantics.
  insert_mode      = "insert"
  schema_evolution = "basic"
}
S3
S3
Copy
Ask AI
# S3 destination.
# Lands records in the "my-data-lake" bucket as JSON arrays, emitting
# both the record value and key fields.
resource "streamkap_destination_s3" "example" {
  name           = "my-s3-dest"
  aws_access_key = var.aws_access_key
  aws_secret_key = var.aws_secret_key
  aws_region     = "us-west-2"
  bucket_name    = "my-data-lake"

  # Output layout.
  format        = "JSON Array"
  output_fields = ["value", "key"]
}
Iceberg
Iceberg
Copy
Ask AI
# Iceberg destination.
# Writes to an Iceberg REST catalog with table data stored under the
# given S3 warehouse path, in the "analytics" schema.
resource "streamkap_destination_iceberg" "example" {
  name         = "my-iceberg-dest"
  catalog_type = "rest"
  catalog_name = "my_catalog"
  catalog_uri  = "https://iceberg-catalog.example.com"

  # Object-store credentials and warehouse location.
  aws_access_key = var.aws_access_key
  aws_secret_key = var.aws_secret_key
  aws_region     = "us-west-2"
  bucket_path    = "s3://my-bucket/warehouse"

  schema = "analytics"
}
Kafka
Kafka
Copy
Ask AI
# Kafka destination.
# Produces Avro records to an external broker, registering schemas
# with the given schema registry.
resource "streamkap_destination_kafka" "example" {
  name                 = "my-kafka-dest"
  kafka_sink_bootstrap = "kafka-broker.example.com:9092"
  destination_format   = "avro"
  schema_registry_url  = "https://schema-registry.example.com"
}
Pipelines & Topics
Pipeline
Pipeline
Copy
Ask AI
# Pipeline wiring a PostgreSQL source to a Snowflake destination.
# Streams the two listed topics; tables added to the source later are
# snapshotted automatically.
resource "streamkap_pipeline" "example" {
  name                = "orders-to-warehouse"
  snapshot_new_tables = true

  # Upstream connector and the topics to move.
  source = {
    id        = streamkap_source_postgresql.orders.id
    name      = streamkap_source_postgresql.orders.name
    connector = streamkap_source_postgresql.orders.connector
    topics    = ["public.orders", "public.customers"]
  }

  # Downstream connector.
  destination = {
    id        = streamkap_destination_snowflake.warehouse.id
    name      = streamkap_destination_snowflake.warehouse.name
    connector = streamkap_destination_snowflake.warehouse.connector
  }
}
Topic
Topic
Copy
Ask AI
# Raises the partition count for a single high-volume topic.
# NOTE: partition counts can only be increased, never decreased.
resource "streamkap_topic" "high_volume" {
  topic_id        = "source_abc123.public.orders"
  partition_count = 25
}
Topic partition counts can only be increased, not decreased.
Other
Tag
Tag
Copy
Ask AI
# Look up an existing tag by its ID so it can be attached to pipelines.
data "streamkap_tag" "production" {
  id = "670e5bab0d119c0d1f8cda9d"
}

# Attach the tag to a pipeline.
resource "streamkap_pipeline" "example" {
  # ...
  tags = [data.streamkap_tag.production.id]
}
Transform
Transform
Copy
Ask AI
# Look up an existing transform by its ID so it can be applied to
# pipeline topics.
data "streamkap_transform" "enrich" {
  id = "63975020676fa8f369d55001"
}

# Apply the transform to a pipeline's topic(s).
resource "streamkap_pipeline" "example" {
  # ...
  transforms = [{
    id     = data.streamkap_transform.enrich.id
    topics = ["public.orders"]
  }]
}