gcp.managedkafka.ConnectCluster
Example Usage
Managedkafka Connect Cluster Basic
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const mkcNetwork = new gcp.compute.Network("mkc_network", {
name: "my-network",
autoCreateSubnetworks: false,
});
const mkcSubnet = new gcp.compute.Subnetwork("mkc_subnet", {
name: "my-subnetwork",
ipCidrRange: "10.2.0.0/16",
region: "us-central1",
network: mkcNetwork.id,
});
const mkcAdditionalSubnet = new gcp.compute.Subnetwork("mkc_additional_subnet", {
name: "my-additional-subnetwork-0",
ipCidrRange: "10.3.0.0/16",
region: "us-central1",
network: mkcNetwork.id,
});
const project = gcp.organizations.getProject({});
const gmkCluster = new gcp.managedkafka.Cluster("gmk_cluster", {
clusterId: "my-cluster",
location: "us-central1",
capacityConfig: {
vcpuCount: "3",
memoryBytes: "3221225472",
},
gcpConfig: {
accessConfig: {
networkConfigs: [{
subnet: pulumi.all([project, mkcSubnet.id]).apply(([project, id]) => `projects/${project.projectId}/regions/us-central1/subnetworks/${id}`),
}],
},
},
});
const example = new gcp.managedkafka.ConnectCluster("example", {
connectClusterId: "my-connect-cluster",
kafkaCluster: pulumi.all([project, gmkCluster.clusterId]).apply(([project, clusterId]) => `projects/${project.projectId}/locations/us-central1/clusters/${clusterId}`),
location: "us-central1",
capacityConfig: {
vcpuCount: "12",
memoryBytes: "21474836480",
},
gcpConfig: {
accessConfig: {
networkConfigs: [{
primarySubnet: pulumi.all([project, mkcSubnet.id]).apply(([project, id]) => `projects/${project.projectId}/regions/us-central1/subnetworks/${id}`),
additionalSubnets: [mkcAdditionalSubnet.id],
dnsDomainNames: [pulumi.all([gmkCluster.clusterId, project]).apply(([clusterId, project]) => `${clusterId}.us-central1.managedkafka-staging.${project.projectId}.cloud-staging.goog`)],
}],
},
},
labels: {
key: "value",
},
});
import pulumi
import pulumi_gcp as gcp
mkc_network = gcp.compute.Network("mkc_network",
name="my-network",
auto_create_subnetworks=False)
mkc_subnet = gcp.compute.Subnetwork("mkc_subnet",
name="my-subnetwork",
ip_cidr_range="10.2.0.0/16",
region="us-central1",
network=mkc_network.id)
mkc_additional_subnet = gcp.compute.Subnetwork("mkc_additional_subnet",
name="my-additional-subnetwork-0",
ip_cidr_range="10.3.0.0/16",
region="us-central1",
network=mkc_network.id)
project = gcp.organizations.get_project()
gmk_cluster = gcp.managedkafka.Cluster("gmk_cluster",
cluster_id="my-cluster",
location="us-central1",
capacity_config={
"vcpu_count": "3",
"memory_bytes": "3221225472",
},
gcp_config={
"access_config": {
"network_configs": [{
"subnet": mkc_subnet.id.apply(lambda id: f"projects/{project.project_id}/regions/us-central1/subnetworks/{id}"),
}],
},
})
example = gcp.managedkafka.ConnectCluster("example",
connect_cluster_id="my-connect-cluster",
kafka_cluster=gmk_cluster.cluster_id.apply(lambda cluster_id: f"projects/{project.project_id}/locations/us-central1/clusters/{cluster_id}"),
location="us-central1",
capacity_config={
"vcpu_count": "12",
"memory_bytes": "21474836480",
},
gcp_config={
"access_config": {
"network_configs": [{
"primary_subnet": mkc_subnet.id.apply(lambda id: f"projects/{project.project_id}/regions/us-central1/subnetworks/{id}"),
"additional_subnets": [mkc_additional_subnet.id],
"dns_domain_names": [gmk_cluster.cluster_id.apply(lambda cluster_id: f"{cluster_id}.us-central1.managedkafka-staging.{project.project_id}.cloud-staging.goog")],
}],
},
},
labels={
"key": "value",
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/compute"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/managedkafka"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
mkcNetwork, err := compute.NewNetwork(ctx, "mkc_network", &compute.NetworkArgs{
Name: pulumi.String("my-network"),
AutoCreateSubnetworks: pulumi.Bool(false),
})
if err != nil {
return err
}
mkcSubnet, err := compute.NewSubnetwork(ctx, "mkc_subnet", &compute.SubnetworkArgs{
Name: pulumi.String("my-subnetwork"),
IpCidrRange: pulumi.String("10.2.0.0/16"),
Region: pulumi.String("us-central1"),
Network: mkcNetwork.ID(),
})
if err != nil {
return err
}
mkcAdditionalSubnet, err := compute.NewSubnetwork(ctx, "mkc_additional_subnet", &compute.SubnetworkArgs{
Name: pulumi.String("my-additional-subnetwork-0"),
IpCidrRange: pulumi.String("10.3.0.0/16"),
Region: pulumi.String("us-central1"),
Network: mkcNetwork.ID(),
})
if err != nil {
return err
}
project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
gmkCluster, err := managedkafka.NewCluster(ctx, "gmk_cluster", &managedkafka.ClusterArgs{
ClusterId: pulumi.String("my-cluster"),
Location: pulumi.String("us-central1"),
CapacityConfig: &managedkafka.ClusterCapacityConfigArgs{
VcpuCount: pulumi.String("3"),
MemoryBytes: pulumi.String("3221225472"),
},
GcpConfig: &managedkafka.ClusterGcpConfigArgs{
AccessConfig: &managedkafka.ClusterGcpConfigAccessConfigArgs{
NetworkConfigs: managedkafka.ClusterGcpConfigAccessConfigNetworkConfigArray{
&managedkafka.ClusterGcpConfigAccessConfigNetworkConfigArgs{
Subnet: mkcSubnet.ID().ApplyT(func(id string) (string, error) {
return fmt.Sprintf("projects/%v/regions/us-central1/subnetworks/%v", project.ProjectId, id), nil
}).(pulumi.StringOutput),
},
},
},
},
})
if err != nil {
return err
}
_, err = managedkafka.NewConnectCluster(ctx, "example", &managedkafka.ConnectClusterArgs{
ConnectClusterId: pulumi.String("my-connect-cluster"),
KafkaCluster: gmkCluster.ClusterId.ApplyT(func(clusterId string) (string, error) {
return fmt.Sprintf("projects/%v/locations/us-central1/clusters/%v", project.ProjectId, clusterId), nil
}).(pulumi.StringOutput),
Location: pulumi.String("us-central1"),
CapacityConfig: &managedkafka.ConnectClusterCapacityConfigArgs{
VcpuCount: pulumi.String("12"),
MemoryBytes: pulumi.String("21474836480"),
},
GcpConfig: &managedkafka.ConnectClusterGcpConfigArgs{
AccessConfig: &managedkafka.ConnectClusterGcpConfigAccessConfigArgs{
NetworkConfigs: managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArray{
&managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs{
PrimarySubnet: mkcSubnet.ID().ApplyT(func(id string) (string, error) {
return fmt.Sprintf("projects/%v/regions/us-central1/subnetworks/%v", project.ProjectId, id), nil
}).(pulumi.StringOutput),
AdditionalSubnets: pulumi.StringArray{
mkcAdditionalSubnet.ID(),
},
DnsDomainNames: pulumi.StringArray{
gmkCluster.ClusterId.ApplyT(func(clusterId string) (string, error) {
return fmt.Sprintf("%v.us-central1.managedkafka-staging.%v.cloud-staging.goog", clusterId, project.ProjectId), nil
}).(pulumi.StringOutput),
},
},
},
},
},
Labels: pulumi.StringMap{
"key": pulumi.String("value"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var mkcNetwork = new Gcp.Compute.Network("mkc_network", new()
{
Name = "my-network",
AutoCreateSubnetworks = false,
});
var mkcSubnet = new Gcp.Compute.Subnetwork("mkc_subnet", new()
{
Name = "my-subnetwork",
IpCidrRange = "10.2.0.0/16",
Region = "us-central1",
Network = mkcNetwork.Id,
});
var mkcAdditionalSubnet = new Gcp.Compute.Subnetwork("mkc_additional_subnet", new()
{
Name = "my-additional-subnetwork-0",
IpCidrRange = "10.3.0.0/16",
Region = "us-central1",
Network = mkcNetwork.Id,
});
var project = Gcp.Organizations.GetProject.Invoke();
var gmkCluster = new Gcp.ManagedKafka.Cluster("gmk_cluster", new()
{
ClusterId = "my-cluster",
Location = "us-central1",
CapacityConfig = new Gcp.ManagedKafka.Inputs.ClusterCapacityConfigArgs
{
VcpuCount = "3",
MemoryBytes = "3221225472",
},
GcpConfig = new Gcp.ManagedKafka.Inputs.ClusterGcpConfigArgs
{
AccessConfig = new Gcp.ManagedKafka.Inputs.ClusterGcpConfigAccessConfigArgs
{
NetworkConfigs = new[]
{
new Gcp.ManagedKafka.Inputs.ClusterGcpConfigAccessConfigNetworkConfigArgs
{
Subnet = Output.Tuple(project, mkcSubnet.Id).Apply(values =>
{
var project = values.Item1;
var id = values.Item2;
return $"projects/{project.Apply(getProjectResult => getProjectResult.ProjectId)}/regions/us-central1/subnetworks/{id}";
}),
},
},
},
},
});
var example = new Gcp.ManagedKafka.ConnectCluster("example", new()
{
ConnectClusterId = "my-connect-cluster",
KafkaCluster = Output.Tuple(project, gmkCluster.ClusterId).Apply(values =>
{
var project = values.Item1;
var clusterId = values.Item2;
return $"projects/{project.Apply(getProjectResult => getProjectResult.ProjectId)}/locations/us-central1/clusters/{clusterId}";
}),
Location = "us-central1",
CapacityConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterCapacityConfigArgs
{
VcpuCount = "12",
MemoryBytes = "21474836480",
},
GcpConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigArgs
{
AccessConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigAccessConfigArgs
{
NetworkConfigs = new[]
{
new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs
{
PrimarySubnet = Output.Tuple(project, mkcSubnet.Id).Apply(values =>
{
var project = values.Item1;
var id = values.Item2;
return $"projects/{project.Apply(getProjectResult => getProjectResult.ProjectId)}/regions/us-central1/subnetworks/{id}";
}),
AdditionalSubnets = new[]
{
mkcAdditionalSubnet.Id,
},
DnsDomainNames = new[]
{
Output.Tuple(gmkCluster.ClusterId, project).Apply(values =>
{
var clusterId = values.Item1;
var project = values.Item2;
return $"{clusterId}.us-central1.managedkafka-staging.{project.Apply(getProjectResult => getProjectResult.ProjectId)}.cloud-staging.goog";
}),
},
},
},
},
},
Labels =
{
{ "key", "value" },
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.compute.Network;
import com.pulumi.gcp.compute.NetworkArgs;
import com.pulumi.gcp.compute.Subnetwork;
import com.pulumi.gcp.compute.SubnetworkArgs;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.managedkafka.Cluster;
import com.pulumi.gcp.managedkafka.ClusterArgs;
import com.pulumi.gcp.managedkafka.inputs.ClusterCapacityConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ClusterGcpConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ClusterGcpConfigAccessConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ClusterGcpConfigAccessConfigNetworkConfigArgs;
import com.pulumi.gcp.managedkafka.ConnectCluster;
import com.pulumi.gcp.managedkafka.ConnectClusterArgs;
import com.pulumi.gcp.managedkafka.inputs.ConnectClusterCapacityConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ConnectClusterGcpConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ConnectClusterGcpConfigAccessConfigArgs;
import com.pulumi.gcp.managedkafka.inputs.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var mkcNetwork = new Network("mkcNetwork", NetworkArgs.builder()
.name("my-network")
.autoCreateSubnetworks(false)
.build());
var mkcSubnet = new Subnetwork("mkcSubnet", SubnetworkArgs.builder()
.name("my-subnetwork")
.ipCidrRange("10.2.0.0/16")
.region("us-central1")
.network(mkcNetwork.id())
.build());
var mkcAdditionalSubnet = new Subnetwork("mkcAdditionalSubnet", SubnetworkArgs.builder()
.name("my-additional-subnetwork-0")
.ipCidrRange("10.3.0.0/16")
.region("us-central1")
.network(mkcNetwork.id())
.build());
final var project = OrganizationsFunctions.getProject();
var gmkCluster = new Cluster("gmkCluster", ClusterArgs.builder()
.clusterId("my-cluster")
.location("us-central1")
.capacityConfig(ClusterCapacityConfigArgs.builder()
.vcpuCount("3")
.memoryBytes("3221225472")
.build())
.gcpConfig(ClusterGcpConfigArgs.builder()
.accessConfig(ClusterGcpConfigAccessConfigArgs.builder()
.networkConfigs(ClusterGcpConfigAccessConfigNetworkConfigArgs.builder()
.subnet(Output.tuple(project, mkcSubnet.id()).applyValue(values -> String.format("projects/%s/regions/us-central1/subnetworks/%s", values.t1.projectId(), values.t2)))
.build())
.build())
.build())
.build());
var example = new ConnectCluster("example", ConnectClusterArgs.builder()
.connectClusterId("my-connect-cluster")
.kafkaCluster(Output.tuple(project, gmkCluster.clusterId()).applyValue(values -> String.format("projects/%s/locations/us-central1/clusters/%s", values.t1.projectId(), values.t2)))
.location("us-central1")
.capacityConfig(ConnectClusterCapacityConfigArgs.builder()
.vcpuCount("12")
.memoryBytes("21474836480")
.build())
.gcpConfig(ConnectClusterGcpConfigArgs.builder()
.accessConfig(ConnectClusterGcpConfigAccessConfigArgs.builder()
.networkConfigs(ConnectClusterGcpConfigAccessConfigNetworkConfigArgs.builder()
.primarySubnet(Output.tuple(project, mkcSubnet.id()).applyValue(values -> String.format("projects/%s/regions/us-central1/subnetworks/%s", values.t1.projectId(), values.t2)))
.additionalSubnets(mkcAdditionalSubnet.id())
.dnsDomainNames(Output.tuple(gmkCluster.clusterId(), project).applyValue(values -> String.format("%s.us-central1.managedkafka-staging.%s.cloud-staging.goog", values.t1, values.t2.projectId())))
.build())
.build())
.build())
.labels(Map.of("key", "value"))
.build());
}
}
resources:
mkcNetwork:
type: gcp:compute:Network
name: mkc_network
properties:
name: my-network
autoCreateSubnetworks: false
mkcSubnet:
type: gcp:compute:Subnetwork
name: mkc_subnet
properties:
name: my-subnetwork
ipCidrRange: 10.2.0.0/16
region: us-central1
network: ${mkcNetwork.id}
mkcAdditionalSubnet:
type: gcp:compute:Subnetwork
name: mkc_additional_subnet
properties:
name: my-additional-subnetwork-0
ipCidrRange: 10.3.0.0/16
region: us-central1
network: ${mkcNetwork.id}
gmkCluster:
type: gcp:managedkafka:Cluster
name: gmk_cluster
properties:
clusterId: my-cluster
location: us-central1
capacityConfig:
vcpuCount: 3
memoryBytes: "3221225472"
gcpConfig:
accessConfig:
networkConfigs:
- subnet: projects/${project.projectId}/regions/us-central1/subnetworks/${mkcSubnet.id}
example:
type: gcp:managedkafka:ConnectCluster
properties:
connectClusterId: my-connect-cluster
kafkaCluster: projects/${project.projectId}/locations/us-central1/clusters/${gmkCluster.clusterId}
location: us-central1
capacityConfig:
vcpuCount: 12
memoryBytes: "21474836480"
gcpConfig:
accessConfig:
networkConfigs:
- primarySubnet: projects/${project.projectId}/regions/us-central1/subnetworks/${mkcSubnet.id}
additionalSubnets:
- ${mkcAdditionalSubnet.id}
dnsDomainNames:
- ${gmkCluster.clusterId}.us-central1.managedkafka-staging.${project.projectId}.cloud-staging.goog
labels:
key: value
variables:
project:
fn::invoke:
function: gcp:organizations:getProject
arguments: {}
Create ConnectCluster Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new ConnectCluster(name: string, args: ConnectClusterArgs, opts?: CustomResourceOptions);
@overload
def ConnectCluster(resource_name: str,
args: ConnectClusterArgs,
opts: Optional[ResourceOptions] = None)
@overload
def ConnectCluster(resource_name: str,
opts: Optional[ResourceOptions] = None,
capacity_config: Optional[ConnectClusterCapacityConfigArgs] = None,
connect_cluster_id: Optional[str] = None,
gcp_config: Optional[ConnectClusterGcpConfigArgs] = None,
kafka_cluster: Optional[str] = None,
location: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
project: Optional[str] = None)
func NewConnectCluster(ctx *Context, name string, args ConnectClusterArgs, opts ...ResourceOption) (*ConnectCluster, error)
public ConnectCluster(string name, ConnectClusterArgs args, CustomResourceOptions? opts = null)
public ConnectCluster(String name, ConnectClusterArgs args)
public ConnectCluster(String name, ConnectClusterArgs args, CustomResourceOptions options)
type: gcp:managedkafka:ConnectCluster
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name
- The unique name of the resource.
- args (ConnectClusterArgs)
- The arguments to resource properties.
- opts
- Bag of options to control resource's behavior.
- ctx (Go only)
- Context object for the current deployment.
Constructor example
The following reference example uses placeholder values for all input properties.
var connectClusterResource = new Gcp.ManagedKafka.ConnectCluster("connectClusterResource", new()
{
CapacityConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterCapacityConfigArgs
{
MemoryBytes = "string",
VcpuCount = "string",
},
ConnectClusterId = "string",
GcpConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigArgs
{
AccessConfig = new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigAccessConfigArgs
{
NetworkConfigs = new[]
{
new Gcp.ManagedKafka.Inputs.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs
{
PrimarySubnet = "string",
AdditionalSubnets = new[]
{
"string",
},
DnsDomainNames = new[]
{
"string",
},
},
},
},
},
KafkaCluster = "string",
Location = "string",
Labels =
{
{ "string", "string" },
},
Project = "string",
});
example, err := managedkafka.NewConnectCluster(ctx, "connectClusterResource", &managedkafka.ConnectClusterArgs{
CapacityConfig: &managedkafka.ConnectClusterCapacityConfigArgs{
MemoryBytes: pulumi.String("string"),
VcpuCount: pulumi.String("string"),
},
ConnectClusterId: pulumi.String("string"),
GcpConfig: &managedkafka.ConnectClusterGcpConfigArgs{
AccessConfig: &managedkafka.ConnectClusterGcpConfigAccessConfigArgs{
NetworkConfigs: managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArray{
&managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs{
PrimarySubnet: pulumi.String("string"),
AdditionalSubnets: pulumi.StringArray{
pulumi.String("string"),
},
DnsDomainNames: pulumi.StringArray{
pulumi.String("string"),
},
},
},
},
},
KafkaCluster: pulumi.String("string"),
Location: pulumi.String("string"),
Labels: pulumi.StringMap{
"string": pulumi.String("string"),
},
Project: pulumi.String("string"),
})
var connectClusterResource = new ConnectCluster("connectClusterResource", ConnectClusterArgs.builder()
.capacityConfig(ConnectClusterCapacityConfigArgs.builder()
.memoryBytes("string")
.vcpuCount("string")
.build())
.connectClusterId("string")
.gcpConfig(ConnectClusterGcpConfigArgs.builder()
.accessConfig(ConnectClusterGcpConfigAccessConfigArgs.builder()
.networkConfigs(ConnectClusterGcpConfigAccessConfigNetworkConfigArgs.builder()
.primarySubnet("string")
.additionalSubnets("string")
.dnsDomainNames("string")
.build())
.build())
.build())
.kafkaCluster("string")
.location("string")
.labels(Map.of("string", "string"))
.project("string")
.build());
connect_cluster_resource = gcp.managedkafka.ConnectCluster("connectClusterResource",
capacity_config={
"memory_bytes": "string",
"vcpu_count": "string",
},
connect_cluster_id="string",
gcp_config={
"access_config": {
"network_configs": [{
"primary_subnet": "string",
"additional_subnets": ["string"],
"dns_domain_names": ["string"],
}],
},
},
kafka_cluster="string",
location="string",
labels={
"string": "string",
},
project="string")
const connectClusterResource = new gcp.managedkafka.ConnectCluster("connectClusterResource", {
capacityConfig: {
memoryBytes: "string",
vcpuCount: "string",
},
connectClusterId: "string",
gcpConfig: {
accessConfig: {
networkConfigs: [{
primarySubnet: "string",
additionalSubnets: ["string"],
dnsDomainNames: ["string"],
}],
},
},
kafkaCluster: "string",
location: "string",
labels: {
string: "string",
},
project: "string",
});
type: gcp:managedkafka:ConnectCluster
properties:
capacityConfig:
memoryBytes: string
vcpuCount: string
connectClusterId: string
gcpConfig:
accessConfig:
networkConfigs:
- additionalSubnets:
- string
dnsDomainNames:
- string
primarySubnet: string
kafkaCluster: string
labels:
string: string
location: string
project: string
ConnectCluster Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
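As an illustration of that note, here is a minimal Python sketch that builds the same nested inputs with the generated argument classes instead of the dictionary literals used in the example above; the project, cluster, and subnet names are placeholders.
import pulumi_gcp as gcp

# Equivalent to capacity_config={"vcpu_count": "12", "memory_bytes": "21474836480"}
capacity = gcp.managedkafka.ConnectClusterCapacityConfigArgs(
    vcpu_count="12",
    memory_bytes="21474836480",
)

# Equivalent to the nested dictionary form of gcp_config
gcp_config = gcp.managedkafka.ConnectClusterGcpConfigArgs(
    access_config=gcp.managedkafka.ConnectClusterGcpConfigAccessConfigArgs(
        network_configs=[gcp.managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs(
            primary_subnet="projects/my-project/regions/us-central1/subnetworks/my-subnetwork",
        )],
    ),
)

example_args = gcp.managedkafka.ConnectCluster("example-args",
    connect_cluster_id="my-connect-cluster",
    location="us-central1",
    kafka_cluster="projects/my-project/locations/us-central1/clusters/my-cluster",
    capacity_config=capacity,
    gcp_config=gcp_config)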
The ConnectCluster resource accepts the following input properties:
Property names are shown below in their camelCase schema form; each language SDK applies its own casing (for example connect_cluster_id in Python or ConnectClusterId in C#).
- capacityConfig (ConnectClusterCapacityConfig) - A capacity configuration of a Kafka cluster. Structure is documented below.
- connectClusterId (string) - The ID to use for the Connect Cluster, which will become the final component of the connect cluster's name. This value is structured like: my-connect-cluster-id.
- gcpConfig (ConnectClusterGcpConfig) - Configuration properties for a Kafka Connect cluster deployed to Google Cloud Platform. Structure is documented below.
- kafkaCluster (string) - The name of the Kafka cluster this Kafka Connect cluster is attached to. Structured like: projects/PROJECT_ID/locations/LOCATION/clusters/CLUSTER_ID.
- location (string) - ID of the location of the Kafka Connect resource. See https://cloud.google.com/managed-kafka/docs/locations for a list of supported locations.
- labels (map<string, string>) - List of label KEY=VALUE pairs to add. Keys must start with a lowercase character and contain only hyphens (-), underscores (_), lowercase characters, and numbers. Values must contain only hyphens (-), underscores (_), lowercase characters, and numbers. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- project (string)
Outputs
All input properties are implicitly available as output properties. Additionally, the ConnectCluster resource produces the following output properties:
- createTime (string) - The time when the cluster was created.
- effectiveLabels (map<string, string>) - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- id (string) - The provider-assigned unique ID for this managed resource.
- name (string) - The name of the connect cluster. Structured like: projects/PROJECT_ID/locations/LOCATION/connectClusters/CONNECT_CLUSTER_ID.
- pulumiLabels (map<string, string>) - The combination of labels configured directly on the resource and default labels configured on the provider.
- state (string) - The current state of the connect cluster. Possible values: STATE_UNSPECIFIED, CREATING, ACTIVE, DELETING.
- updateTime (string) - The time when the cluster was last updated.
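For instance, a short Python sketch that surfaces a few of these outputs as stack exports, assuming example is the ConnectCluster resource from the usage example above:
import pulumi

pulumi.export("connect_cluster_name", example.name)
pulumi.export("connect_cluster_state", example.state)
pulumi.export("connect_cluster_create_time", example.create_time)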
Look up Existing ConnectCluster Resource
Get an existing ConnectCluster resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: ConnectClusterState, opts?: CustomResourceOptions): ConnectCluster
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
capacity_config: Optional[ConnectClusterCapacityConfigArgs] = None,
connect_cluster_id: Optional[str] = None,
create_time: Optional[str] = None,
effective_labels: Optional[Mapping[str, str]] = None,
gcp_config: Optional[ConnectClusterGcpConfigArgs] = None,
kafka_cluster: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
pulumi_labels: Optional[Mapping[str, str]] = None,
state: Optional[str] = None,
update_time: Optional[str] = None) -> ConnectCluster
func GetConnectCluster(ctx *Context, name string, id IDInput, state *ConnectClusterState, opts ...ResourceOption) (*ConnectCluster, error)
public static ConnectCluster Get(string name, Input<string> id, ConnectClusterState? state, CustomResourceOptions? opts = null)
public static ConnectCluster get(String name, Output<String> id, ConnectClusterState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:managedkafka:ConnectCluster
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
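For example, a minimal Python sketch that looks up an existing Connect cluster by its fully qualified resource ID (project, location, and cluster ID are placeholders); the lookup only reads state and does not create or modify anything:
import pulumi_gcp as gcp

existing = gcp.managedkafka.ConnectCluster.get(
    "existing-connect-cluster",
    "projects/my-project/locations/us-central1/connectClusters/my-connect-cluster",
)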
The following state arguments are supported:
- capacityConfig (ConnectClusterCapacityConfig) - A capacity configuration of a Kafka cluster. Structure is documented below.
- connectClusterId (string) - The ID to use for the Connect Cluster, which will become the final component of the connect cluster's name. This value is structured like: my-connect-cluster-id.
- createTime (string) - The time when the cluster was created.
- effectiveLabels (map<string, string>) - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- gcpConfig (ConnectClusterGcpConfig) - Configuration properties for a Kafka Connect cluster deployed to Google Cloud Platform. Structure is documented below.
- kafkaCluster (string) - The name of the Kafka cluster this Kafka Connect cluster is attached to. Structured like: projects/PROJECT_ID/locations/LOCATION/clusters/CLUSTER_ID.
- labels (map<string, string>) - List of label KEY=VALUE pairs to add. Keys must start with a lowercase character and contain only hyphens (-), underscores (_), lowercase characters, and numbers. Values must contain only hyphens (-), underscores (_), lowercase characters, and numbers. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location (string) - ID of the location of the Kafka Connect resource. See https://cloud.google.com/managed-kafka/docs/locations for a list of supported locations.
- name (string) - The name of the connect cluster. Structured like: projects/PROJECT_ID/locations/LOCATION/connectClusters/CONNECT_CLUSTER_ID.
- project (string)
- pulumiLabels (map<string, string>) - The combination of labels configured directly on the resource and default labels configured on the provider.
- state (string) - The current state of the connect cluster. Possible values: STATE_UNSPECIFIED, CREATING, ACTIVE, DELETING.
- updateTime (string) - The time when the cluster was last updated.
Supporting Types
ConnectClusterCapacityConfig, ConnectClusterCapacityConfigArgs
- memoryBytes (string) - The memory to provision for the cluster in bytes. The CPU:memory ratio (vCPU:GiB) must be between 1:1 and 1:8. Minimum: 3221225472 (3 GiB).
- vcpuCount (string) - The number of vCPUs to provision for the cluster. The minimum is 3.
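As a quick check of the ratio rule: 12 vCPUs allow between 12 GiB (1:1) and 96 GiB (1:8) of memory, so the 20 GiB used in the example above is valid. A hedged Python sketch of such a block:
import pulumi_gcp as gcp

# 12 vCPUs with 20 GiB of memory: 20/12 ≈ 1.67 GiB per vCPU, inside the allowed 1:1 to 1:8 range.
capacity = gcp.managedkafka.ConnectClusterCapacityConfigArgs(
    vcpu_count="12",
    memory_bytes=str(20 * 1024**3),  # "21474836480"
)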
ConnectClusterGcpConfig, ConnectClusterGcpConfigArgs
- accessConfig (ConnectClusterGcpConfigAccessConfig) - The configuration of access to the Kafka Connect cluster. Structure is documented below.
ConnectClusterGcpConfigAccessConfig, ConnectClusterGcpConfigAccessConfigArgs
- networkConfigs (list<ConnectClusterGcpConfigAccessConfigNetworkConfig>) - Virtual Private Cloud (VPC) subnets where IP addresses for the Kafka Connect cluster are allocated. To make the connect cluster available in a VPC, you must specify at least one subnet per network. You must specify between 1 and 10 subnets. Additional subnets may be specified with additional network_configs blocks. Structure is documented below.
ConnectClusterGcpConfigAccessConfigNetworkConfig, ConnectClusterGcpConfigAccessConfigNetworkConfigArgs
- primarySubnet (string) - VPC subnet to make available to the Kafka Connect cluster. Structured like: projects/{project}/regions/{region}/subnetworks/{subnet_id}. It is used to create a Private Service Connect (PSC) interface for the Kafka Connect workers. It must be located in the same region as the Kafka Connect cluster. The CIDR range of the subnet must be within the IPv4 address ranges for private networks, as specified in RFC 1918. The primary subnet CIDR range must have a minimum size of /22 (1024 addresses).
- additionalSubnets (list<string>) - Additional subnets may be specified. They may be in another region, but must be in the same VPC network. The Connect workers can communicate with network endpoints in either the primary or additional subnets.
- dnsDomainNames (list<string>) - Additional DNS domain names from the subnet's network to be made visible to the Connect Cluster. When using MirrorMaker2, it's necessary to add the bootstrap address's DNS domain name of the target cluster to make it visible to the connector. For example: my-kafka-cluster.us-central1.managedkafka.my-project.cloud.goog
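To illustrate, a hedged Python sketch of a single network config that also exposes a target cluster's bootstrap DNS domain (for example, for a MirrorMaker2 connector); all project, subnet, and cluster names are placeholders:
import pulumi_gcp as gcp

network_config = gcp.managedkafka.ConnectClusterGcpConfigAccessConfigNetworkConfigArgs(
    # Primary subnet in the same region as the Connect cluster; CIDR of /22 or larger.
    primary_subnet="projects/my-project/regions/us-central1/subnetworks/my-subnetwork",
    # Optional extra subnets in the same VPC network (they may be in another region).
    additional_subnets=["projects/my-project/regions/us-east1/subnetworks/my-additional-subnetwork"],
    # DNS domain of the target Kafka cluster, made visible to the Connect workers.
    dns_domain_names=["my-kafka-cluster.us-central1.managedkafka.my-project.cloud.goog"],
)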
Import
ConnectCluster can be imported using any of these accepted formats:
projects/{{project}}/locations/{{location}}/connectClusters/{{connect_cluster_id}}
{{project}}/{{location}}/{{connect_cluster_id}}
{{location}}/{{connect_cluster_id}}
When using the pulumi import command, ConnectCluster can be imported using one of the formats above. For example:
$ pulumi import gcp:managedkafka/connectCluster:ConnectCluster default projects/{{project}}/locations/{{location}}/connectClusters/{{connect_cluster_id}}
$ pulumi import gcp:managedkafka/connectCluster:ConnectCluster default {{project}}/{{location}}/{{connect_cluster_id}}
$ pulumi import gcp:managedkafka/connectCluster:ConnectCluster default {{location}}/{{connect_cluster_id}}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.