1. Packages
  2. Ionoscloud
  3. API Docs
  4. kafka
  5. getTopic
IonosCloud v0.2.3 published on Tuesday, May 13, 2025 by ionos-cloud

ionoscloud.kafka.getTopic

Explore with Pulumi AI

ionoscloud logo
IonosCloud v0.2.3 published on Tuesday, May 13, 2025 by ionos-cloud

    The Kafka topic data source can be used to search for and return an existing Kafka Cluster Topic. You can provide a string for the name parameter which will be compared with provisioned Kafka Cluster Topics. If a single match is found, it will be returned. If your search results in multiple matches, an error will be returned. When this happens, please refine your search string so that it is specific enough to return only one result.

    Example Usage

    By ID

    import * as pulumi from "@pulumi/pulumi";
    import * as ionoscloud from "@pulumi/ionoscloud";
    
    const example = ionoscloud.kafka.getTopic({
        id: "your_kafka_cluster_topic_id",
        clusterId: "your_kafka_cluster_id",
        location: "your_kafka_cluster_location",
    });
    
    import pulumi
    import pulumi_ionoscloud as ionoscloud
    
    example = ionoscloud.kafka.get_topic(id="your_kafka_cluster_topic_id",
        cluster_id="your_kafka_cluster_id",
        location="your_kafka_cluster_location")
    
    package main
    
    import (
    	"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := kafka.LookupTopic(ctx, &kafka.LookupTopicArgs{
    			Id:        pulumi.StringRef("your_kafka_cluster_topic_id"),
    			ClusterId: "your_kafka_cluster_id",
    			Location:  "your_kafka_cluster_location",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Ionoscloud = Pulumi.Ionoscloud;
    
    return await Deployment.RunAsync(() => 
    {
        var example = Ionoscloud.Kafka.GetTopic.Invoke(new()
        {
            Id = "your_kafka_cluster_topic_id",
            ClusterId = "your_kafka_cluster_id",
            Location = "your_kafka_cluster_location",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.ionoscloud.kafka.KafkaFunctions;
    import com.pulumi.ionoscloud.kafka.inputs.GetTopicArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var example = KafkaFunctions.getTopic(GetTopicArgs.builder()
                .id("your_kafka_cluster_topic_id")
                .clusterId("your_kafka_cluster_id")
                .location("your_kafka_cluster_location")
                .build());
    
        }
    }
    
    variables:
      example:
        fn::invoke:
          function: ionoscloud:kafka:getTopic
          arguments:
            id: your_kafka_cluster_topic_id
            clusterId: your_kafka_cluster_id
            location: your_kafka_cluster_location
    

    By Name

    The resource needs to have been created beforehand, or a depends_on clause must be used to ensure that the resource is created before this data source is invoked.

    import * as pulumi from "@pulumi/pulumi";
    import * as ionoscloud from "@pulumi/ionoscloud";
    
    const example = ionoscloud.kafka.getTopic({
        name: "kafka-cluster-topic",
        clusterId: "your_kafka_cluster_id",
        location: "location_of_kafka_cluster",
    });
    
    import pulumi
    import pulumi_ionoscloud as ionoscloud
    
    example = ionoscloud.kafka.get_topic(name="kafka-cluster-topic",
        cluster_id="your_kafka_cluster_id",
        location="location_of_kafka_cluster")
    
    package main
    
    import (
    	"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := kafka.LookupTopic(ctx, &kafka.LookupTopicArgs{
    			Name:      pulumi.StringRef("kafka-cluster-topic"),
    			ClusterId: "your_kafka_cluster_id",
    			Location:  "location_of_kafka_cluster",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Ionoscloud = Pulumi.Ionoscloud;
    
    return await Deployment.RunAsync(() => 
    {
        var example = Ionoscloud.Kafka.GetTopic.Invoke(new()
        {
            Name = "kafka-cluster-topic",
            ClusterId = "your_kafka_cluster_id",
            Location = "location_of_kafka_cluster",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.ionoscloud.kafka.KafkaFunctions;
    import com.pulumi.ionoscloud.kafka.inputs.GetTopicArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var example = KafkaFunctions.getTopic(GetTopicArgs.builder()
                .name("kafka-cluster-topic")
                .clusterId("your_kafka_cluster_id")
                .location("location_of_kafka_cluster")
                .build());
    
        }
    }
    
    variables:
      example:
        fn::invoke:
          function: ionoscloud:kafka:getTopic
          arguments:
            name: kafka-cluster-topic
            clusterId: your_kafka_cluster_id
            location: location_of_kafka_cluster
    

    Using getTopic

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getTopic(args: GetTopicArgs, opts?: InvokeOptions): Promise<GetTopicResult>
    function getTopicOutput(args: GetTopicOutputArgs, opts?: InvokeOptions): Output<GetTopicResult>
    def get_topic(cluster_id: Optional[str] = None,
                  id: Optional[str] = None,
                  location: Optional[str] = None,
                  name: Optional[str] = None,
                  partial_match: Optional[bool] = None,
                  opts: Optional[InvokeOptions] = None) -> GetTopicResult
    def get_topic_output(cluster_id: Optional[pulumi.Input[str]] = None,
                  id: Optional[pulumi.Input[str]] = None,
                  location: Optional[pulumi.Input[str]] = None,
                  name: Optional[pulumi.Input[str]] = None,
                  partial_match: Optional[pulumi.Input[bool]] = None,
                  opts: Optional[InvokeOptions] = None) -> Output[GetTopicResult]
    func LookupTopic(ctx *Context, args *LookupTopicArgs, opts ...InvokeOption) (*LookupTopicResult, error)
    func LookupTopicOutput(ctx *Context, args *LookupTopicOutputArgs, opts ...InvokeOption) LookupTopicResultOutput

    > Note: This function is named LookupTopic in the Go SDK.

    public static class GetTopic 
    {
        public static Task<GetTopicResult> InvokeAsync(GetTopicArgs args, InvokeOptions? opts = null)
        public static Output<GetTopicResult> Invoke(GetTopicInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetTopicResult> getTopic(GetTopicArgs args, InvokeOptions options)
    public static Output<GetTopicResult> getTopic(GetTopicArgs args, InvokeOptions options)
    
    fn::invoke:
      function: ionoscloud:kafka/getTopic:getTopic
      arguments:
        # arguments dictionary

    The following arguments are supported:

    ClusterId string
    ID of the Kafka Cluster that the topic belongs to.
    Location string
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    Id string
    ID of an existing Kafka Cluster Topic that you want to search for.
    Name string
    Name of an existing Kafka Cluster Topic that you want to search for.
    PartialMatch bool
    Whether partial matching is allowed or not when using the name argument. Default value is false.
    ClusterId string
    ID of the Kafka Cluster that the topic belongs to.
    Location string
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    Id string
    ID of an existing Kafka Cluster Topic that you want to search for.
    Name string
    Name of an existing Kafka Cluster Topic that you want to search for.
    PartialMatch bool
    Whether partial matching is allowed or not when using the name argument. Default value is false.
    clusterId String
    ID of the Kafka Cluster that the topic belongs to.
    location String
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    id String
    ID of an existing Kafka Cluster Topic that you want to search for.
    name String
    Name of an existing Kafka Cluster Topic that you want to search for.
    partialMatch Boolean
    Whether partial matching is allowed or not when using the name argument. Default value is false.
    clusterId string
    ID of the Kafka Cluster that the topic belongs to.
    location string
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    id string
    ID of an existing Kafka Cluster Topic that you want to search for.
    name string
    Name of an existing Kafka Cluster Topic that you want to search for.
    partialMatch boolean
    Whether partial matching is allowed or not when using the name argument. Default value is false.
    cluster_id str
    ID of the Kafka Cluster that the topic belongs to.
    location str
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    id str
    ID of an existing Kafka Cluster Topic that you want to search for.
    name str
    Name of an existing Kafka Cluster Topic that you want to search for.
    partial_match bool
    Whether partial matching is allowed or not when using the name argument. Default value is false.
    clusterId String
    ID of the Kafka Cluster that the topic belongs to.
    location String
    The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka Cluster. Possible values: de/fra, de/txl
    id String
    ID of an existing Kafka Cluster Topic that you want to search for.
    name String
    Name of an existing Kafka Cluster Topic that you want to search for.
    partialMatch Boolean
    Whether partial matching is allowed or not when using the name argument. Default value is false.

    getTopic Result

    The following output properties are available:

    ClusterId string
    The id of the Kafka Cluster that the topic belongs to.
    Id string
    UUID of the Kafka Cluster Topic.
    Location string
    Name string
    The name of the Kafka Cluster Topic.
    NumberOfPartitions int
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    ReplicationFactor int
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    RetentionTime int
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    SegmentBytes int
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    PartialMatch bool
    ClusterId string
    The id of the Kafka Cluster that the topic belongs to.
    Id string
    UUID of the Kafka Cluster Topic.
    Location string
    Name string
    The name of the Kafka Cluster Topic.
    NumberOfPartitions int
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    ReplicationFactor int
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    RetentionTime int
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    SegmentBytes int
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    PartialMatch bool
    clusterId String
    The id of the Kafka Cluster that the topic belongs to.
    id String
    UUID of the Kafka Cluster Topic.
    location String
    name String
    The name of the Kafka Cluster Topic.
    numberOfPartitions Integer
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    replicationFactor Integer
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    retentionTime Integer
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    segmentBytes Integer
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    partialMatch Boolean
    clusterId string
    The id of the Kafka Cluster that the topic belongs to.
    id string
    UUID of the Kafka Cluster Topic.
    location string
    name string
    The name of the Kafka Cluster Topic.
    numberOfPartitions number
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    replicationFactor number
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    retentionTime number
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    segmentBytes number
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    partialMatch boolean
    cluster_id str
    The id of the Kafka Cluster that the topic belongs to.
    id str
    UUID of the Kafka Cluster Topic.
    location str
    name str
    The name of the Kafka Cluster Topic.
    number_of_partitions int
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    replication_factor int
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    retention_time int
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    segment_bytes int
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    partial_match bool
    clusterId String
    The id of the Kafka Cluster that the topic belongs to.
    id String
    UUID of the Kafka Cluster Topic.
    location String
    name String
    The name of the Kafka Cluster Topic.
    numberOfPartitions Number
    The number of partitions of the topic. Partitions allow for parallel processing of messages.
    replicationFactor Number
    The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
    retentionTime Number
    This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    segmentBytes Number
    This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
    partialMatch Boolean

    Package Details

    Repository
    ionoscloud ionos-cloud/pulumi-ionoscloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the ionoscloud Terraform Provider.
    ionoscloud logo
    IonosCloud v0.2.3 published on Tuesday, May 13, 2025 by ionos-cloud