ionoscloud.kafka.getTopic
Explore with Pulumi AI
The Kafka topic data source can be used to search for and return an existing Kafka Cluster Topic. You can provide a string for the name parameter which will be compared with provisioned Kafka Cluster Topics. If a single match is found, it will be returned. If your search results in multiple matches, an error will be returned. When this happens, please refine your search string so that it is specific enough to return only one result.
Example Usage
By ID
import * as pulumi from "@pulumi/pulumi";
import * as ionoscloud from "@pulumi/ionoscloud";
// Look up an existing Kafka Cluster Topic by its ID within the given cluster and location.
const example = ionoscloud.kafka.getTopic({
id: "your_kafka_cluster_topic_id",
clusterId: "your_kafka_cluster_id",
location: "your_kafka_cluster_location",
});
import pulumi
import pulumi_ionoscloud as ionoscloud
# Look up an existing Kafka Cluster Topic by its ID within the given cluster and location.
example = ionoscloud.kafka.get_topic(id="your_kafka_cluster_topic_id",
cluster_id="your_kafka_cluster_id",
location="your_kafka_cluster_location")
package main
import (
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Look up an existing Kafka Cluster Topic by its ID.
// Id is an optional argument, so it is passed as a *string via pulumi.StringRef.
_, err := kafka.LookupTopic(ctx, &kafka.LookupTopicArgs{
Id: pulumi.StringRef("your_kafka_cluster_topic_id"),
ClusterId: "your_kafka_cluster_id",
Location: "your_kafka_cluster_location",
}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Ionoscloud = Pulumi.Ionoscloud;
return await Deployment.RunAsync(() =>
{
// Look up an existing Kafka Cluster Topic by its ID within the given cluster and location.
var example = Ionoscloud.Kafka.GetTopic.Invoke(new()
{
Id = "your_kafka_cluster_topic_id",
ClusterId = "your_kafka_cluster_id",
Location = "your_kafka_cluster_location",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.ionoscloud.kafka.KafkaFunctions;
import com.pulumi.ionoscloud.kafka.inputs.GetTopicArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Look up an existing Kafka Cluster Topic by its ID within the given cluster and location.
final var example = KafkaFunctions.getTopic(GetTopicArgs.builder()
.id("your_kafka_cluster_topic_id")
.clusterId("your_kafka_cluster_id")
.location("your_kafka_cluster_location")
.build());
}
}
# Look up an existing Kafka Cluster Topic by its ID within the given cluster and location.
variables:
example:
fn::invoke:
function: ionoscloud:kafka:getTopic
arguments:
id: your_kafka_cluster_topic_id
clusterId: your_kafka_cluster_id
location: your_kafka_cluster_location
By Name
The topic resource must have been created beforehand, or a depends_on clause must be used, to ensure the resource exists before this data source is invoked.
import * as pulumi from "@pulumi/pulumi";
import * as ionoscloud from "@pulumi/ionoscloud";
// Look up an existing Kafka Cluster Topic by its name within the given cluster and location.
const example = ionoscloud.kafka.getTopic({
name: "kafka-cluster-topic",
clusterId: "your_kafka_cluster_id",
location: "location_of_kafka_cluster",
});
import pulumi
import pulumi_ionoscloud as ionoscloud
# Look up an existing Kafka Cluster Topic by its name within the given cluster and location.
example = ionoscloud.kafka.get_topic(name="kafka-cluster-topic",
cluster_id="your_kafka_cluster_id",
location="location_of_kafka_cluster")
package main
import (
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Look up an existing Kafka Cluster Topic by its name.
// Name is an optional argument, so it is passed as a *string via pulumi.StringRef.
_, err := kafka.LookupTopic(ctx, &kafka.LookupTopicArgs{
Name: pulumi.StringRef("kafka-cluster-topic"),
ClusterId: "your_kafka_cluster_id",
Location: "location_of_kafka_cluster",
}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Ionoscloud = Pulumi.Ionoscloud;
return await Deployment.RunAsync(() =>
{
// Look up an existing Kafka Cluster Topic by its name within the given cluster and location.
var example = Ionoscloud.Kafka.GetTopic.Invoke(new()
{
Name = "kafka-cluster-topic",
ClusterId = "your_kafka_cluster_id",
Location = "location_of_kafka_cluster",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.ionoscloud.kafka.KafkaFunctions;
import com.pulumi.ionoscloud.kafka.inputs.GetTopicArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Look up an existing Kafka Cluster Topic by its name within the given cluster and location.
final var example = KafkaFunctions.getTopic(GetTopicArgs.builder()
.name("kafka-cluster-topic")
.clusterId("your_kafka_cluster_id")
.location("location_of_kafka_cluster")
.build());
}
}
# Look up an existing Kafka Cluster Topic by its name within the given cluster and location.
variables:
example:
fn::invoke:
function: ionoscloud:kafka:getTopic
arguments:
name: kafka-cluster-topic
clusterId: your_kafka_cluster_id
location: location_of_kafka_cluster
Using getTopic
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getTopic(args: GetTopicArgs, opts?: InvokeOptions): Promise<GetTopicResult>
function getTopicOutput(args: GetTopicOutputArgs, opts?: InvokeOptions): Output<GetTopicResult>
def get_topic(cluster_id: Optional[str] = None,
id: Optional[str] = None,
location: Optional[str] = None,
name: Optional[str] = None,
partial_match: Optional[bool] = None,
opts: Optional[InvokeOptions] = None) -> GetTopicResult
def get_topic_output(cluster_id: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
partial_match: Optional[pulumi.Input[bool]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetTopicResult]
func LookupTopic(ctx *Context, args *LookupTopicArgs, opts ...InvokeOption) (*LookupTopicResult, error)
func LookupTopicOutput(ctx *Context, args *LookupTopicOutputArgs, opts ...InvokeOption) LookupTopicResultOutput
> Note: This function is named LookupTopic
in the Go SDK.
public static class GetTopic
{
public static Task<GetTopicResult> InvokeAsync(GetTopicArgs args, InvokeOptions? opts = null)
public static Output<GetTopicResult> Invoke(GetTopicInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetTopicResult> getTopic(GetTopicArgs args, InvokeOptions options)
public static Output<GetTopicResult> getTopic(GetTopicArgs args, InvokeOptions options)
fn::invoke:
function: ionoscloud:kafka/getTopic:getTopic
arguments:
# arguments dictionary
The following arguments are supported:
- Cluster
Id string - ID of the Kafka Cluster that the topic belongs to.
- Location string
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- Id string
- ID of an existing Kafka Cluster Topic that you want to search for.
- Name string
- Name of an existing Kafka Cluster Topic that you want to search for.
- Partial
Match bool
- Cluster
Id string - ID of the Kafka Cluster that the topic belongs to.
- Location string
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- Id string
- ID of an existing Kafka Cluster Topic that you want to search for.
- Name string
- Name of an existing Kafka Cluster Topic that you want to search for.
- Partial
Match bool
- cluster
Id String - ID of the Kafka Cluster that the topic belongs to.
- location String
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- id String
- ID of an existing Kafka Cluster Topic that you want to search for.
- name String
- Name of an existing Kafka Cluster Topic that you want to search for.
- partial
Match Boolean
- cluster
Id string - ID of the Kafka Cluster that the topic belongs to.
- location string
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- id string
- ID of an existing Kafka Cluster Topic that you want to search for.
- name string
- Name of an existing Kafka Cluster Topic that you want to search for.
- partial
Match boolean
- cluster_
id str - ID of the Kafka Cluster that the topic belongs to.
- location str
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- id str
- ID of an existing Kafka Cluster Topic that you want to search for.
- name str
- Name of an existing Kafka Cluster Topic that you want to search for.
- partial_
match bool
- cluster
Id String - ID of the Kafka Cluster that the topic belongs to.
- location String
- The location of the Kafka Cluster Topic. Must be the same as the location of the Kafka
Cluster. Possible values:
de/fra, de/txl
- id String
- ID of an existing Kafka Cluster Topic that you want to search for.
- name String
- Name of an existing Kafka Cluster Topic that you want to search for.
- partial
Match Boolean
getTopic Result
The following output properties are available:
- Cluster
Id string - The id of the Kafka Cluster that the topic belongs to.
- Id string
- UUID of the Kafka Cluster Topic.
- Location string
- Name string
- The name of the Kafka Cluster Topic.
- NumberOfPartitions int - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- Replication
Factor int - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- Retention
Time int - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- Segment
Bytes int - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- Partial
Match bool
- Cluster
Id string - The id of the Kafka Cluster that the topic belongs to.
- Id string
- UUID of the Kafka Cluster Topic.
- Location string
- Name string
- The name of the Kafka Cluster Topic.
- NumberOfPartitions int - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- Replication
Factor int - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- Retention
Time int - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- Segment
Bytes int - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- Partial
Match bool
- cluster
Id String - The id of the Kafka Cluster that the topic belongs to.
- id String
- UUID of the Kafka Cluster Topic.
- location String
- name String
- The name of the Kafka Cluster Topic.
- numberOfPartitions Integer - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- replication
Factor Integer - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- retention
Time Integer - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- segment
Bytes Integer - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- partial
Match Boolean
- cluster
Id string - The id of the Kafka Cluster that the topic belongs to.
- id string
- UUID of the Kafka Cluster Topic.
- location string
- name string
- The name of the Kafka Cluster Topic.
- numberOfPartitions number - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- replication
Factor number - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- retention
Time number - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- segment
Bytes number - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- partial
Match boolean
- cluster_
id str - The id of the Kafka Cluster that the topic belongs to.
- id str
- UUID of the Kafka Cluster Topic.
- location str
- name str
- The name of the Kafka Cluster Topic.
- number_of_partitions int - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- replication_
factor int - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- retention_
time int - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- segment_
bytes int - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- partial_
match bool
- cluster
Id String - The id of the Kafka Cluster that the topic belongs to.
- id String
- UUID of the Kafka Cluster Topic.
- location String
- name String
- The name of the Kafka Cluster Topic.
- numberOfPartitions Number - The number of partitions of the topic. Partitions allow for parallel processing of messages.
- replication
Factor Number - The number of replicas of the topic. The replication factor determines how many copies of the topic are stored on different brokers.
- retention
Time Number - This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space. This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
- segment
Bytes Number - This configuration controls the segment file size for the log. Retention and cleaning is always done a file at a time so a larger segment size means fewer files but less granular control over retention.
- partial
Match Boolean
Package Details
- Repository
- ionoscloud ionos-cloud/pulumi-ionoscloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
ionoscloud
Terraform Provider.