Skip to content

Commit 49cbc6b

Browse files
add ckafka topic resource and data_source file
1 parent 3e6afb4 commit 49cbc6b

10 files changed

+1371
-2
lines changed

CHANGELOG.md

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,24 @@
1-
## 1.41.1 (Unreleased)
1+
## 1.41.3 (Unreleased)
2+
3+
FEATURES:
4+
5+
* **New Resource**: `tencentcloud_ckafka_topic`
6+
* **New Data Source**: `tencentcloud_ckafka_topics`
7+
8+
## 1.41.2 (August 28, 2020)
9+
10+
BUG FIXES:
11+
* Resource: `tencentcloud_vpn_connection` fix `security_group_policy` update issue when applied repeatedly.
12+
* Resource: `tencentcloud_vpn_connection` fix inconsistent state when deleted on console.
13+
14+
## 1.41.1 (August 27, 2020)
15+
16+
BUG FIXES:
17+
18+
* Resource: `tencentcloud_vpn_gateway` fix force new issue when applied repeatedly.
19+
* Resource: `tencentcloud_vpn_connection` fix force new issue when applied repeatedly.
20+
* Resource: `tencentcloud_instance` support for adjusting `internet_max_bandwidth_out` without forceNew when attribute `internet_charge_type` within `TRAFFIC_POSTPAID_BY_HOUR`,`BANDWIDTH_POSTPAID_BY_HOUR`,`BANDWIDTH_PACKAGE` ([#498](https://github.com/tencentcloudstack/terraform-provider-tencentcloud/issues/498)).
21+
222
## 1.41.0 (August 17, 2020)
323

424
FEATURES:
@@ -1129,4 +1149,4 @@ RESOURCES:
11291149
* storage read
11301150
* storage update (update name)
11311151
* storage attach
1132-
* storage detach
1152+
* storage detach
Lines changed: 224 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,224 @@
1+
/*
2+
Use this data source to query detailed information of ckafka topic instances.
3+
4+
Example Usage
5+
6+
```hcl
7+
resource "tencentcloud_ckafka_topic" "foo" {
8+
instance_id = "ckafka-f9ife4zz"
9+
topic_name = "example"
10+
note = "topic note"
11+
replica_num = 2
12+
partition_num = 1
13+
enable_white_list = 1
14+
ip_white_list = ["ip1","ip2"]
15+
clean_up_policy = "delete"
16+
sync_replica_min_num = 1
17+
unclean_leader_election_enable = false
18+
segment = 3600000
19+
retention = 60000
20+
max_message_bytes = 0
21+
}
22+
```
23+
*/
24+
package tencentcloud
25+
26+
import (
27+
"context"
28+
"log"
29+
30+
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
31+
"github.com/terraform-providers/terraform-provider-tencentcloud/tencentcloud/internal/helper"
32+
)
33+
34+
func dataSourceTencentCloudCkafkaTopics() *schema.Resource {
35+
return &schema.Resource{
36+
Read: dataSourceTencentCloudCkafkaTopicRead,
37+
38+
Schema: map[string]*schema.Schema{
39+
"instance_id": {
40+
Type: schema.TypeString,
41+
Required: true,
42+
Description: "Ckafka instance ID.",
43+
},
44+
"topic_name": {
45+
Type: schema.TypeString,
46+
Optional: true,
47+
ValidateFunc: validateStringLengthInRange(1, 64),
48+
Description: "Name of the CKafka topic. It must start with a letter, the rest can contain letters, numbers and dashes(-). The length range is from 1 to 64.",
49+
},
50+
"result_output_file": {
51+
Type: schema.TypeString,
52+
Optional: true,
53+
Description: "Used to store results.",
54+
},
55+
// computed
56+
"instance_list": {
57+
Type: schema.TypeList,
58+
Computed: true,
59+
Description: "A list of instances. Each element contains the following attributes.",
60+
Elem: &schema.Resource{
61+
Schema: map[string]*schema.Schema{
62+
"topic_id": {
63+
Type: schema.TypeString,
64+
Computed: true,
65+
Description: "Id of the CKafka topic.",
66+
},
67+
"topic_name": {
68+
Type: schema.TypeString,
69+
Computed: true,
70+
Description: "Name of the CKafka topic. It must start with a letter, the rest can contain letters, numbers and dashes(-). The length range is from 1 to 64.",
71+
},
72+
"partition_num": {
73+
Type: schema.TypeInt,
74+
Computed: true,
75+
Description: "The number of partition.",
76+
},
77+
"replica_num": {
78+
Type: schema.TypeInt,
79+
Computed: true,
80+
Description: "The number of replica, the maximum is 3.",
81+
},
82+
"note": {
83+
Type: schema.TypeString,
84+
Computed: true,
85+
Description: "The subject note is a string of no more than 64 characters. It must start with a letter, and the remaining part can contain letters, numbers and dashes (-).",
86+
},
87+
"create_time": {
88+
Type: schema.TypeString,
89+
Computed: true,
90+
Description: "Create time of the topic instance.",
91+
},
92+
"enable_white_list": {
93+
Type: schema.TypeBool,
94+
Computed: true,
95+
Description: "IP Whitelist switch, 1: open; 0: close.",
96+
},
97+
"ip_white_list_count": {
98+
Type: schema.TypeInt,
99+
Computed: true,
100+
Description: "IP Whitelist count.",
101+
},
102+
"forward_interval": {
103+
Type: schema.TypeInt,
104+
Computed: true,
105+
Description: "Periodic frequency of data backup to cos.",
106+
},
107+
"forward_cos_bucket": {
108+
Type: schema.TypeString,
109+
Computed: true,
110+
Description: "Data backup cos bucket: the bucket address that is dumped to cos.",
111+
},
112+
"forward_status": {
113+
Type: schema.TypeInt,
114+
Computed: true,
115+
Description: "Data backup cos status: 1 do not open data backup, 0 open data backup.",
116+
},
117+
"retention": {
118+
Type: schema.TypeInt,
119+
Computed: true,
120+
Description: "Message can be selected. Retention time, unit ms, the current minimum value is 60000ms.",
121+
},
122+
"sync_replica_min_num": {
123+
Type: schema.TypeInt,
124+
Computed: true,
125+
Description: "Min number of sync replicas, Default is 1.",
126+
},
127+
"clean_up_policy": {
128+
Type: schema.TypeString,
129+
Computed: true,
130+
Description: "Clear log policy, log clear mode, the default is delete. delete: logs are deleted according to the storage time, compact: logs are compressed according to the key, compact, delete: logs are compressed according to the key and will be deleted according to the storage time.",
131+
},
132+
"unclean_leader_election_enable": {
133+
Type: schema.TypeInt,
134+
Computed: true,
135+
Description: "Whether to allow unsynchronized replicas to be selected as leader, false: not allowed, true: allowed, not allowed by default.",
136+
},
137+
"max_message_bytes": {
138+
Type: schema.TypeInt,
139+
Computed: true,
140+
Description: "Max message bytes.",
141+
},
142+
"segment": {
143+
Type: schema.TypeInt,
144+
Computed: true,
145+
Description: "Segment scrolling time, in ms, the current minimum is 3600000ms.",
146+
},
147+
"segment_bytes": {
148+
Type: schema.TypeInt,
149+
Computed: true,
150+
Description: "Number of bytes rolled by shard.",
151+
},
152+
},
153+
},
154+
},
155+
},
156+
}
157+
}
158+
159+
func dataSourceTencentCloudCkafkaTopicRead(d *schema.ResourceData, meta interface{}) error {
160+
defer logElapsed("data_source.tencentcloud_ckafka_topic.read")()
161+
162+
logId := getLogId(contextNil)
163+
ctx := context.WithValue(context.TODO(), logIdKey, logId)
164+
165+
var instanceId, topicName string
166+
if v, ok := d.GetOk("instance_id"); ok {
167+
instanceId = v.(string)
168+
}
169+
if v, ok := d.GetOk("topic_name"); ok {
170+
topicName = v.(string)
171+
}
172+
ckafkcService := CkafkaService{
173+
client: meta.(*TencentCloudClient).apiV3Conn,
174+
}
175+
topicDetails, err := ckafkcService.DescribeCkafkaTopics(ctx, instanceId, topicName)
176+
if err != nil {
177+
return err
178+
}
179+
180+
instanceList := make([]map[string]interface{}, 0, len(topicDetails))
181+
ids := make([]string, 0, len(topicDetails))
182+
183+
for _, topic := range topicDetails {
184+
//configs := []*ckafka.Config{topic.Config}
185+
instance := map[string]interface{}{
186+
"topic_name": topic.TopicName,
187+
"topic_id": topic.TopicId,
188+
"partition_num": topic.PartitionNum,
189+
"replica_num": topic.ReplicaNum,
190+
"note": topic.Note,
191+
"create_time": helper.FormatUnixTime(uint64(*topic.CreateTime)),
192+
"enable_white_list": topic.EnableWhiteList,
193+
"ip_white_list_count": topic.IpWhiteListCount,
194+
"forward_interval": topic.ForwardInterval,
195+
"forward_cos_bucket": topic.ForwardCosBucket,
196+
"forward_status": topic.ForwardStatus,
197+
"retention": topic.Config.Retention,
198+
"sync_replica_min_num": topic.Config.MinInsyncReplicas,
199+
"clean_up_policy": topic.Config.CleanUpPolicy,
200+
"unclean_leader_election_enable": topic.Config.UncleanLeaderElectionEnable,
201+
"max_message_bytes": topic.Config.MaxMessageBytes,
202+
"segment": topic.Config.SegmentMs,
203+
"segment_bytes": topic.Config.SegmentBytes,
204+
}
205+
resourceId := instanceId + FILED_SP + *topic.TopicName
206+
instanceList = append(instanceList, instance)
207+
ids = append(ids, resourceId)
208+
}
209+
210+
d.SetId(helper.DataResourceIdsHash(ids))
211+
if err = d.Set("instance_list", instanceList); err != nil {
212+
log.Printf("[CRITAL]%s provider set ckafka topic instance list fail, reason:%s\n ", logId, err.Error())
213+
return err
214+
}
215+
216+
output, ok := d.GetOk("result_output_file")
217+
if ok && output.(string) != "" {
218+
if err := writeToFile(output.(string), instanceList); err != nil {
219+
return err
220+
}
221+
}
222+
223+
return nil
224+
}
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
package tencentcloud
2+
3+
import (
4+
"testing"
5+
6+
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
7+
)
8+
9+
func TestAccTencentCloudCkafkaTopicDataSource(t *testing.T) {
10+
resource.Test(t, resource.TestCase{
11+
PreCheck: func() { testAccPreCheck(t) },
12+
Providers: testAccProviders,
13+
CheckDestroy: testAccTencentCloudKafkaTopicDestory,
14+
Steps: []resource.TestStep{
15+
{
16+
Config: testAccTencentCloudCkafkaTopicDataSourceConfig,
17+
Check: resource.ComposeTestCheckFunc(
18+
testAccCheckKafkaTopicInstanceExists("tencentcloud_ckafka_topic.kafka_topic"),
19+
resource.TestCheckResourceAttr("data.tencentcloud_ckafka_topics.kafka_topics", "instance_id", "ckafka-f9ife4zz"),
20+
resource.TestCheckResourceAttrSet("data.tencentcloud_ckafka_topics.kafka_topics", "instance_list.#"),
21+
resource.TestCheckResourceAttr("data.tencentcloud_ckafka_topics.kafka_topics", "instance_list.0.topic_name", "ckafkaTopic-tf-test"),
22+
resource.TestCheckResourceAttr("data.tencentcloud_ckafka_topics.kafka_topics", "instance_list.0.partition_num", "1"),
23+
resource.TestCheckResourceAttr("data.tencentcloud_ckafka_topics.kafka_topics", "instance_list.0.replica_num", "2"),
24+
resource.TestCheckResourceAttrSet("data.tencentcloud_ckafka_topics.kafka_topics", "instance_list.0.create_time"),
25+
),
26+
},
27+
},
28+
})
29+
}
30+
31+
// testAccTencentCloudCkafkaTopicDataSourceConfig creates a CKafka topic and
// then queries it back through the tencentcloud_ckafka_topics data source.
const testAccTencentCloudCkafkaTopicDataSourceConfig = `
resource "tencentcloud_ckafka_topic" "kafka_topic" {
	instance_id   = "ckafka-f9ife4zz"
	topic_name    = "ckafkaTopic-tf-test"
	replica_num   = 2
	partition_num = 1
}

data "tencentcloud_ckafka_topics" "kafka_topics" {
	instance_id = tencentcloud_ckafka_topic.kafka_topic.instance_id
	topic_name  = tencentcloud_ckafka_topic.kafka_topic.topic_name
}
`

tencentcloud/provider.go

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,10 +81,12 @@ Ckafka
8181
Data Source
8282
tencentcloud_ckafka_users
8383
tencentcloud_ckafka_acls
84+
tencentcloud_ckafka_topics
8485
8586
Resource
8687
tencentcloud_ckafka_user
8788
tencentcloud_ckafka_acl
89+
tencentcloud_ckafka_topic
8890
8991
Cloud Access Management(CAM)
9092
Data Source
@@ -636,6 +638,7 @@ func Provider() terraform.ResourceProvider {
636638
"tencentcloud_sqlserver_readonly_groups": dataSourceTencentCloudSqlserverReadonlyGroups(),
637639
"tencentcloud_ckafka_users": dataSourceTencentCloudCkafkaUsers(),
638640
"tencentcloud_ckafka_acls": dataSourceTencentCloudCkafkaAcls(),
641+
"tencentcloud_ckafka_topics": dataSourceTencentCloudCkafkaTopics(),
639642
},
640643

641644
ResourcesMap: map[string]*schema.Resource{
@@ -757,6 +760,7 @@ func Provider() terraform.ResourceProvider {
757760
"tencentcloud_sqlserver_readonly_instance": resourceTencentCloudSqlserverReadonlyInstance(),
758761
"tencentcloud_ckafka_user": resourceTencentCloudCkafkaUser(),
759762
"tencentcloud_ckafka_acl": resourceTencentCloudCkafkaAcl(),
763+
"tencentcloud_ckafka_topic": resourceTencentCloudCkafkaTopic(),
760764
},
761765

762766
ConfigureFunc: providerConfigure,

0 commit comments

Comments
 (0)