diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 878827a1f..b77bc0694 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -53,8 +53,8 @@ jobs:
runs-on: ubuntu-latest
env:
ELASTIC_PASSWORD: password
- KIBANA_USERNAME: kibana_system
- KIBANA_PASSWORD: password
+ KIBANA_SYSTEM_USERNAME: kibana_system
+ KIBANA_SYSTEM_PASSWORD: password
services:
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:${{ matrix.version }}
@@ -72,8 +72,8 @@ jobs:
env:
SERVER_NAME: kibana
ELASTICSEARCH_HOSTS: http://elasticsearch:9200
- ELASTICSEARCH_USERNAME: ${{ env.KIBANA_USERNAME }}
- ELASTICSEARCH_PASSWORD: ${{ env.KIBANA_PASSWORD }}
+ ELASTICSEARCH_USERNAME: ${{ env.KIBANA_SYSTEM_USERNAME }}
+ ELASTICSEARCH_PASSWORD: ${{ env.KIBANA_SYSTEM_PASSWORD }}
ports:
- 5601:5601
@@ -113,8 +113,8 @@ jobs:
ELASTICSEARCH_ENDPOINTS: "http://localhost:9200"
ELASTICSEARCH_USERNAME: "elastic"
ELASTICSEARCH_PASSWORD: ${{ env.ELASTIC_PASSWORD }}
- KIBANA_USERNAME: ${{ env.KIBANA_USERNAME }}
- KIBANA_PASSWORD: ${{ env.KIBANA_PASSWORD }}
+ KIBANA_SYSTEM_USERNAME: ${{ env.KIBANA_SYSTEM_USERNAME }}
+ KIBANA_SYSTEM_PASSWORD: ${{ env.KIBANA_SYSTEM_PASSWORD }}
- name: TF acceptance tests
timeout-minutes: 10
@@ -125,4 +125,4 @@ jobs:
ELASTICSEARCH_ENDPOINTS: "http://localhost:9200"
ELASTICSEARCH_USERNAME: "elastic"
ELASTICSEARCH_PASSWORD: ${{ env.ELASTIC_PASSWORD }}
- KIBANA_ENDPOINTS: "http://localhost:5601"
+ KIBANA_ENDPOINT: "http://localhost:5601"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7a3720fa4..ee10cc58c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@
deletion_protection = false
}
```
+- Add support for managing Kibana spaces ([#272](https://github.com/elastic/terraform-provider-elasticstack/pull/272))
### Fixed
- Respect `ignore_unavailable` and `include_global_state` values when configuring SLM policies ([#224](https://github.com/elastic/terraform-provider-elasticstack/pull/224))
diff --git a/Makefile b/Makefile
index 08a85adb1..5de95ca75 100644
--- a/Makefile
+++ b/Makefile
@@ -24,9 +24,9 @@ ELASTICSEARCH_NETWORK ?= elasticstack-network
ELASTICSEARCH_MEM ?= 1024m
KIBANA_NAME ?= terraform-elasticstack-kb
-KIBANA_ENDPOINTS ?= http://$(KIBANA_NAME):5601
-KIBANA_USERNAME ?= kibana_system
-KIBANA_PASSWORD ?= password
+KIBANA_ENDPOINT ?= http://$(KIBANA_NAME):5601
+KIBANA_SYSTEM_USERNAME ?= kibana_system
+KIBANA_SYSTEM_PASSWORD ?= password
SOURCE_LOCATION ?= $(shell pwd)
@@ -52,7 +52,6 @@ build: lint build-ci ## build the terraform provider
testacc: ## Run acceptance tests
TF_ACC=1 go test -v ./... -count $(ACCTEST_COUNT) -parallel $(ACCTEST_PARALLELISM) $(TESTARGS) -timeout $(ACCTEST_TIMEOUT)
-
.PHONY: test
test: ## Run unit tests
go test -v $(TEST) $(TESTARGS) -timeout=5m -parallel=4
@@ -71,7 +70,7 @@ retry = until [ $$(if [ -z "$$attempt" ]; then echo -n "0"; else echo -n "$$atte
docker-testacc: docker-elasticsearch docker-kibana ## Run acceptance tests in the docker container
@ docker run --rm \
-e ELASTICSEARCH_ENDPOINTS="$(ELASTICSEARCH_ENDPOINTS)" \
- -e KIBANA_ENDPOINTS="$(KIBANA_ENDPOINTS)"
+ -e KIBANA_ENDPOINT="$(KIBANA_ENDPOINT)" \
-e ELASTICSEARCH_USERNAME="$(ELASTICSEARCH_USERNAME)" \
-e ELASTICSEARCH_PASSWORD="$(ELASTICSEARCH_PASSWORD)" \
--network $(ELASTICSEARCH_NETWORK) \
@@ -102,8 +101,9 @@ docker-kibana: docker-network docker-elasticsearch set-kibana-password ## Start
-p 5601:5601 \
-e SERVER_NAME=kibana \
-e ELASTICSEARCH_HOSTS=$(ELASTICSEARCH_ENDPOINTS) \
- -e ELASTICSEARCH_USERNAME=$(KIBANA_USERNAME) \
- -e ELASTICSEARCH_PASSWORD=$(KIBANA_PASSWORD) \
+ -e ELASTICSEARCH_USERNAME=$(KIBANA_SYSTEM_USERNAME) \
+ -e ELASTICSEARCH_PASSWORD=$(KIBANA_SYSTEM_PASSWORD) \
+ -e "logging.root.level=debug" \
--name $(KIBANA_NAME) \
--network $(ELASTICSEARCH_NETWORK) \
docker.elastic.co/kibana/kibana:$(STACK_VERSION); \
@@ -116,8 +116,8 @@ docker-network: ## Create a dedicated network for ES and test runs
fi
.PHONY: set-kibana-password
-set-kibana-password: ## Sets the ES KIBANA_USERNAME's password to KIBANA_PASSWORD. This expects Elasticsearch to be available at localhost:9200
- @ $(call retry, 10, curl -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" http://localhost:9200/_security/user/$(KIBANA_USERNAME)/_password -d "{\"password\":\"$(KIBANA_PASSWORD)\"}" | grep -q "^{}")
+set-kibana-password: ## Sets the ES KIBANA_SYSTEM_USERNAME's password to KIBANA_SYSTEM_PASSWORD. This expects Elasticsearch to be available at localhost:9200
+ @ $(call retry, 10, curl -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" http://localhost:9200/_security/user/$(KIBANA_SYSTEM_USERNAME)/_password -d "{\"password\":\"$(KIBANA_SYSTEM_PASSWORD)\"}" | grep -q "^{}")
.PHONY: docker-clean
docker-clean: ## Try to remove provisioned nodes and assigned network
diff --git a/docs/index.md b/docs/index.md
index 424459afa..e6be9e68e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -109,12 +109,9 @@ Optional:
### Nested Schema for `kibana`
-Required:
-
-- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.
-
Optional:
+- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.
- `insecure` (Boolean) Disable TLS certificate validation
- `password` (String, Sensitive) Password to use for API authentication to Kibana.
- `username` (String) Username to use for API authentication to Kibana.
diff --git a/docs/resources/kibana_space.md b/docs/resources/kibana_space.md
new file mode 100644
index 000000000..ca65af39f
--- /dev/null
+++ b/docs/resources/kibana_space.md
@@ -0,0 +1,53 @@
+---
+subcategory: "Kibana"
+layout: ""
+page_title: "Elasticstack: elasticstack_kibana_space Resource"
+description: |-
+ Creates or updates a Kibana space.
+---
+
+# Resource: elasticstack_kibana_space
+
+Creates or updates a Kibana space. See https://www.elastic.co/guide/en/kibana/master/xpack-spaces.html
+
+## Example Usage
+
+```terraform
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+resource "elasticstack_kibana_space" "example" {
+ space_id = "test_space"
+ name = "Test Space"
+ description = "A fresh space for testing visualisations"
+ initials = "ts"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The display name for the space.
+- `space_id` (String) The space ID that is part of the Kibana URL when inside the space.
+
+### Optional
+
+- `color` (String) The hexadecimal color code used in the space avatar. By default, the color is automatically generated from the space name.
+- `description` (String) The description for the space.
+- `disabled_features` (Set of String) The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html).
+- `initials` (String) The initials shown in the space avatar. By default, the initials are automatically generated from the space name. Initials must be 1 or 2 characters.
+
+### Read-Only
+
+- `id` (String) Internal identifier of the resource.
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+terraform import elasticstack_kibana_space.my_space <cluster_uuid>/<space_id>
+```
diff --git a/examples/resources/elasticstack_kibana_space/import.sh b/examples/resources/elasticstack_kibana_space/import.sh
new file mode 100644
index 000000000..e79c1dc1c
--- /dev/null
+++ b/examples/resources/elasticstack_kibana_space/import.sh
@@ -0,0 +1 @@
+terraform import elasticstack_kibana_space.my_space <cluster_uuid>/<space_id>
diff --git a/examples/resources/elasticstack_kibana_space/resource.tf b/examples/resources/elasticstack_kibana_space/resource.tf
new file mode 100644
index 000000000..c42be10f5
--- /dev/null
+++ b/examples/resources/elasticstack_kibana_space/resource.tf
@@ -0,0 +1,10 @@
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+resource "elasticstack_kibana_space" "example" {
+ space_id = "test_space"
+ name = "Test Space"
+ description = "A fresh space for testing visualisations"
+ initials = "ts"
+}
diff --git a/internal/acctest/acctest.go b/internal/acctest/acctest.go
index b3c6c472e..09939cddc 100644
--- a/internal/acctest/acctest.go
+++ b/internal/acctest/acctest.go
@@ -25,17 +25,22 @@ func init() {
}
func PreCheck(t *testing.T) {
- _, endpointsOk := os.LookupEnv("ELASTICSEARCH_ENDPOINTS")
+ _, elasticsearchEndpointsOk := os.LookupEnv("ELASTICSEARCH_ENDPOINTS")
+ _, kibanaEndpointOk := os.LookupEnv("KIBANA_ENDPOINT")
_, userOk := os.LookupEnv("ELASTICSEARCH_USERNAME")
_, passOk := os.LookupEnv("ELASTICSEARCH_PASSWORD")
- _, apikeyOk := os.LookupEnv("ELASTICSEARCH_API_KEY")
- if !endpointsOk {
+ if !elasticsearchEndpointsOk {
t.Fatal("ELASTICSEARCH_ENDPOINTS must be set for acceptance tests to run")
}
+ if !kibanaEndpointOk {
+ t.Fatal("KIBANA_ENDPOINT must be set for acceptance tests to run")
+ }
+
+ // Technically ES tests can use the API Key, however username/password is required for Kibana tests.
usernamePasswordOk := userOk && passOk
- if !((!usernamePasswordOk && apikeyOk) || (usernamePasswordOk && !apikeyOk)) {
- t.Fatal("Either ELASTICSEARCH_USERNAME and ELASTICSEARCH_PASSWORD must be set, or ELASTICSEARCH_API_KEY must be set for acceptance tests to run")
+ if !usernamePasswordOk {
+ t.Fatal("ELASTICSEARCH_USERNAME and ELASTICSEARCH_PASSWORD must be set for acceptance tests to run")
}
}
diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go
index 0514ee4ba..e4b04bd68 100644
--- a/internal/clients/api_client.go
+++ b/internal/clients/api_client.go
@@ -65,36 +65,61 @@ type ApiClient struct {
func NewApiClientFunc(version string) func(context.Context, *schema.ResourceData) (interface{}, diag.Diagnostics) {
return func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) {
- return newApiClient(d, version, true)
+ return newApiClient(d, version)
}
}
func NewAcceptanceTestingClient() (*ApiClient, error) {
- config := elasticsearch.Config{
- Header: buildHeader("tf-acceptance-testing"),
+ baseConfig := BaseConfig{
+ Header: buildHeader("tf-acceptance-testing"),
+ Username: os.Getenv("ELASTICSEARCH_USERNAME"),
+ Password: os.Getenv("ELASTICSEARCH_PASSWORD"),
}
- if es := os.Getenv("ELASTICSEARCH_ENDPOINTS"); es != "" {
- endpoints := make([]string, 0)
- for _, e := range strings.Split(es, ",") {
- endpoints = append(endpoints, strings.TrimSpace(e))
+ buildEsAccClient := func() (*elasticsearch.Client, error) {
+ config := elasticsearch.Config{
+ Header: baseConfig.Header,
+ }
+
+ if apiKey := os.Getenv("ELASTICSEARCH_API_KEY"); apiKey != "" {
+ config.APIKey = apiKey
+ } else {
+ config.Username = baseConfig.Username
+ config.Password = baseConfig.Password
+ }
+
+ if es := os.Getenv("ELASTICSEARCH_ENDPOINTS"); es != "" {
+ endpoints := make([]string, 0)
+ for _, e := range strings.Split(es, ",") {
+ endpoints = append(endpoints, strings.TrimSpace(e))
+ }
+ config.Addresses = endpoints
}
- config.Addresses = endpoints
+
+ return elasticsearch.NewClient(config)
}
- if username := os.Getenv("ELASTICSEARCH_USERNAME"); username != "" {
- config.Username = username
- config.Password = os.Getenv("ELASTICSEARCH_PASSWORD")
- } else {
- config.APIKey = os.Getenv("ELASTICSEARCH_API_KEY")
+ buildKibanaAccClient := func() (*kibana.Client, error) {
+ config := kibana.Config{
+ Username: baseConfig.Username,
+ Password: baseConfig.Password,
+ Address: os.Getenv("KIBANA_ENDPOINT"),
+ }
+
+ return kibana.NewClient(config)
}
- es, err := elasticsearch.NewClient(config)
+ es, err := buildEsAccClient()
+ if err != nil {
+ return nil, err
+ }
+
+ kib, err := buildKibanaAccClient()
if err != nil {
return nil, err
}
- return &ApiClient{es, nil, nil, "acceptance-testing"}, nil
+ return &ApiClient{es, nil, kib, "acceptance-testing"}, nil
}
const esConnectionKey string = "elasticsearch_connection"
@@ -377,7 +402,7 @@ func buildEsClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDefaul
return es, diags
}
-func buildKibanaClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDefault bool) (*kibana.Client, diag.Diagnostics) {
+func buildKibanaClient(d *schema.ResourceData, baseConfig BaseConfig) (*kibana.Client, diag.Diagnostics) {
var diags diag.Diagnostics
kibConn, ok := d.GetOk("kibana")
@@ -395,19 +420,20 @@ func buildKibanaClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDe
if kib := kibConn.([]interface{})[0]; kib != nil {
kibConfig := kib.(map[string]interface{})
- if useEnvAsDefault {
- if username := os.Getenv("KIBANA_USERNAME"); username != "" {
- config.Username = strings.TrimSpace(username)
- }
- if password := os.Getenv("KIBANA_PASSWORD"); password != "" {
- config.Password = strings.TrimSpace(password)
- }
+ if username := os.Getenv("KIBANA_USERNAME"); username != "" {
+ config.Username = strings.TrimSpace(username)
+ }
+ if password := os.Getenv("KIBANA_PASSWORD"); password != "" {
+ config.Password = strings.TrimSpace(password)
+ }
+ if endpoint := os.Getenv("KIBANA_ENDPOINT"); endpoint != "" {
+ config.Address = endpoint
}
- if username, ok := kibConfig["username"]; ok {
+ if username, ok := kibConfig["username"]; ok && username != "" {
config.Username = username.(string)
}
- if password, ok := kibConfig["password"]; ok {
+ if password, ok := kibConfig["password"]; ok && password != "" {
config.Password = password.(string)
}
@@ -425,6 +451,10 @@ func buildKibanaClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDe
kib, err := kibana.NewClient(config)
+	if err == nil && logging.IsDebugOrHigher() {
+		kib.Client.SetDebug(true)
+	}
+
if err != nil {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
@@ -438,16 +468,16 @@ func buildKibanaClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDe
const esKey string = "elasticsearch"
-func newApiClient(d *schema.ResourceData, version string, useEnvAsDefault bool) (*ApiClient, diag.Diagnostics) {
+func newApiClient(d *schema.ResourceData, version string) (*ApiClient, diag.Diagnostics) {
var diags diag.Diagnostics
baseConfig := buildBaseConfig(d, version, esKey)
- esClient, diags := buildEsClient(d, baseConfig, useEnvAsDefault, esKey)
+ esClient, diags := buildEsClient(d, baseConfig, true, esKey)
if diags.HasError() {
return nil, diags
}
- kibanaClient, diags := buildKibanaClient(d, baseConfig, useEnvAsDefault)
+ kibanaClient, diags := buildKibanaClient(d, baseConfig)
if diags.HasError() {
return nil, diags
}
diff --git a/internal/clients/debug.go b/internal/clients/debug.go
index 0f4e55cdf..eca356b99 100644
--- a/internal/clients/debug.go
+++ b/internal/clients/debug.go
@@ -38,11 +38,11 @@ func (l *debugLogger) LogRoundTrip(req *http.Request, resp *http.Response, err e
}
tflog.Debug(ctx, fmt.Sprintf("%s request [%s] executed. Took %s. %#v", l.Name, requestId, duration, err))
- if req != nil {
+ if req != nil && req.Body != nil {
l.logRequest(ctx, req, requestId)
}
- if resp != nil {
+ if resp != nil && resp.Body != nil {
l.logResponse(ctx, resp, requestId)
}
diff --git a/internal/kibana/space.go b/internal/kibana/space.go
new file mode 100644
index 000000000..5ead41040
--- /dev/null
+++ b/internal/kibana/space.go
@@ -0,0 +1,199 @@
+package kibana
+
+import (
+ "context"
+
+ "github.com/disaster37/go-kibana-rest/v8/kbapi"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
+)
+
+func ResourceSpace() *schema.Resource {
+ apikeySchema := map[string]*schema.Schema{
+ "id": {
+ Description: "Internal identifier of the resource.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "space_id": {
+ Description: "The space ID that is part of the Kibana URL when inside the space.",
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ },
+ "name": {
+ Description: "The display name for the space.",
+ Type: schema.TypeString,
+ Required: true,
+ },
+ "description": {
+ Description: "The description for the space.",
+ Type: schema.TypeString,
+ Optional: true,
+ },
+ "disabled_features": {
+ Description: "The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html).",
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: &schema.Schema{
+ Type: schema.TypeString,
+ },
+ },
+ "initials": {
+ Description: "The initials shown in the space avatar. By default, the initials are automatically generated from the space name. Initials must be 1 or 2 characters.",
+ Type: schema.TypeString,
+ Optional: true,
+ ValidateFunc: validation.StringLenBetween(1, 2),
+ },
+ "color": {
+ Description: "The hexadecimal color code used in the space avatar. By default, the color is automatically generated from the space name.",
+ Type: schema.TypeString,
+ Optional: true,
+ },
+ }
+
+ return &schema.Resource{
+ Description: "Creates a Kibana space. See, https://www.elastic.co/guide/en/kibana/master/spaces-api-post.html",
+
+ CreateContext: resourceSpaceUpsert,
+ UpdateContext: resourceSpaceUpsert,
+ ReadContext: resourceSpaceRead,
+ DeleteContext: resourceSpaceDelete,
+
+ Importer: &schema.ResourceImporter{
+ StateContext: schema.ImportStatePassthroughContext,
+ },
+
+ Schema: apikeySchema,
+ }
+}
+
+func resourceSpaceUpsert(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client, diags := clients.NewApiClient(d, meta)
+ if diags.HasError() {
+ return diags
+ }
+
+ kibana, err := client.GetKibanaClient()
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ space := kbapi.KibanaSpace{
+ ID: d.Get("space_id").(string),
+ Name: d.Get("name").(string),
+ }
+
+ if description, ok := d.GetOk("description"); ok {
+ space.Description = description.(string)
+ }
+
+	if disabledFeatures, ok := d.GetOk("disabled_features"); ok {
+		for _, f := range disabledFeatures.(*schema.Set).List() { space.DisabledFeatures = append(space.DisabledFeatures, f.(string)) }
+	}
+
+ if initials, ok := d.GetOk("initials"); ok {
+ space.Initials = initials.(string)
+ }
+
+ if color, ok := d.GetOk("color"); ok {
+ space.Color = color.(string)
+ }
+
+ var spaceResponse *kbapi.KibanaSpace
+
+ if d.IsNewResource() {
+ spaceResponse, err = kibana.KibanaSpaces.Create(&space)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ } else {
+ spaceResponse, err = kibana.KibanaSpaces.Update(&space)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ }
+
+ id, diags := client.ID(ctx, spaceResponse.ID)
+ if diags.HasError() {
+ return diags
+ }
+
+ d.SetId(id.String())
+
+ return resourceSpaceRead(ctx, d, meta)
+}
+
+func resourceSpaceRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client, diags := clients.NewApiClient(d, meta)
+ if diags.HasError() {
+ return diags
+ }
+ compId, diags := clients.CompositeIdFromStr(d.Id())
+ if diags.HasError() {
+ return diags
+ }
+ id := compId.ResourceId
+
+ kibana, err := client.GetKibanaClient()
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ space, err := kibana.KibanaSpaces.Get(id)
+ if space == nil && err == nil {
+ d.SetId("")
+ return diags
+ }
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ // set the fields
+ if err := d.Set("space_id", space.ID); err != nil {
+ return diag.FromErr(err)
+ }
+ if err := d.Set("name", space.Name); err != nil {
+ return diag.FromErr(err)
+ }
+ if err := d.Set("description", space.Description); err != nil {
+ return diag.FromErr(err)
+ }
+ if err := d.Set("disabled_features", space.DisabledFeatures); err != nil {
+ return diag.FromErr(err)
+ }
+ if err := d.Set("initials", space.Initials); err != nil {
+ return diag.FromErr(err)
+ }
+ if err := d.Set("color", space.Color); err != nil {
+ return diag.FromErr(err)
+ }
+
+ return diags
+}
+
+func resourceSpaceDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client, diags := clients.NewApiClient(d, meta)
+ if diags.HasError() {
+ return diags
+ }
+ compId, diags := clients.CompositeIdFromStr(d.Id())
+ if diags.HasError() {
+ return diags
+ }
+
+ kibana, err := client.GetKibanaClient()
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ err = kibana.KibanaSpaces.Delete(compId.ResourceId)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ d.SetId("")
+ return diags
+}
diff --git a/internal/kibana/space_test.go b/internal/kibana/space_test.go
new file mode 100644
index 000000000..0ee16901a
--- /dev/null
+++ b/internal/kibana/space_test.go
@@ -0,0 +1,98 @@
+package kibana_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/acctest"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+)
+
+func TestAccResourceSpace(t *testing.T) {
+ spaceId := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum)
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { acctest.PreCheck(t) },
+ CheckDestroy: checkResourceSpaceDestroy,
+ ProtoV5ProviderFactories: acctest.Providers,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccResourceSpaceCreate(spaceId),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "space_id", spaceId),
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "name", fmt.Sprintf("Name %s", spaceId)),
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "description", "Test Space"),
+ ),
+ },
+ {
+ Config: testAccResourceSpaceUpdate(spaceId),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "space_id", spaceId),
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "name", fmt.Sprintf("Updated %s", spaceId)),
+ resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "description", "Updated space description"),
+ ),
+ },
+ },
+ })
+}
+
+func testAccResourceSpaceCreate(id string) string {
+ return fmt.Sprintf(`
+provider "elasticstack" {
+ elasticsearch {}
+ kibana {}
+}
+
+resource "elasticstack_kibana_space" "test_space" {
+ space_id = "%s"
+ name = "%s"
+ description = "Test Space"
+}
+ `, id, fmt.Sprintf("Name %s", id))
+}
+
+func testAccResourceSpaceUpdate(id string) string {
+ return fmt.Sprintf(`
+provider "elasticstack" {
+ elasticsearch {}
+ kibana {}
+}
+
+resource "elasticstack_kibana_space" "test_space" {
+ space_id = "%s"
+ name = "%s"
+ description = "Updated space description"
+}
+ `, id, fmt.Sprintf("Updated %s", id))
+}
+
+func checkResourceSpaceDestroy(s *terraform.State) error {
+ client, err := clients.NewAcceptanceTestingClient()
+ if err != nil {
+ return err
+ }
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "elasticstack_kibana_space" {
+ continue
+ }
+ compId, _ := clients.CompositeIdFromStr(rs.Primary.ID)
+
+ kibanaClient, err := client.GetKibanaClient()
+ if err != nil {
+ return err
+ }
+ res, err := kibanaClient.KibanaSpaces.Get(compId.ResourceId)
+ if err != nil {
+ return err
+ }
+
+ if res != nil {
+ return fmt.Errorf("Space (%s) still exists", compId.ResourceId)
+ }
+ }
+ return nil
+}
diff --git a/internal/schema/connection.go b/internal/schema/connection.go
index fbecd238d..aa20c1e58 100644
--- a/internal/schema/connection.go
+++ b/internal/schema/connection.go
@@ -147,9 +147,9 @@ func GetKibanaConnectionSchema() *schema.Schema {
"endpoints": {
Description: "A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.",
Type: schema.TypeList,
- MaxItems: 1, // Current API restriction
- Required: true,
+ Optional: true,
Sensitive: true,
+ MaxItems: 1, // Current API restriction
Elem: &schema.Schema{
Type: schema.TypeString,
},
diff --git a/provider/provider.go b/provider/provider.go
index ccd29bc81..ea0e7dd49 100644
--- a/provider/provider.go
+++ b/provider/provider.go
@@ -7,6 +7,7 @@ import (
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/ingest"
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/logstash"
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security"
+ "github.com/elastic/terraform-provider-elasticstack/internal/kibana"
providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
@@ -86,6 +87,8 @@ func New(version string) *schema.Provider {
"elasticstack_elasticsearch_snapshot_lifecycle": cluster.ResourceSlm(),
"elasticstack_elasticsearch_snapshot_repository": cluster.ResourceSnapshotRepository(),
"elasticstack_elasticsearch_script": cluster.ResourceScript(),
+
+ "elasticstack_kibana_space": kibana.ResourceSpace(),
},
}
diff --git a/templates/resources/kibana_space.md.tmpl b/templates/resources/kibana_space.md.tmpl
new file mode 100644
index 000000000..5033fd7d2
--- /dev/null
+++ b/templates/resources/kibana_space.md.tmpl
@@ -0,0 +1,23 @@
+---
+subcategory: "Kibana"
+layout: ""
+page_title: "Elasticstack: elasticstack_kibana_space Resource"
+description: |-
+ Creates or updates a Kibana space.
+---
+
+# Resource: elasticstack_kibana_space
+
+Creates or updates a Kibana space. See https://www.elastic.co/guide/en/kibana/master/xpack-spaces.html
+
+## Example Usage
+
+{{ tffile "examples/resources/elasticstack_kibana_space/resource.tf" }}
+
+{{ .SchemaMarkdown | trimspace }}
+
+## Import
+
+Import is supported using the following syntax:
+
+{{ codefile "shell" "examples/resources/elasticstack_kibana_space/import.sh" }}