From a0b6ea74db127108ee31c630b922d3342063aba1 Mon Sep 17 00:00:00 2001
From: Dariusz Porowski <3431813+DariuszPorowski@users.noreply.github.com>
Date: Wed, 12 Feb 2025 22:40:09 +0100
Subject: [PATCH] fix(rs-spark_custom_pool): inconsistent result for
 dynamic_executor_allocation (#240)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# 📥 Pull Request

fix #239

## ❓ What are you trying to address

This pull request addresses the inconsistent result for dynamic executor allocation when the `enabled` attribute is set to `false`, and updates the schema and documentation accordingly.

## ✨ Description of new changes

### Code updates:

* `internal/services/spark/resource_spark_custom_pool.go`:
  * Added new imports for the `attr`, `types`, and `superint32validator` packages.
  * Updated the `min_executors` and `max_executors` attributes to be `Computed` and `Optional`, and added validators tied to the `dynamic_executor_allocation.enabled` attribute (a standalone sketch of this validator pattern follows the documentation hunk below).

### Documentation updates:

* `docs/resources/spark_custom_pool.md`: Added an `Optional` section to the documentation for the `max_executors` and `min_executors` attributes.
---
 .../unreleased/fixed-20250206-214937.yaml          |  5 ++
 docs/resources/spark_custom_pool.md                |  3 ++
 internal/services/spark/base_test.go               | 22 --------
 .../spark/data_spark_custom_pool_test.go           | 18 ++++++-
 .../spark/resource_spark_custom_pool.go            | 29 +++++++++-
 .../spark/resource_spark_custom_pool_test.go       | 53 +++++++++++++++----
 6 files changed, 96 insertions(+), 34 deletions(-)
 create mode 100644 .changes/unreleased/fixed-20250206-214937.yaml

diff --git a/.changes/unreleased/fixed-20250206-214937.yaml b/.changes/unreleased/fixed-20250206-214937.yaml
new file mode 100644
index 00000000..f232d8cd
--- /dev/null
+++ b/.changes/unreleased/fixed-20250206-214937.yaml
@@ -0,0 +1,5 @@
+kind: fixed
+body: Inconsistent result for dynamic_executor_allocation (min_executors/max_executors) when enabled is false.
+time: 2025-02-06T21:49:37.8248633+01:00
+custom:
+  Issue: "239"
diff --git a/docs/resources/spark_custom_pool.md b/docs/resources/spark_custom_pool.md
index 93614a06..24193874 100644
--- a/docs/resources/spark_custom_pool.md
+++ b/docs/resources/spark_custom_pool.md
@@ -78,6 +78,9 @@ Required:
 Required:
 
 - `enabled` (Boolean) The status of the dynamic executor allocation. Accepted values: `false` - Disabled, `true` - Enabled.
+
+Optional:
+
 - `max_executors` (Number) The maximum executors.
 - `min_executors` (Number) The minimum executors.
 
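The hunks below apply the same pattern to both `min_executors` and `max_executors`: mark the attribute `Computed` and `Optional`, require a value when `dynamic_executor_allocation.enabled` is `true`, and force it to null when `enabled` is `false`. As a reading aid, here is a minimal standalone sketch of that pattern (not part of the patch); the helper name `conditionalExecutorsAttribute` is illustrative only, while the imports and validator calls are the ones this patch itself adds.

```go
// Sketch only: restates the conditional-validator pattern the patch applies to
// min_executors and max_executors. The helper name is illustrative; the imports
// and validator calls mirror the ones added to resource_spark_custom_pool.go.
package example

import (
	"github.com/hashicorp/terraform-plugin-framework/attr"
	"github.com/hashicorp/terraform-plugin-framework/path"
	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
	"github.com/hashicorp/terraform-plugin-framework/types"
	superint32validator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/int32validator"
)

// conditionalExecutorsAttribute builds an Int32 attribute that must stay null when
// dynamic_executor_allocation.enabled is false and must be set when it is true.
func conditionalExecutorsAttribute(description string) schema.Int32Attribute {
	return schema.Int32Attribute{
		MarkdownDescription: description,
		Computed:            true,
		Optional:            true,
		Validators: []validator.Int32{
			// Reject a configured value when the sibling enabled flag is false.
			superint32validator.NullIfAttributeIsOneOf(
				path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
				[]attr.Value{types.BoolValue(false)},
			),
			// Require a value when the sibling enabled flag is true.
			superint32validator.RequireIfAttributeIsOneOf(
				path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
				[]attr.Value{types.BoolValue(true)},
			),
		},
	}
}
```

In the resource schema below, the patch inlines this pair of validators on each attribute rather than factoring it into a helper.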
diff --git a/internal/services/spark/base_test.go b/internal/services/spark/base_test.go
index b099a776..981ff7d1 100644
--- a/internal/services/spark/base_test.go
+++ b/internal/services/spark/base_test.go
@@ -19,28 +19,6 @@ const (
 	sparkEnvironmentLibrariesTFName = spark.SparkEnvironmentLibrariesTFName
 )
 
-func getSparkCustomPoolResourceAttr(t *testing.T, workspaceID, name string) map[string]any {
-	t.Helper()
-
-	return map[string]any{
-		"workspace_id": workspaceID,
-		"name":         name,
-		"type":         "Workspace",
-		"node_family":  "MemoryOptimized",
-		"node_size":    "Small",
-		"auto_scale": map[string]any{
-			"enabled":        true,
-			"min_node_count": 1,
-			"max_node_count": 3,
-		},
-		"dynamic_executor_allocation": map[string]any{
-			"enabled":       true,
-			"min_executors": 1,
-			"max_executors": 2,
-		},
-	}
-}
-
 func environmentResource(t *testing.T, workspaceID string) (resourceHCL, resourceFQN string) {
 	t.Helper()
 
diff --git a/internal/services/spark/data_spark_custom_pool_test.go b/internal/services/spark/data_spark_custom_pool_test.go
index 5ac62b6c..2c2c6d19 100644
--- a/internal/services/spark/data_spark_custom_pool_test.go
+++ b/internal/services/spark/data_spark_custom_pool_test.go
@@ -32,7 +32,23 @@ func TestAcc_SparkCustomPoolDataSource(t *testing.T) {
 				workspaceResourceHCL,
 				at.CompileConfig(
 					testResourceSparkCustomPoolHeader,
-					getSparkCustomPoolResourceAttr(t, testhelp.RefByFQN(workspaceResourceFQN, "id"), entityName),
+					map[string]any{
+						"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
+						"name":         entityName,
+						"type":         "Workspace",
+						"node_family":  "MemoryOptimized",
+						"node_size":    "Small",
+						"auto_scale": map[string]any{
+							"enabled":        true,
+							"min_node_count": 1,
+							"max_node_count": 3,
+						},
+						"dynamic_executor_allocation": map[string]any{
+							"enabled":       true,
+							"min_executors": 1,
+							"max_executors": 2,
+						},
+					},
 				),
 				at.CompileConfig(
 					testDataSourceSparkCustomPoolHeader,
diff --git a/internal/services/spark/resource_spark_custom_pool.go b/internal/services/spark/resource_spark_custom_pool.go
index 3cc07703..fe0f406a 100644
--- a/internal/services/spark/resource_spark_custom_pool.go
+++ b/internal/services/spark/resource_spark_custom_pool.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
 	"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+	"github.com/hashicorp/terraform-plugin-framework/attr"
 	"github.com/hashicorp/terraform-plugin-framework/diag"
 	"github.com/hashicorp/terraform-plugin-framework/path"
 	"github.com/hashicorp/terraform-plugin-framework/resource"
@@ -18,10 +19,12 @@ import (
 	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
 	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
 	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+	"github.com/hashicorp/terraform-plugin-framework/types"
 	"github.com/hashicorp/terraform-plugin-log/tflog"
 	fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
 	fabspark "github.com/microsoft/fabric-sdk-go/fabric/spark"
 	supertypes "github.com/orange-cloudavenue/terraform-plugin-framework-supertypes"
+	superint32validator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/int32validator"
 
 	"github.com/microsoft/terraform-provider-fabric/internal/common"
 	"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
@@ -130,11 +133,33 @@ func (r *resourceSparkCustomPool) Schema(ctx context.Context, _ resource.SchemaR
 						},
 						"min_executors": schema.Int32Attribute{
 							MarkdownDescription: "The minimum executors.",
-							Required:            true,
+							Computed:            true,
+							Optional:            true,
+							Validators: []validator.Int32{
+								superint32validator.NullIfAttributeIsOneOf(
+									path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
+									[]attr.Value{types.BoolValue(false)},
+								),
+								superint32validator.RequireIfAttributeIsOneOf(
+									path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
+									[]attr.Value{types.BoolValue(true)},
+								),
+							},
 						},
 						"max_executors": schema.Int32Attribute{
 							MarkdownDescription: "The maximum executors.",
-							Required:            true,
+							Computed:            true,
+							Optional:            true,
+							Validators: []validator.Int32{
+								superint32validator.NullIfAttributeIsOneOf(
+									path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
+									[]attr.Value{types.BoolValue(false)},
+								),
+								superint32validator.RequireIfAttributeIsOneOf(
+									path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
+									[]attr.Value{types.BoolValue(true)},
+								),
+							},
 						},
 					},
 				},
diff --git a/internal/services/spark/resource_spark_custom_pool_test.go b/internal/services/spark/resource_spark_custom_pool_test.go
index 47108c00..eafcc64a 100644
--- a/internal/services/spark/resource_spark_custom_pool_test.go
+++ b/internal/services/spark/resource_spark_custom_pool_test.go
@@ -22,15 +22,8 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
 	capacityID := capacity["id"].(string)
 
 	workspaceResourceHCL, workspaceResourceFQN := testhelp.TestAccWorkspaceResource(t, capacityID)
-	testHelperSparkCustomPoolResource := getSparkCustomPoolResourceAttr(t, testhelp.RefByFQN(workspaceResourceFQN, "id"), "test")
-
 	entityCreateName := testhelp.RandomName()
-	testCaseCreate := testhelp.CopyMap(testHelperSparkCustomPoolResource)
-	testCaseCreate["name"] = entityCreateName
-
 	entityUpdateName := testhelp.RandomName()
-	testCaseUpdate := testhelp.CopyMap(testHelperSparkCustomPoolResource)
-	testCaseUpdate["name"] = entityUpdateName
 
 	resource.Test(t, testhelp.NewTestAccCase(t, &testResourceSparkCustomPoolFQN, nil, []resource.TestStep{
 		// Create and Read
@@ -40,10 +33,32 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
 				workspaceResourceHCL,
 				at.CompileConfig(
 					testResourceSparkCustomPoolHeader,
-					testCaseCreate,
+					map[string]any{
+						"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
+						"name":         entityCreateName,
+						"type":         "Workspace",
+						"node_family":  "MemoryOptimized",
+						"node_size":    "Small",
+						"auto_scale": map[string]any{
+							"enabled":        true,
+							"min_node_count": 1,
+							"max_node_count": 3,
+						},
+						"dynamic_executor_allocation": map[string]any{
+							"enabled":       true,
+							"min_executors": 1,
+							"max_executors": 2,
+						},
+					},
 				)),
 			Check: resource.ComposeAggregateTestCheckFunc(
 				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "name", entityCreateName),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.enabled", "true"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.min_node_count", "1"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.max_node_count", "3"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.enabled", "true"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.min_executors", "1"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.max_executors", "2"),
 			),
 		},
 		// Update and Read
@@ -53,10 +68,30 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
 				workspaceResourceHCL,
 				at.CompileConfig(
 					testResourceSparkCustomPoolHeader,
-					testCaseUpdate,
+					map[string]any{
+						"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
+						"name":         entityUpdateName,
+						"type":         "Workspace",
+						"node_family":  "MemoryOptimized",
+						"node_size":    "Small",
+						"auto_scale": map[string]any{
+							"enabled":        false,
+							"min_node_count": 1,
+							"max_node_count": 3,
+						},
+						"dynamic_executor_allocation": map[string]any{
+							"enabled": false,
+						},
+					},
 				)),
 			Check: resource.ComposeAggregateTestCheckFunc(
 				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "name", entityUpdateName),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.enabled", "false"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.min_node_count", "1"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.max_node_count", "3"),
+				resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.enabled", "false"),
+				resource.TestCheckNoResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.min_executors"),
+				resource.TestCheckNoResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.max_executors"),
 			),
 		},
 	},
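
The updated acceptance test covers the two valid shapes (`enabled = true` with both executors set, `enabled = false` with both omitted) but no invalid one. The sketch below shows what a negative step could look like if appended to the `TestStep` slice in `TestAcc_SparkCustomPoolResource_CRUD`; it reuses identifiers defined in that test (`workspaceResourceHCL`, `entityUpdateName`, the `at` and `testhelp` helpers), assumes `regexp` is imported in the test file, and the `ExpectError` pattern is a guess rather than the provider's exact error text.

```go
// Hypothetical negative step (not in the patch): with the new validators,
// configuring min_executors while enabled is false should fail validation.
{
	ResourceName: testResourceSparkCustomPoolFQN,
	ExpectError:  regexp.MustCompile(`(?i)min_executors`), // loose match; exact message not verified
	Config: at.JoinConfigs(
		workspaceResourceHCL,
		at.CompileConfig(
			testResourceSparkCustomPoolHeader,
			map[string]any{
				"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
				"name":         entityUpdateName,
				"type":         "Workspace",
				"node_family":  "MemoryOptimized",
				"node_size":    "Small",
				"auto_scale": map[string]any{
					"enabled":        false,
					"min_node_count": 1,
					"max_node_count": 3,
				},
				"dynamic_executor_allocation": map[string]any{
					"enabled":       false,
					"min_executors": 1, // not allowed when enabled is false
				},
			},
		)),
},
```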