Skip to content

Commit

Permalink
fix(rs-spark_custom_pool): inconsistent result for dynamic_executor_allocation (#240)
Browse files Browse the repository at this point in the history

# 📥 Pull Request

fix #239

## ❓ What are you trying to address

This pull request includes changes to address the inconsistent results
for dynamic executor allocation when the `enabled` attribute is set to
false and updates the documentation and schema accordingly.

## ✨ Description of new changes

### Code updates:
* `internal/services/spark/resource_spark_custom_pool.go`: 
* Added new imports for `attr`, `types`, and `superint32validator`
packages.
* Updated the schema for `min_executors` and `max_executors` attributes
to be `Computed` and `Optional`, and added validators to handle the
`dynamic_executor_allocation` `enabled` attribute.

### Documentation updates:
* `docs/resources/spark_custom_pool.md`:
Added an `Optional` section to the documentation for the `max_executors` and
`min_executors` attributes.
  • Loading branch information
DariuszPorowski authored Feb 12, 2025
1 parent 79068fb commit a0b6ea7
Show file tree
Hide file tree
Showing 6 changed files with 96 additions and 34 deletions.
5 changes: 5 additions & 0 deletions .changes/unreleased/fixed-20250206-214937.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
kind: fixed
body: Inconsistent result for dynamic_executor_allocation (min_executors/max_executors) when enabled is false.
time: 2025-02-06T21:49:37.8248633+01:00
custom:
Issue: "239"
3 changes: 3 additions & 0 deletions docs/resources/spark_custom_pool.md
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,9 @@ Required:
Required:

- `enabled` (Boolean) The status of the dynamic executor allocation. Accepted values: `false` - Disabled, `true` - Enabled.

Optional:

- `max_executors` (Number) The maximum executors.
- `min_executors` (Number) The minimum executors.

Expand Down
22 changes: 0 additions & 22 deletions internal/services/spark/base_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,28 +19,6 @@ const (
sparkEnvironmentLibrariesTFName = spark.SparkEnvironmentLibrariesTFName
)

// getSparkCustomPoolResourceAttr builds the baseline attribute map used to
// render a Spark custom pool resource configuration in acceptance tests.
// workspaceID and name are caller-supplied; every other attribute is a fixed
// small-pool default (MemoryOptimized/Small, auto-scale 1-3 nodes, dynamic
// executor allocation 1-2 executors).
func getSparkCustomPoolResourceAttr(t *testing.T, workspaceID, name string) map[string]any {
	t.Helper()

	// Nested attribute objects are named locally for readability.
	autoScale := map[string]any{
		"enabled":        true,
		"min_node_count": 1,
		"max_node_count": 3,
	}

	dynamicExecutorAllocation := map[string]any{
		"enabled":       true,
		"min_executors": 1,
		"max_executors": 2,
	}

	return map[string]any{
		"workspace_id":                workspaceID,
		"name":                        name,
		"type":                        "Workspace",
		"node_family":                 "MemoryOptimized",
		"node_size":                   "Small",
		"auto_scale":                  autoScale,
		"dynamic_executor_allocation": dynamicExecutorAllocation,
	}
}

func environmentResource(t *testing.T, workspaceID string) (resourceHCL, resourceFQN string) {
t.Helper()

Expand Down
18 changes: 17 additions & 1 deletion internal/services/spark/data_spark_custom_pool_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,23 @@ func TestAcc_SparkCustomPoolDataSource(t *testing.T) {
workspaceResourceHCL,
at.CompileConfig(
testResourceSparkCustomPoolHeader,
getSparkCustomPoolResourceAttr(t, testhelp.RefByFQN(workspaceResourceFQN, "id"), entityName),
map[string]any{
"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
"name": entityName,
"type": "Workspace",
"node_family": "MemoryOptimized",
"node_size": "Small",
"auto_scale": map[string]any{
"enabled": true,
"min_node_count": 1,
"max_node_count": 3,
},
"dynamic_executor_allocation": map[string]any{
"enabled": true,
"min_executors": 1,
"max_executors": 2,
},
},
),
at.CompileConfig(
testDataSourceSparkCustomPoolHeader,
Expand Down
29 changes: 27 additions & 2 deletions internal/services/spark/resource_spark_custom_pool.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,20 @@ import (

"github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fabspark "github.com/microsoft/fabric-sdk-go/fabric/spark"
supertypes "github.com/orange-cloudavenue/terraform-plugin-framework-supertypes"
superint32validator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/int32validator"

"github.com/microsoft/terraform-provider-fabric/internal/common"
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
Expand Down Expand Up @@ -130,11 +133,33 @@ func (r *resourceSparkCustomPool) Schema(ctx context.Context, _ resource.SchemaR
},
"min_executors": schema.Int32Attribute{
MarkdownDescription: "The minimum executors.",
Required: true,
Computed: true,
Optional: true,
Validators: []validator.Int32{
superint32validator.NullIfAttributeIsOneOf(
path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
[]attr.Value{types.BoolValue(false)},
),
superint32validator.RequireIfAttributeIsOneOf(
path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
[]attr.Value{types.BoolValue(true)},
),
},
},
"max_executors": schema.Int32Attribute{
MarkdownDescription: "The maximum executors.",
Required: true,
Computed: true,
Optional: true,
Validators: []validator.Int32{
superint32validator.NullIfAttributeIsOneOf(
path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
[]attr.Value{types.BoolValue(false)},
),
superint32validator.RequireIfAttributeIsOneOf(
path.MatchRoot("dynamic_executor_allocation").AtName("enabled"),
[]attr.Value{types.BoolValue(true)},
),
},
},
},
},
Expand Down
53 changes: 44 additions & 9 deletions internal/services/spark/resource_spark_custom_pool_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,8 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
capacityID := capacity["id"].(string)

workspaceResourceHCL, workspaceResourceFQN := testhelp.TestAccWorkspaceResource(t, capacityID)
testHelperSparkCustomPoolResource := getSparkCustomPoolResourceAttr(t, testhelp.RefByFQN(workspaceResourceFQN, "id"), "test")

entityCreateName := testhelp.RandomName()
testCaseCreate := testhelp.CopyMap(testHelperSparkCustomPoolResource)
testCaseCreate["name"] = entityCreateName

entityUpdateName := testhelp.RandomName()
testCaseUpdate := testhelp.CopyMap(testHelperSparkCustomPoolResource)
testCaseUpdate["name"] = entityUpdateName

resource.Test(t, testhelp.NewTestAccCase(t, &testResourceSparkCustomPoolFQN, nil, []resource.TestStep{
// Create and Read
Expand All @@ -40,10 +33,32 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
workspaceResourceHCL,
at.CompileConfig(
testResourceSparkCustomPoolHeader,
testCaseCreate,
map[string]any{
"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
"name": entityCreateName,
"type": "Workspace",
"node_family": "MemoryOptimized",
"node_size": "Small",
"auto_scale": map[string]any{
"enabled": true,
"min_node_count": 1,
"max_node_count": 3,
},
"dynamic_executor_allocation": map[string]any{
"enabled": true,
"min_executors": 1,
"max_executors": 2,
},
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "name", entityCreateName),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.enabled", "true"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.min_node_count", "1"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.max_node_count", "3"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.enabled", "true"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.min_executors", "1"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.max_executors", "2"),
),
},
// Update and Read
Expand All @@ -53,10 +68,30 @@ func TestAcc_SparkCustomPoolResource_CRUD(t *testing.T) {
workspaceResourceHCL,
at.CompileConfig(
testResourceSparkCustomPoolHeader,
testCaseUpdate,
map[string]any{
"workspace_id": testhelp.RefByFQN(workspaceResourceFQN, "id"),
"name": entityUpdateName,
"type": "Workspace",
"node_family": "MemoryOptimized",
"node_size": "Small",
"auto_scale": map[string]any{
"enabled": false,
"min_node_count": 1,
"max_node_count": 3,
},
"dynamic_executor_allocation": map[string]any{
"enabled": false,
},
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "name", entityUpdateName),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.enabled", "false"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.min_node_count", "1"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "auto_scale.max_node_count", "3"),
resource.TestCheckResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.enabled", "false"),
resource.TestCheckNoResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.min_executors"),
resource.TestCheckNoResourceAttr(testResourceSparkCustomPoolFQN, "dynamic_executor_allocation.max_executors"),
),
},
},
Expand Down

0 comments on commit a0b6ea7

Please sign in to comment.