refactor(fabricitem): use generics for properties (#140)
# 📥 Pull Request

## ❓ What are you trying to address

This pull request adds extended properties to `fabric_spark_job_definition`.

Additionally, the PR contains an internal refactoring of Fabric Items to
handle extended properties using Go generics. This will be the base for
refactoring existing items to use it and for any future items with
properties.
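
To illustrate the pattern, here is a minimal sketch (hypothetical, simplified type and field names, not the provider's actual implementation): a generic wrapper carries the item-specific properties types plus getter/setter callbacks, so each Fabric item only has to supply its own property types and mapping functions.

```go
package main

import (
	"context"
	"fmt"
)

// FabricItemProperties is a generic wrapper pairing a Fabric item with its
// strongly typed extended-properties payload (simplified, illustrative shape).
type FabricItemProperties[Titemprop any] struct {
	ID         string
	Properties *Titemprop
}

// dataSourceWithProperties is a hypothetical stand-in for the new generic
// data source: Ttfprop is the Terraform-side properties model type, Titemprop
// the Fabric API properties type. Each item supplies only its own types,
// an ItemGetter, and a PropertiesSetter.
type dataSourceWithProperties[Ttfprop, Titemprop any] struct {
	Name             string
	ItemGetter       func(ctx context.Context, id string) (FabricItemProperties[Titemprop], error)
	PropertiesSetter func(ctx context.Context, from *Titemprop) (Ttfprop, error)
}

// read fetches the item and maps its extended properties onto the Terraform model.
func (d dataSourceWithProperties[Ttfprop, Titemprop]) read(ctx context.Context, id string) (Ttfprop, error) {
	var zero Ttfprop

	item, err := d.ItemGetter(ctx, id)
	if err != nil {
		return zero, err
	}

	return d.PropertiesSetter(ctx, item.Properties)
}

// Spark Job Definition specific types (illustrative only).
type sparkJobDefinitionAPIProps struct{ OneLakeRootPath string }

type sparkJobDefinitionTFProps struct{ OneLakeRootPath string }

func main() {
	ds := dataSourceWithProperties[sparkJobDefinitionTFProps, sparkJobDefinitionAPIProps]{
		Name: "spark_job_definition",
		ItemGetter: func(_ context.Context, id string) (FabricItemProperties[sparkJobDefinitionAPIProps], error) {
			// The provider would call the Fabric SDK here; this is a stub.
			return FabricItemProperties[sparkJobDefinitionAPIProps]{
				ID:         id,
				Properties: &sparkJobDefinitionAPIProps{OneLakeRootPath: "<onelake-root-path>"},
			}, nil
		},
		PropertiesSetter: func(_ context.Context, from *sparkJobDefinitionAPIProps) (sparkJobDefinitionTFProps, error) {
			return sparkJobDefinitionTFProps{OneLakeRootPath: from.OneLakeRootPath}, nil
		},
	}

	props, err := ds.read(context.Background(), "00000000-0000-0000-0000-000000000000")
	if err != nil {
		panic(err)
	}

	fmt.Println(props.OneLakeRootPath)
}
```

In the actual code the generic type also embeds the base definition data source and plugs into the Terraform Plugin Framework; the sketch only shows the generics flow.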

---------

Co-authored-by: Pablo Zaidenvoren <[email protected]>
DariuszPorowski and PabloZaiden authored Dec 20, 2024
1 parent 258ee38 commit 7aa509c
Showing 28 changed files with 1,800 additions and 95 deletions.
5 changes: 5 additions & 0 deletions .changes/unreleased/added-20241219-121511.yaml
@@ -0,0 +1,5 @@
kind: added
body: Added extended properties to `fabric_spark_job_definition` Resource.
time: 2024-12-19T12:15:11.1738255-08:00
custom:
Issue: "158"
5 changes: 5 additions & 0 deletions .changes/unreleased/changed-20241211-011622.yaml
@@ -0,0 +1,5 @@
kind: added
body: Added extended properties to `fabric_spark_job_definition` Data Source.
time: 2024-12-11T01:16:22.8183258-08:00
custom:
Issue: "157"
9 changes: 9 additions & 0 deletions docs/data-sources/spark_job_definition.md
@@ -80,6 +80,7 @@ output "example_definition_content_object" {
- `definition` (Attributes Map) Definition parts. Possible path keys: `SparkJobDefinitionV1.json`. (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Spark Job Definition description.
- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`.
- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties))

<a id="nestedatt--timeouts"></a>

@@ -97,3 +98,11 @@ Read-Only:

- `content` (String) Gzip base64 content of definition part.
Use [`provider::fabric::content_decode`](../functions/content_decode.md) function to decode content.

<a id="nestedatt--properties"></a>

### Nested Schema for `properties`

Read-Only:

- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory.
9 changes: 9 additions & 0 deletions docs/data-sources/spark_job_definitions.md
@@ -56,4 +56,13 @@ Read-Only:
- `description` (String) The Spark Job Definition description.
- `display_name` (String) The Spark Job Definition display name.
- `id` (String) The Spark Job Definition ID.
- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.

<a id="nestedatt--values--properties"></a>

### Nested Schema for `values.properties`

Read-Only:

- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory.
9 changes: 9 additions & 0 deletions docs/resources/spark_job_definition.md
@@ -74,6 +74,7 @@ resource "fabric_spark_job_definition" "example_definition_update" {

- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`.
- `id` (String) The Spark Job Definition ID.
- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties))

<a id="nestedatt--definition"></a>

@@ -104,6 +105,14 @@ Optional:
- `read` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Read operations occur during any refresh or planning operation when refresh is enabled.
- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).

<a id="nestedatt--properties"></a>

### Nested Schema for `properties`

Read-Only:

- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory.

## Import

Import is supported using the following syntax:
218 changes: 218 additions & 0 deletions internal/pkg/fabricitem/data_item_definition_properties.go
@@ -0,0 +1,218 @@
// Copyright (c) Microsoft Corporation
// SPDX-License-Identifier: MPL-2.0

package fabricitem

import (
"context"
"fmt"
"net/http"

"github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/microsoft/fabric-sdk-go/fabric"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"

"github.com/microsoft/terraform-provider-fabric/internal/common"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)

// Ensure the implementation satisfies the expected interfaces.
var (
_ datasource.DataSourceWithConfigValidators = (*DataSourceFabricItemDefinitionProperties[struct{}, struct{}])(nil)
_ datasource.DataSourceWithConfigure = (*DataSourceFabricItemDefinitionProperties[struct{}, struct{}])(nil)
)

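// DataSourceFabricItemDefinitionProperties extends the base definition data source with a typed
// extended-properties payload: Ttfprop is the Terraform-side properties model type, and
// Titemprop is the Fabric API properties type populated via ItemGetter/ItemListGetter.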
type DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct {
DataSourceFabricItemDefinition
PropertiesSchema schema.SingleNestedAttribute
PropertiesSetter func(ctx context.Context, from *Titemprop, to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
ItemGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], errNotFound fabcore.ResponseError, fabricItem *FabricItemProperties[Titemprop]) error
}

func NewDataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any](config DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) datasource.DataSource {
return &config
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { //revive:disable-line:confusing-naming
resp.TypeName = req.ProviderTypeName + "_" + d.TFName
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { //revive:disable-line:confusing-naming
resp.Schema = GetDataSourceFabricItemDefinitionPropertiesSchema1(ctx, *d)
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
if d.IsDisplayNameUnique {
return []datasource.ConfigValidator{
datasourcevalidator.Conflicting(
path.MatchRoot("id"),
path.MatchRoot("display_name"),
),
datasourcevalidator.ExactlyOneOf(
path.MatchRoot("id"),
path.MatchRoot("display_name"),
),
}
}

return []datasource.ConfigValidator{}
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { //revive:disable-line:confusing-naming
if req.ProviderData == nil {
return
}

pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
if !ok {
resp.Diagnostics.AddError(
common.ErrorDataSourceConfigType,
fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
)

return
}

d.pConfigData = pConfigData
d.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "READ", map[string]any{
"action": "start",
})
tflog.Trace(ctx, "READ", map[string]any{
"config": req.Config,
})

var data DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]

if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
return
}

timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}

ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()

if data.ID.ValueString() != "" {
diags = d.getByID(ctx, &data)
} else {
diags = d.getByDisplayName(ctx, &data)
}

if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}

data.Format = types.StringNull()

if d.FormatTypeDefault != "" {
data.Format = types.StringValue(d.FormatTypeDefault)
}

resp.Diagnostics.Append(resp.State.Set(ctx, data)...)

if data.OutputDefinition.IsNull() || data.OutputDefinition.IsUnknown() {
data.OutputDefinition = types.BoolValue(false)
}

if data.OutputDefinition.ValueBool() {
if resp.Diagnostics.Append(d.getDefinition(ctx, &data)...); resp.Diagnostics.HasError() {
return
}

tflog.Debug(ctx, "Definition parts content is gzip base64. Use `provider::fabric::content_decode` function to decode content.")

resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
}

tflog.Debug(ctx, "READ", map[string]any{
"action": "end",
})

if resp.Diagnostics.HasError() {
return
}
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByID(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", d.Name, model.ID.ValueString()))

var fabricItem FabricItemProperties[Titemprop]

err := d.ItemGetter(ctx, *d.pConfigData.FabricClient, *model, &fabricItem)
if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
return diags
}

model.set(fabricItem)

diags := d.PropertiesSetter(ctx, fabricItem.Properties, model)
if diags.HasError() {
return diags
}

return nil
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByDisplayName(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
tflog.Trace(ctx, fmt.Sprintf("getting %s by Display Name: %s", d.Name, model.DisplayName.ValueString()))

errNotFoundCode := fabcore.ErrCommon.EntityNotFound.Error()
errNotFoundMsg := fmt.Sprintf("Unable to find %s with 'display_name': %s in the Workspace ID: %s", d.Name, model.DisplayName.ValueString(), model.WorkspaceID.ValueString())

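// Construct a synthetic 404 response error so the display-name lookup can report a consistent
// "not found" diagnostic when no matching item exists in the workspace.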
errNotFound := fabcore.ResponseError{
ErrorCode: errNotFoundCode,
StatusCode: http.StatusNotFound,
ErrorResponse: &fabcore.ErrorResponse{
ErrorCode: &errNotFoundCode,
Message: &errNotFoundMsg,
},
}

var fabricItem FabricItemProperties[Titemprop]

err := d.ItemListGetter(ctx, *d.pConfigData.FabricClient, *model, errNotFound, &fabricItem)
if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
return diags
}

model.set(fabricItem)

diags := d.PropertiesSetter(ctx, fabricItem.Properties, model)
if diags.HasError() {
return diags
}

return nil
}

func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getDefinition(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
tflog.Trace(ctx, fmt.Sprintf("getting %s definition (WorkspaceID: %s ItemID: %s)", d.Name, model.WorkspaceID.ValueString(), model.ID.ValueString()))

respGetOpts := &fabcore.ItemsClientBeginGetItemDefinitionOptions{}

if !model.Format.IsNull() {
respGetOpts.Format = model.Format.ValueStringPointer()
}

respGet, err := d.client.GetItemDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), respGetOpts)
if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
return diags
}

return model.setDefinition(ctx, *respGet.Definition)
}