diff --git a/.changes/unreleased/added-20241219-121511.yaml b/.changes/unreleased/added-20241219-121511.yaml new file mode 100644 index 00000000..b53644a9 --- /dev/null +++ b/.changes/unreleased/added-20241219-121511.yaml @@ -0,0 +1,5 @@ +kind: added +body: Added extended properties to `fabric_spark_job_definition` Resource. +time: 2024-12-19T12:15:11.1738255-08:00 +custom: + Issue: "158" diff --git a/.changes/unreleased/added-20241211-011622.yaml b/.changes/unreleased/added-20241211-011622.yaml new file mode 100644 index 00000000..c01c6fa0 --- /dev/null +++ b/.changes/unreleased/added-20241211-011622.yaml @@ -0,0 +1,5 @@ +kind: added +body: Added extended properties to `fabric_spark_job_definition` Data Source. +time: 2024-12-11T01:16:22.8183258-08:00 +custom: + Issue: "157" diff --git a/docs/data-sources/spark_job_definition.md b/docs/data-sources/spark_job_definition.md index e1ae45cf..935ea756 100644 --- a/docs/data-sources/spark_job_definition.md +++ b/docs/data-sources/spark_job_definition.md @@ -80,6 +80,7 @@ output "example_definition_content_object" { - `definition` (Attributes Map) Definition parts. Possible path keys: `SparkJobDefinitionV1.json`. (see [below for nested schema](#nestedatt--definition)) - `description` (String) The Spark Job Definition description. - `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`. +- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties)) @@ -97,3 +98,11 @@ Read-Only: - `content` (String) Gzip base64 content of definition part. Use [`provider::fabric::content_decode`](../functions/content_decode.md) function to decode content. + + + +### Nested Schema for `properties` + +Read-Only: + +- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory. 
diff --git a/docs/data-sources/spark_job_definitions.md b/docs/data-sources/spark_job_definitions.md index 5c228f8c..4a69ef93 100644 --- a/docs/data-sources/spark_job_definitions.md +++ b/docs/data-sources/spark_job_definitions.md @@ -56,4 +56,13 @@ Read-Only: - `description` (String) The Spark Job Definition description. - `display_name` (String) The Spark Job Definition display name. - `id` (String) The Spark Job Definition ID. +- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--values--properties)) - `workspace_id` (String) The Workspace ID. + + + +### Nested Schema for `values.properties` + +Read-Only: + +- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory. diff --git a/docs/resources/spark_job_definition.md b/docs/resources/spark_job_definition.md index 2e83c38e..c08ab096 100644 --- a/docs/resources/spark_job_definition.md +++ b/docs/resources/spark_job_definition.md @@ -74,6 +74,7 @@ resource "fabric_spark_job_definition" "example_definition_update" { - `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`. - `id` (String) The Spark Job Definition ID. +- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties)) @@ -104,6 +105,14 @@ Optional: - `read` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Read operations occur during any refresh or planning operation when refresh is enabled. - `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). 
+ + +### Nested Schema for `properties` + +Read-Only: + +- `onelake_root_path` (String) OneLake path to the Spark Job Definition root directory. + ## Import Import is supported using the following syntax: diff --git a/internal/pkg/fabricitem/data_item_definition_properties.go b/internal/pkg/fabricitem/data_item_definition_properties.go new file mode 100644 index 00000000..a74acded --- /dev/null +++ b/internal/pkg/fabricitem/data_item_definition_properties.go @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/microsoft/fabric-sdk-go/fabric" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" + + "github.com/microsoft/terraform-provider-fabric/internal/common" + "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils" + pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ datasource.DataSourceWithConfigValidators = (*DataSourceFabricItemDefinitionProperties[struct{}, struct{}])(nil) + _ datasource.DataSourceWithConfigure = (*DataSourceFabricItemDefinitionProperties[struct{}, struct{}])(nil) +) + +type DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { + DataSourceFabricItemDefinition + PropertiesSchema schema.SingleNestedAttribute + PropertiesSetter func(ctx context.Context, from *Titemprop, to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics + ItemGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error + ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], errNotFound fabcore.ResponseError, fabricItem *FabricItemProperties[Titemprop]) error +} + +func NewDataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any](config DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) datasource.DataSource { + return &config +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { //revive:disable-line:confusing-naming + resp.TypeName = req.ProviderTypeName + "_" + d.TFName +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { //revive:disable-line:confusing-naming + resp.Schema = GetDataSourceFabricItemDefinitionPropertiesSchema1(ctx, *d) +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ConfigValidators(_ context.Context) []datasource.ConfigValidator { + if d.IsDisplayNameUnique { + return []datasource.ConfigValidator{ + datasourcevalidator.Conflicting( + path.MatchRoot("id"), + 
path.MatchRoot("display_name"), + ), + datasourcevalidator.ExactlyOneOf( + path.MatchRoot("id"), + path.MatchRoot("display_name"), + ), + } + } + + return []datasource.ConfigValidator{} +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { //revive:disable-line:confusing-naming + if req.ProviderData == nil { + return + } + + pConfigData, ok := req.ProviderData.(*pconfig.ProviderData) + if !ok { + resp.Diagnostics.AddError( + common.ErrorDataSourceConfigType, + fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData), + ) + + return + } + + d.pConfigData = pConfigData + d.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient() +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { //revive:disable-line:confusing-naming + tflog.Debug(ctx, "READ", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "READ", map[string]any{ + "config": req.Config, + }) + + var data DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() { + return + } + + timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + if data.ID.ValueString() != "" { + diags = d.getByID(ctx, &data) + } else { + diags = d.getByDisplayName(ctx, &data) + } + + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + data.Format = types.StringNull() + + if d.FormatTypeDefault != "" { + data.Format = types.StringValue(d.FormatTypeDefault) + } + + resp.Diagnostics.Append(resp.State.Set(ctx, data)...) 
+ + if data.OutputDefinition.IsNull() || data.OutputDefinition.IsUnknown() { + data.OutputDefinition = types.BoolValue(false) + } + + if data.OutputDefinition.ValueBool() { + if resp.Diagnostics.Append(d.getDefinition(ctx, &data)...); resp.Diagnostics.HasError() { + return + } + + tflog.Debug(ctx, "Definition parts content is gzip base64. Use `provider::fabric::content_decode` function to decode content.") + + resp.Diagnostics.Append(resp.State.Set(ctx, data)...) + } + + tflog.Debug(ctx, "READ", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByID(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { + tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", d.Name, model.ID.ValueString())) + + var fabricItem FabricItemProperties[Titemprop] + + err := d.ItemGetter(ctx, *d.pConfigData.FabricClient, *model, &fabricItem) + if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() { + return diags + } + + model.set(fabricItem) + + diags := d.PropertiesSetter(ctx, fabricItem.Properties, model) + if diags.HasError() { + return diags + } + + return nil +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByDisplayName(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { + tflog.Trace(ctx, fmt.Sprintf("getting %s by Display Name: %s", d.Name, model.DisplayName.ValueString())) + + errNotFoundCode := fabcore.ErrCommon.EntityNotFound.Error() + errNotFoundMsg := fmt.Sprintf("Unable to find %s with 'display_name': %s in the Workspace ID: %s", d.Name, model.DisplayName.ValueString(), model.WorkspaceID.ValueString()) + + errNotFound := fabcore.ResponseError{ + ErrorCode: errNotFoundCode, + StatusCode: http.StatusNotFound, + ErrorResponse: &fabcore.ErrorResponse{ + ErrorCode: 
&errNotFoundCode, + Message: &errNotFoundMsg, + }, + } + + var fabricItem FabricItemProperties[Titemprop] + + err := d.ItemListGetter(ctx, *d.pConfigData.FabricClient, *model, errNotFound, &fabricItem) + if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() { + return diags + } + + model.set(fabricItem) + + diags := d.PropertiesSetter(ctx, fabricItem.Properties, model) + if diags.HasError() { + return diags + } + + return nil +} + +func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getDefinition(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { + tflog.Trace(ctx, fmt.Sprintf("getting %s definition (WorkspaceID: %s ItemID: %s)", d.Name, model.WorkspaceID.ValueString(), model.ID.ValueString())) + + respGetOpts := &fabcore.ItemsClientBeginGetItemDefinitionOptions{} + + if !model.Format.IsNull() { + respGetOpts.Format = model.Format.ValueStringPointer() + } + + respGet, err := d.client.GetItemDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), respGetOpts) + if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() { + return diags + } + + return model.setDefinition(ctx, *respGet.Definition) +} diff --git a/internal/pkg/fabricitem/data_items_properties.go b/internal/pkg/fabricitem/data_items_properties.go new file mode 100644 index 00000000..918c0605 --- /dev/null +++ b/internal/pkg/fabricitem/data_items_properties.go @@ -0,0 +1,155 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + "fmt" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" + "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + 
"github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/microsoft/fabric-sdk-go/fabric" + + "github.com/microsoft/terraform-provider-fabric/internal/common" + "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes" + "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils" + pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSourceWithConfigure = (*DataSourceFabricItemsProperties[struct{}, struct{}])(nil) +) + +type DataSourceFabricItemsProperties[Ttfprop, Titemprop any] struct { + DataSourceFabricItems + PropertiesSchema schema.SingleNestedAttribute + PropertiesSetter func(ctx context.Context, from *Titemprop, to *FabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics + ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop], fabricItems *[]FabricItemProperties[Titemprop]) error +} + +func NewDataSourceFabricItemsProperties[Ttfprop, Titemprop any](config DataSourceFabricItemsProperties[Ttfprop, Titemprop]) datasource.DataSource { + return &config +} + +func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { //revive:disable-line:confusing-naming + resp.TypeName = req.ProviderTypeName + "_" + d.TFName +} + +func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { //revive:disable-line:confusing-naming + attributes := map[string]schema.Attribute{ + "workspace_id": schema.StringAttribute{ + MarkdownDescription: "The Workspace ID.", + Computed: true, + CustomType: customtypes.UUIDType{}, + }, + "id": schema.StringAttribute{ + MarkdownDescription: fmt.Sprintf("The %s 
ID.", d.Name), + Computed: true, + CustomType: customtypes.UUIDType{}, + }, + "display_name": schema.StringAttribute{ + MarkdownDescription: fmt.Sprintf("The %s display name.", d.Name), + Computed: true, + }, + "description": schema.StringAttribute{ + MarkdownDescription: fmt.Sprintf("The %s description.", d.Name), + Computed: true, + }, + } + + attributes["properties"] = d.PropertiesSchema + + resp.Schema = schema.Schema{ + MarkdownDescription: d.MarkdownDescription, + Attributes: map[string]schema.Attribute{ + "workspace_id": schema.StringAttribute{ + MarkdownDescription: "The Workspace ID.", + Required: true, + CustomType: customtypes.UUIDType{}, + }, + "values": schema.ListNestedAttribute{ + Computed: true, + MarkdownDescription: fmt.Sprintf("The list of %s.", d.Names), + CustomType: supertypes.NewListNestedObjectTypeOf[FabricItemPropertiesModel[Ttfprop, Titemprop]](ctx), + NestedObject: schema.NestedAttributeObject{ + Attributes: attributes, + }, + }, + "timeouts": timeouts.Attributes(ctx), + }, + } +} + +func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { //revive:disable-line:confusing-naming + if req.ProviderData == nil { + return + } + + pConfigData, ok := req.ProviderData.(*pconfig.ProviderData) + if !ok { + resp.Diagnostics.AddError( + common.ErrorDataSourceConfigType, + fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData), + ) + + return + } + + d.pConfigData = pConfigData +} + +func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { //revive:disable-line:confusing-naming + tflog.Debug(ctx, "READ", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "READ", map[string]any{ + "config": req.Config, + }) + + var data DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); 
resp.Diagnostics.HasError() { + return + } + + timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + if resp.Diagnostics.Append(d.list(ctx, &data)...); resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, data)...) + + tflog.Debug(ctx, "READ", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) list(ctx context.Context, model *DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { + tflog.Trace(ctx, fmt.Sprintf("getting %ss", d.Name)) + + var fabricItems []FabricItemProperties[Titemprop] + + err := d.ItemListGetter(ctx, *d.pConfigData.FabricClient, *model, &fabricItems) + if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() { + return diags + } + + return model.setValues(ctx, fabricItems, d.PropertiesSetter) +} diff --git a/internal/pkg/fabricitem/data_schema.go b/internal/pkg/fabricitem/data_schema.go index f6903b9f..7cdbb7b1 100644 --- a/internal/pkg/fabricitem/data_schema.go +++ b/internal/pkg/fabricitem/data_schema.go @@ -27,7 +27,7 @@ func GetDataSourceFabricItemSchema(ctx context.Context, d DataSourceFabricItem) func GetDataSourceFabricItemDefinitionSchema(ctx context.Context, d DataSourceFabricItemDefinition) schema.Schema { attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique) - for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d) { + for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) { attributes[key] = value } @@ -47,11 +47,25 @@ func GetDataSourceFabricItemPropertiesSchema(ctx context.Context, d DataSourceFa } } -func 
GetDataSourceFabricItemPropertiesDefinitionSchema(ctx context.Context, d DataSourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema { +func GetDataSourceFabricItemDefinitionPropertiesSchema(ctx context.Context, d DataSourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema { attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique) attributes["properties"] = properties - for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d) { + for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) { + attributes[key] = value + } + + return schema.Schema{ + MarkdownDescription: d.MarkdownDescription, + Attributes: attributes, + } +} + +func GetDataSourceFabricItemDefinitionPropertiesSchema1[Ttfprop, Titemprop any](ctx context.Context, d DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema { + attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique) + attributes["properties"] = d.PropertiesSchema + + for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) { attributes[key] = value } @@ -104,17 +118,17 @@ func getDataSourceFabricItemBaseAttributes(ctx context.Context, itemName string, } // Helper function to get Fabric Item data-source definition attributes. -func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, d DataSourceFabricItemDefinition) map[string]schema.Attribute { +func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name string, formatTypes, definitionPathKeys []string) map[string]schema.Attribute { attributes := make(map[string]schema.Attribute) - if len(d.FormatTypes) > 0 { + if len(formatTypes) > 0 { attributes["format"] = schema.StringAttribute{ - MarkdownDescription: fmt.Sprintf("The %s format. 
Possible values: %s.", d.Name, utils.ConvertStringSlicesToString(d.FormatTypes, true, false)), + MarkdownDescription: fmt.Sprintf("The %s format. Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false)), Computed: true, } } else { attributes["format"] = schema.StringAttribute{ - MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", d.Name, DefinitionFormatNotApplicable), + MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", name, DefinitionFormatNotApplicable), Computed: true, } } @@ -128,8 +142,8 @@ func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, d DataSour definitionMarkdownDescription := "Definition parts." - if len(d.DefinitionPathKeys) > 0 { - definitionMarkdownDescription = definitionMarkdownDescription + " Possible path keys: " + utils.ConvertStringSlicesToString(d.DefinitionPathKeys, true, false) + "." + if len(definitionPathKeys) > 0 { + definitionMarkdownDescription = definitionMarkdownDescription + " Possible path keys: " + utils.ConvertStringSlicesToString(definitionPathKeys, true, false) + "." 
} attributes["definition"] = schema.MapNestedAttribute{ diff --git a/internal/pkg/fabricitem/models.go b/internal/pkg/fabricitem/models.go index 12564d1a..e49db23f 100644 --- a/internal/pkg/fabricitem/models.go +++ b/internal/pkg/fabricitem/models.go @@ -4,6 +4,9 @@ package fabricitem import ( + "reflect" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" "github.com/hashicorp/terraform-plugin-framework/types" fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" @@ -23,3 +26,66 @@ func (to *baseFabricItemModel) set(from fabcore.Item) { to.DisplayName = types.StringPointerValue(from.DisplayName) to.Description = types.StringPointerValue(from.Description) } + +type FabricItemPropertiesModel[Ttfprop, Titemprop any] struct { //revive:disable-line:exported + WorkspaceID customtypes.UUID `tfsdk:"workspace_id"` + ID customtypes.UUID `tfsdk:"id"` + DisplayName types.String `tfsdk:"display_name"` + Description types.String `tfsdk:"description"` + Properties supertypes.SingleNestedObjectValueOf[Ttfprop] `tfsdk:"properties"` +} + +func (to *FabricItemPropertiesModel[Ttfprop, Titemprop]) set(from FabricItemProperties[Titemprop]) { //revive:disable-line:confusing-naming + to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID) + to.ID = customtypes.NewUUIDPointerValue(from.ID) + to.DisplayName = types.StringPointerValue(from.DisplayName) + to.Description = types.StringPointerValue(from.Description) +} + +type FabricItemProperties[Titemprop any] struct { //revive:disable-line:exported + fabcore.Item + Properties *Titemprop +} + +func (to *FabricItemProperties[Titemprop]) Set(from any) { + fromValue := reflect.ValueOf(from) + if fromValue.Kind() == reflect.Pointer { + fromValue = fromValue.Elem() + } + + to.WorkspaceID = getFieldStringValue(fromValue, "WorkspaceID") + to.ID = getFieldStringValue(fromValue, "ID") + to.DisplayName = getFieldStringValue(fromValue, "DisplayName") + to.Description = getFieldStringValue(fromValue, 
"Description") + to.Properties = getFieldStructValue[Titemprop](fromValue, "Properties") +} + +func getFieldStringValue(v reflect.Value, fieldName string) *string { + field := v.FieldByName(fieldName) + if field.Kind() == reflect.Pointer { + field = field.Elem() + } + + if field.IsValid() && field.Kind() == reflect.String { + if str, ok := field.Interface().(string); ok { + return &str + } + } + + return nil +} + +func getFieldStructValue[Titemprop any](v reflect.Value, fieldName string) *Titemprop { + field := v.FieldByName(fieldName) + if field.Kind() == reflect.Pointer { + field = field.Elem() + } + + if field.IsValid() && field.CanInterface() { + if value, ok := field.Interface().(Titemprop); ok { + return &value + } + } + + return nil +} diff --git a/internal/pkg/fabricitem/models_data_item_definition_properties.go b/internal/pkg/fabricitem/models_data_item_definition_properties.go new file mode 100644 index 00000000..ea0faf44 --- /dev/null +++ b/internal/pkg/fabricitem/models_data_item_definition_properties.go @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" + "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" +) + +type DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop any] struct { + FabricItemPropertiesModel[Ttfprop, Titemprop] + Format types.String `tfsdk:"format"` + OutputDefinition types.Bool `tfsdk:"output_definition"` + Definition supertypes.MapNestedObjectValueOf[DataSourceFabricItemDefinitionPartModel] `tfsdk:"definition"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +func (to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) setDefinition(ctx 
context.Context, from fabcore.ItemDefinition) diag.Diagnostics { + defParts := make(map[string]*DataSourceFabricItemDefinitionPartModel, len(from.Parts)) + + for _, part := range from.Parts { + newPart := &DataSourceFabricItemDefinitionPartModel{} + + if diags := newPart.Set(*part.Payload); diags.HasError() { + return diags + } + + defParts[*part.Path] = newPart + } + + return to.Definition.Set(ctx, defParts) +} diff --git a/internal/pkg/fabricitem/models_data_items_properties.go b/internal/pkg/fabricitem/models_data_items_properties.go new file mode 100644 index 00000000..420515ff --- /dev/null +++ b/internal/pkg/fabricitem/models_data_items_properties.go @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" + "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/diag" + + "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes" +) + +type DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop any] struct { + WorkspaceID customtypes.UUID `tfsdk:"workspace_id"` + Values supertypes.ListNestedObjectValueOf[FabricItemPropertiesModel[Ttfprop, Titemprop]] `tfsdk:"values"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +func (to *DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop]) setValues(ctx context.Context, from []FabricItemProperties[Titemprop], propertiesSetter func(ctx context.Context, from *Titemprop, to *FabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics) diag.Diagnostics { + slice := make([]*FabricItemPropertiesModel[Ttfprop, Titemprop], 0, len(from)) + + for _, entity := range from { + var entityModel FabricItemPropertiesModel[Ttfprop, Titemprop] + entityModel.set(entity) + + diags := propertiesSetter(ctx, entity.Properties, &entityModel) + if diags.HasError() { + return diags 
+ } + + slice = append(slice, &entityModel) + } + + return to.Values.Set(ctx, slice) +} diff --git a/internal/pkg/fabricitem/models_resource_item_definition_properties.go b/internal/pkg/fabricitem/models_resource_item_definition_properties.go new file mode 100644 index 00000000..37abc206 --- /dev/null +++ b/internal/pkg/fabricitem/models_resource_item_definition_properties.go @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + + azto "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" + + "github.com/microsoft/terraform-provider-fabric/internal/common" + "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes" + "github.com/microsoft/terraform-provider-fabric/internal/pkg/transforms" +) + +type ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop any] struct { + // FabricItemPropertiesModel[Titemprop, Titemprop] + baseFabricItemModel + Properties supertypes.SingleNestedObjectValueOf[Ttfprop] `tfsdk:"properties"` + Format types.String `tfsdk:"format"` + DefinitionUpdateEnabled types.Bool `tfsdk:"definition_update_enabled"` + Definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel] `tfsdk:"definition"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +func (to *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) set(from FabricItemProperties[Titemprop]) { //revive:disable-line:confusing-naming + to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID) + to.ID = customtypes.NewUUIDPointerValue(from.ID) + to.DisplayName = types.StringPointerValue(from.DisplayName) + 
to.Description = types.StringPointerValue(from.Description) +} + +type FabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { //revive:disable-line:exported + fabcore.ItemDefinition +} + +func (to *FabricItemDefinitionProperties[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], update bool, definitionEmpty string, definitionPaths []string) diag.Diagnostics { //revive:disable-line:flag-parameter,confusing-naming + if from.Format.ValueString() != DefinitionFormatNotApplicable { + to.Format = from.Format.ValueStringPointer() + } + + to.Parts = []fabcore.ItemDefinitionPart{} + + defParts, diags := from.Definition.Get(ctx) + if diags.HasError() { + return diags + } + + if (len(defParts) == 0) && len(definitionPaths) > 0 && update { + content := definitionEmpty + + if err := transforms.Base64Encode(&content); err != nil { + diags.AddError( + common.ErrorBase64EncodeHeader, + err.Error(), + ) + + return diags + } + + to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{ + Path: azto.Ptr(definitionPaths[0]), + Payload: &content, + PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64), + }) + + return nil + } + + for defPartKey, defPartValue := range defParts { + if !update || (update && from.DefinitionUpdateEnabled.ValueBool()) { + payloadB64, _, diags := transforms.SourceFileToPayload(ctx, defPartValue.Source, defPartValue.Tokens) + if diags.HasError() { + return diags + } + + to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{ + Path: azto.Ptr(defPartKey), + Payload: payloadB64, + PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64), + }) + } + } + + return nil +} + +type requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { + fabcore.CreateItemRequest +} + +func (to *requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], itemType fabcore.ItemType) 
diag.Diagnostics { //revive:disable-line:confusing-naming + to.DisplayName = from.DisplayName.ValueStringPointer() + to.Description = from.Description.ValueStringPointer() + to.Type = azto.Ptr(itemType) + + if !from.Definition.IsNull() && !from.Definition.IsUnknown() { + var def FabricItemDefinitionProperties[Ttfprop, Titemprop] + + if diags := def.set(ctx, from, false, "", []string{}); diags.HasError() { + return diags + } + + to.Definition = &def.ItemDefinition + } + + return nil +} + +type requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { + fabcore.UpdateItemRequest +} + +func (to *requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]) set(from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) { //revive:disable-line:confusing-naming + to.DisplayName = from.DisplayName.ValueStringPointer() + to.Description = from.Description.ValueStringPointer() +} + +type requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop any] struct { + fabcore.UpdateItemDefinitionRequest +} + +func (to *requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], definitionEmpty string, definitionPaths []string) diag.Diagnostics { //revive:disable-line:confusing-naming + var def FabricItemDefinitionProperties[Ttfprop, Titemprop] + + if diags := def.set(ctx, from, true, definitionEmpty, definitionPaths); diags.HasError() { + return diags + } + + to.Definition = &def.ItemDefinition + + return nil +} diff --git a/internal/pkg/fabricitem/resource_item_definition.go b/internal/pkg/fabricitem/resource_item_definition.go index 1130f2f5..97c4672e 100644 --- a/internal/pkg/fabricitem/resource_item_definition.go +++ b/internal/pkg/fabricitem/resource_item_definition.go @@ -51,7 +51,7 @@ type ResourceFabricItemDefinition struct { DefinitionEmpty string } -func NewResourceFabricItemDefinition(config 
ResourceFabricItemDefinition) resource.Resource { //revive:disable-line:argument-limit +func NewResourceFabricItemDefinition(config ResourceFabricItemDefinition) resource.Resource { return &config } diff --git a/internal/pkg/fabricitem/resource_item_definition_properties.go b/internal/pkg/fabricitem/resource_item_definition_properties.go new file mode 100644 index 00000000..affc75b0 --- /dev/null +++ b/internal/pkg/fabricitem/resource_item_definition_properties.go @@ -0,0 +1,427 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fabricitem + +import ( + "context" + "fmt" + "reflect" + "strings" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/microsoft/fabric-sdk-go/fabric" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" + + "github.com/microsoft/terraform-provider-fabric/internal/common" + "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes" + "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils" + pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ resource.ResourceWithModifyPlan = (*ResourceFabricItemDefinitionProperties[struct{}, struct{}])(nil) + _ resource.ResourceWithConfigure = (*ResourceFabricItemDefinitionProperties[struct{}, struct{}])(nil) + _ resource.ResourceWithImportState = (*ResourceFabricItemDefinitionProperties[struct{}, struct{}])(nil) +) + +type ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { + ResourceFabricItemDefinition + PropertiesSchema schema.SingleNestedAttribute + PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics + ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error +} + +func NewResourceFabricItemDefinitionProperties[Ttfprop, Titemprop any](config ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) resource.Resource { + return &config +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { //revive:disable-line:confusing-naming + resp.TypeName = req.ProviderTypeName + "_" + r.TFName +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { + tflog.Debug(ctx, "MODIFY PLAN", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "MODIFY PLAN", map[string]any{ + "config": req.Config, + "plan": req.Plan, + "state": req.State, + }) + + if !req.State.Raw.IsNull() && !req.Plan.Raw.IsNull() { + var plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) 
+ + if resp.Diagnostics.HasError() { + return + } + + var reqUpdate requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop] + + doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdate) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + if doUpdateDefinition { + resp.Diagnostics.AddWarning( + common.WarningItemDefinitionUpdateHeader, + fmt.Sprintf(common.WarningItemDefinitionUpdateDetails, r.Name), + ) + } + } + + tflog.Debug(ctx, "MODIFY PLAN", map[string]any{ + "action": "end", + }) +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { //revive:disable-line:confusing-naming + resp.Schema = GetResourceFabricItemDefinitionPropertiesSchema1(ctx, *r) +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { //revive:disable-line:confusing-naming + if req.ProviderData == nil { + return + } + + pConfigData, ok := req.ProviderData.(*pconfig.ProviderData) + if !ok { + resp.Diagnostics.AddError( + common.ErrorResourceConfigType, + fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData), + ) + + return + } + + r.pConfigData = pConfigData + r.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient() +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + tflog.Debug(ctx, "CREATE", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "CREATE", map[string]any{ + "config": req.Config, + "plan": req.Plan, + }) + + var plan ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() { + return + } + + timeout, diags := plan.Timeouts.Create(ctx, 
r.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + var reqCreate requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(reqCreate.set(ctx, plan, r.Type)...); resp.Diagnostics.HasError() { + return + } + + respCreate, err := r.client.CreateItem(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateItemRequest, nil) + if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() { + return + } + + plan.ID = customtypes.NewUUIDValue(*respCreate.ID) + plan.WorkspaceID = customtypes.NewUUIDValue(*respCreate.WorkspaceID) + + if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) + + tflog.Debug(ctx, "CREATE", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { //revive:disable-line:confusing-naming + tflog.Debug(ctx, "READ", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "READ", map[string]any{ + "state": req.State, + }) + + var state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() { + return + } + + timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + diags = r.get(ctx, &state) + if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) { + resp.State.RemoveResource(ctx) + + resp.Diagnostics.Append(diags...) 
+ + return + } + + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, state)...) + + tflog.Debug(ctx, "READ", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + tflog.Debug(ctx, "UPDATE", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "UPDATE", map[string]any{ + "config": req.Config, + "plan": req.Plan, + "state": req.State, + }) + + var plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + + if resp.Diagnostics.HasError() { + return + } + + timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + var reqUpdatePlan requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop] + + if r.checkUpdateItem(plan, state, &reqUpdatePlan) { + tflog.Trace(ctx, fmt.Sprintf("updating %s (WorkspaceID: %s ItemID: %s)", r.Name, plan.WorkspaceID.ValueString(), plan.ID.ValueString())) + + _, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdatePlan.UpdateItemRequest, nil) + if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() { + return + } + + if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) 
+ } + + var reqUpdateDefinition requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop] + + doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdateDefinition) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + if doUpdateDefinition { + tflog.Trace(ctx, fmt.Sprintf("updating %s definition", r.Name)) + + _, err := r.client.UpdateItemDefinition(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdateDefinition.UpdateItemDefinitionRequest, nil) + if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() { + return + } + } + + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) + + tflog.Debug(ctx, "UPDATE", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + tflog.Debug(ctx, "DELETE", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "DELETE", map[string]any{ + "state": req.State, + }) + + var state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop] + + if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() { + return + } + + timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout) + if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() { + return + } + + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + _, err := r.client.DeleteItem(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil) + if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() { + return + } + + tflog.Debug(ctx, "DELETE", map[string]any{ + "action": "end", + }) +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ImportState(ctx context.Context, req 
resource.ImportStateRequest, resp *resource.ImportStateResponse) { + tflog.Debug(ctx, "IMPORT", map[string]any{ + "action": "start", + }) + tflog.Trace(ctx, "IMPORT", map[string]any{ + "id": req.ID, + }) + + workspaceID, fabricItemID, found := strings.Cut(req.ID, "/") + if !found { + resp.Diagnostics.AddError( + common.ErrorImportIdentifierHeader, + fmt.Sprintf( + common.ErrorImportIdentifierDetails, + fmt.Sprintf("WorkspaceID/%sID", string(r.Type)), + ), + ) + + return + } + + uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID) + resp.Diagnostics.Append(diags...) + + uuidFabricItemID, diags := customtypes.NewUUIDValueMust(fabricItemID) + resp.Diagnostics.Append(diags...) + + if resp.Diagnostics.HasError() { + return + } + + var timeout timeouts.Value + if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() { + return + } + + var definitionUpdateEnabled types.Bool + if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition_update_enabled"), &definitionUpdateEnabled)...); resp.Diagnostics.HasError() { + return + } + + var definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel] + if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition"), &definition)...); resp.Diagnostics.HasError() { + return + } + + state := ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]{ + baseFabricItemModel: baseFabricItemModel{ + ID: uuidFabricItemID, + WorkspaceID: uuidWorkspaceID, + }, + DefinitionUpdateEnabled: definitionUpdateEnabled, + Definition: definition, + Timeouts: timeout, + } + + if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, state)...) 
+ + tflog.Debug(ctx, "IMPORT", map[string]any{ + "action": "end", + }) + + if resp.Diagnostics.HasError() { + return + } +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) get(ctx context.Context, model *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { + tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", r.Name, model.ID.ValueString())) + + var fabricItem FabricItemProperties[Titemprop] + + err := r.ItemGetter(ctx, *r.pConfigData.FabricClient, *model, &fabricItem) + if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() { + return diags + } + + model.set(fabricItem) + + diags := r.PropertiesSetter(ctx, fabricItem.Properties, model) + if diags.HasError() { + return diags + } + + return nil +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) checkUpdateItem(plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], reqUpdatePlan *requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]) bool { + var reqUpdateState requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop] + + reqUpdatePlan.set(plan) + reqUpdateState.set(state) + + return !reflect.DeepEqual(reqUpdatePlan.UpdateItemRequest, reqUpdateState.UpdateItemRequest) +} + +func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) checkUpdateDefinition(ctx context.Context, plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], reqUpdate *requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]) (bool, diag.Diagnostics) { + if !plan.Definition.Equal(state.Definition) && plan.DefinitionUpdateEnabled.ValueBool() { + if diags := reqUpdate.set(ctx, plan, r.DefinitionEmpty, r.DefinitionPathKeys); diags.HasError() { + return false, diags + } + + if len(reqUpdate.Definition.Parts) > 0 && !plan.Definition.Equal(state.Definition) { + return true, nil + } + } + + return false, nil +} diff --git 
a/internal/pkg/fabricitem/resource_schema.go b/internal/pkg/fabricitem/resource_schema.go index 3a44fca6..8d867243 100644 --- a/internal/pkg/fabricitem/resource_schema.go +++ b/internal/pkg/fabricitem/resource_schema.go @@ -37,7 +37,7 @@ func GetResourceFabricItemSchema(ctx context.Context, r ResourceFabricItem) sche func GetResourceFabricItemDefinitionSchema(ctx context.Context, r ResourceFabricItemDefinition) schema.Schema { attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed) - for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r) { + for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) { attributes[key] = value } @@ -57,11 +57,25 @@ func GetResourceFabricItemPropertiesSchema(ctx context.Context, itemName, markdo } } -func GetResourceFabricItemPropertiesDefinitionSchema(ctx context.Context, r ResourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema { +func GetResourceFabricItemDefinitionPropertiesSchema(ctx context.Context, r ResourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema { attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed) attributes["properties"] = properties - for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r) { + for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) { + attributes[key] = value + } + + return schema.Schema{ + MarkdownDescription: r.MarkdownDescription, + Attributes: attributes, + } +} + +func GetResourceFabricItemDefinitionPropertiesSchema1[Ttfprop, Titemprop 
any](ctx context.Context, r ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema { + attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed) + attributes["properties"] = r.PropertiesSchema + + for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) { attributes[key] = value } @@ -133,7 +147,7 @@ func getResourceFabricItemBaseAttributes(ctx context.Context, name string, displ } // Helper function to get Fabric Item definition attributes. -func getResourceFabricItemDefinitionAttributes(ctx context.Context, r ResourceFabricItemDefinition) map[string]schema.Attribute { +func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, formatTypeDefault string, formatTypes []string, definitionPathDocsURL string, definitionPathKeys []string, definitionPathKeysValidator []validator.Map, definitionRequired bool) map[string]schema.Attribute { //revive:disable-line:flag-parameter attributes := make(map[string]schema.Attribute) attributes["definition_update_enabled"] = schema.BoolAttribute{ @@ -143,34 +157,34 @@ func getResourceFabricItemDefinitionAttributes(ctx context.Context, r ResourceFa Default: booldefault.StaticBool(true), } - if len(r.FormatTypes) > 0 { + if len(formatTypes) > 0 { attributes["format"] = schema.StringAttribute{ - MarkdownDescription: fmt.Sprintf("The %s format. Possible values: %s.", r.Name, utils.ConvertStringSlicesToString(r.FormatTypes, true, false)), + MarkdownDescription: fmt.Sprintf("The %s format. 
Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false)), Computed: true, - Default: stringdefault.StaticString(r.FormatTypeDefault), + Default: stringdefault.StaticString(formatTypeDefault), } } else { attributes["format"] = schema.StringAttribute{ - MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", r.Name, DefinitionFormatNotApplicable), + MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", name, DefinitionFormatNotApplicable), Computed: true, Default: stringdefault.StaticString(DefinitionFormatNotApplicable), } } - if r.DefinitionRequired { + if definitionRequired { attributes["definition"] = schema.MapNestedAttribute{ - MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(r.DefinitionPathKeys, true, false), r.Name, r.DefinitionPathDocsURL), + MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL), Required: true, CustomType: supertypes.NewMapNestedObjectTypeOf[ResourceFabricItemDefinitionPartModel](ctx), - Validators: r.DefinitionPathKeysValidator, + Validators: definitionPathKeysValidator, NestedObject: getResourceFabricItemDefinitionPartSchema(ctx), } } else { attributes["definition"] = schema.MapNestedAttribute{ - MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(r.DefinitionPathKeys, true, false), r.Name, r.DefinitionPathDocsURL), + MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. 
Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL), Optional: true, CustomType: supertypes.NewMapNestedObjectTypeOf[ResourceFabricItemDefinitionPartModel](ctx), - Validators: r.DefinitionPathKeysValidator, + Validators: definitionPathKeysValidator, NestedObject: getResourceFabricItemDefinitionPartSchema(ctx), } } diff --git a/internal/pkg/utils/errors.go b/internal/pkg/utils/errors.go index 92c315b0..203cb8cb 100644 --- a/internal/pkg/utils/errors.go +++ b/internal/pkg/utils/errors.go @@ -69,7 +69,7 @@ func IsErrNotFound(resourceID string, diags *diag.Diagnostics, err error) bool { return false } -func GetDiagsFromError(ctx context.Context, err error, operation Operation, errIs error) diag.Diagnostics { //nolint:gocognit +func GetDiagsFromError(ctx context.Context, err error, operation Operation, errIs error) diag.Diagnostics { //nolint:gocognit, gocognit if err == nil { return nil } diff --git a/internal/provider/provider.go b/internal/provider/provider.go index 292dec94..e9c89e05 100644 --- a/internal/provider/provider.go +++ b/internal/provider/provider.go @@ -352,7 +352,7 @@ func (p *FabricProvider) Configure(ctx context.Context, req provider.ConfigureRe tflog.Info(ctx, "Configured Microsoft Fabric client", map[string]any{"success": true}) } -func (p *FabricProvider) Resources(_ context.Context) []func() resource.Resource { +func (p *FabricProvider) Resources(ctx context.Context) []func() resource.Resource { return []func() resource.Resource{ datapipeline.NewResourceDataPipeline, domain.NewResourceDomain, @@ -371,7 +371,7 @@ func (p *FabricProvider) Resources(_ context.Context) []func() resource.Resource spark.NewResourceSparkCustomPool, spark.NewResourceSparkEnvironmentSettings, spark.NewResourceSparkWorkspaceSettings, - sparkjobdefinition.NewResourceSparkJobDefinition, + func() resource.Resource { return sparkjobdefinition.NewResourceSparkJobDefinition(ctx) }, 
warehouse.NewResourceWarehouse, workspace.NewResourceWorkspace, workspace.NewResourceWorkspaceRoleAssignment, @@ -379,7 +379,7 @@ func (p *FabricProvider) Resources(_ context.Context) []func() resource.Resource } } -func (p *FabricProvider) DataSources(_ context.Context) []func() datasource.DataSource { +func (p *FabricProvider) DataSources(ctx context.Context) []func() datasource.DataSource { return []func() datasource.DataSource{ capacity.NewDataSourceCapacity, capacity.NewDataSourceCapacities, @@ -419,8 +419,8 @@ func (p *FabricProvider) DataSources(_ context.Context) []func() datasource.Data spark.NewDataSourceSparkCustomPool, spark.NewDataSourceSparkEnvironmentSettings, spark.NewDataSourceSparkWorkspaceSettings, - sparkjobdefinition.NewDataSourceSparkJobDefinition, - sparkjobdefinition.NewDataSourceSparkJobDefinitions, + func() datasource.DataSource { return sparkjobdefinition.NewDataSourceSparkJobDefinition(ctx) }, + func() datasource.DataSource { return sparkjobdefinition.NewDataSourceSparkJobDefinitions(ctx) }, sqlendpoint.NewDataSourceSQLEndpoints, warehouse.NewDataSourceWarehouse, warehouse.NewDataSourceWarehouses, diff --git a/internal/services/report/resource_report_test.go b/internal/services/report/resource_report_test.go index a09a4351..57964a52 100644 --- a/internal/services/report/resource_report_test.go +++ b/internal/services/report/resource_report_test.go @@ -218,7 +218,7 @@ func TestUnit_ReportResource_CRUD(t *testing.T) { fakes.FakeServer.Upsert(entityAfter) fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) - testHelperDefinition[`"definition.pbir"`].(map[string]any)["tokens"].(map[string]any)["SemanticModelID"] = *semanticModel.ID //nolint:forcetypeassert + testHelperDefinition[`"definition.pbir"`].(map[string]any)["tokens"].(map[string]any)["SemanticModelID"] = *semanticModel.ID resource.Test(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{ // error 
- create - existing entity diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition.go b/internal/services/sparkjobdefinition/data_spark_job_definition.go index 5ae137f8..7fd54d14 100644 --- a/internal/services/sparkjobdefinition/data_spark_job_definition.go +++ b/internal/services/sparkjobdefinition/data_spark_job_definition.go @@ -4,24 +4,103 @@ package sparkjobdefinition import ( + "context" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/microsoft/fabric-sdk-go/fabric" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" + fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition" "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem" ) -func NewDataSourceSparkJobDefinition() datasource.DataSource { - config := fabricitem.DataSourceFabricItemDefinition{ - Type: ItemType, - Name: ItemName, - TFName: ItemTFName, - MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" + - "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" + - ItemDocsSPNSupport, - IsDisplayNameUnique: true, - FormatTypeDefault: ItemFormatTypeDefault, - FormatTypes: ItemFormatTypes, - DefinitionPathKeys: ItemDefinitionPaths, +func NewDataSourceSparkJobDefinition(ctx context.Context) datasource.DataSource { + propertiesSchema := schema.SingleNestedAttribute{ + MarkdownDescription: "The " + ItemName + " properties.", + Computed: true, + CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx), + Attributes: map[string]schema.Attribute{ + "onelake_root_path": schema.StringAttribute{ + MarkdownDescription: "OneLake path to the Spark Job Definition root directory.", + Computed: true, + }, + }, + } + + propertiesSetter := func(ctx 
context.Context, from *fabsparkjobdefinition.Properties, to *fabricitem.DataSourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics { + properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx) + + if from != nil { + propertiesModel := &sparkJobDefinitionPropertiesModel{} + propertiesModel.set(from) + + diags := properties.Set(ctx, propertiesModel) + if diags.HasError() { + return diags + } + } + + to.Properties = properties + + return nil + } + + itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties], fabricItem *fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties]) error { + client := fabsparkjobdefinition.NewClientFactoryWithClient(fabricClient).NewItemsClient() + + respGet, err := client.GetSparkJobDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil) + if err != nil { + return err + } + + fabricItem.Set(respGet.SparkJobDefinition) + + return nil + } + + itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties]) error { + client := fabsparkjobdefinition.NewClientFactoryWithClient(fabricClient).NewItemsClient() + + pager := client.NewListSparkJobDefinitionsPager(model.WorkspaceID.ValueString(), nil) + for pager.More() { + page, err := pager.NextPage(ctx) + if err != nil { + return err + } + + for _, entity := range page.Value { + if *entity.DisplayName == model.DisplayName.ValueString() { + fabricItem.Set(entity) + + return nil + } + } + } + + return &errNotFound + } + + config := 
fabricitem.DataSourceFabricItemDefinitionProperties[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]{ + DataSourceFabricItemDefinition: fabricitem.DataSourceFabricItemDefinition{ + Type: ItemType, + Name: ItemName, + TFName: ItemTFName, + MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" + + "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" + + ItemDocsSPNSupport, + IsDisplayNameUnique: true, + FormatTypeDefault: ItemFormatTypeDefault, + FormatTypes: ItemFormatTypes, + DefinitionPathKeys: ItemDefinitionPaths, + }, + PropertiesSchema: propertiesSchema, + PropertiesSetter: propertiesSetter, + ItemGetter: itemGetter, + ItemListGetter: itemListGetter, } - return fabricitem.NewDataSourceFabricItemDefinition(config) + return fabricitem.NewDataSourceFabricItemDefinitionProperties(config) } diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go index 0b6c1489..b1b01f8b 100644 --- a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go +++ b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go @@ -23,11 +23,11 @@ var ( func TestUnit_SparkJobDefinitionDataSource(t *testing.T) { workspaceID := testhelp.RandomUUID() - entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) + entity := fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) fakes.FakeServer.Upsert(entity) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{ // error - no attributes @@ -173,6 +173,7 @@ func 
TestAcc_SparkJobDefinitionDataSource(t *testing.T) { resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID), resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName), resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription), + resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.onelake_root_path"), ), }, // read by id - not found @@ -200,6 +201,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) { resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID), resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName), resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription), + resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.onelake_root_path"), ), }, // read by name - not found @@ -213,5 +215,23 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) { ), ExpectError: regexp.MustCompile(common.ErrorReadHeader), }, + // read by id with definition + { + Config: at.CompileConfig( + testDataSourceItemHeader, + map[string]any{ + "workspace_id": workspaceID, + "id": entityID, + "output_definition": true, + }, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(testDataSourceItemFQN, "workspace_id", workspaceID), + resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID), + resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName), + resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription), + resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "definition.SparkJobDefinitionV1.json.content"), + ), + }, })) } diff --git a/internal/services/sparkjobdefinition/data_spark_job_definitions.go b/internal/services/sparkjobdefinition/data_spark_job_definitions.go index 16ec68ab..fe77242a 100644 --- a/internal/services/sparkjobdefinition/data_spark_job_definitions.go +++ 
b/internal/services/sparkjobdefinition/data_spark_job_definitions.go @@ -4,21 +4,86 @@ package sparkjobdefinition import ( + "context" + + supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/microsoft/fabric-sdk-go/fabric" + fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition" "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem" ) -func NewDataSourceSparkJobDefinitions() datasource.DataSource { - config := fabricitem.DataSourceFabricItems{ - Type: ItemType, - Name: ItemName, - Names: ItemsName, - TFName: ItemsTFName, - MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" + - "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" + - ItemDocsSPNSupport, +func NewDataSourceSparkJobDefinitions(ctx context.Context) datasource.DataSource { + propertiesSchema := schema.SingleNestedAttribute{ + MarkdownDescription: "The " + ItemName + " properties.", + Computed: true, + CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx), + Attributes: map[string]schema.Attribute{ + "onelake_root_path": schema.StringAttribute{ + MarkdownDescription: "OneLake path to the Spark Job Definition root directory.", + Computed: true, + }, + }, + } + + propertiesSetter := func(ctx context.Context, from *fabsparkjobdefinition.Properties, to *fabricitem.FabricItemPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics { + properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx) + + if from != nil { + propertiesModel := &sparkJobDefinitionPropertiesModel{} + propertiesModel.set(from) + + diags := properties.Set(ctx, propertiesModel) + if diags.HasError() { + return diags + 
} + } + + to.Properties = properties + + return nil + } + + itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties], fabricItems *[]fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties]) error { + client := fabsparkjobdefinition.NewClientFactoryWithClient(fabricClient).NewItemsClient() + + fabItems := make([]fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties], 0) + + respList, err := client.ListSparkJobDefinitions(ctx, model.WorkspaceID.ValueString(), nil) + if err != nil { + return err + } + + for _, entity := range respList { + var fabricItem fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties] + + fabricItem.Set(entity) + + fabItems = append(fabItems, fabricItem) + } + + *fabricItems = fabItems + + return nil + } + + config := fabricitem.DataSourceFabricItemsProperties[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]{ + DataSourceFabricItems: fabricitem.DataSourceFabricItems{ + Type: ItemType, + Name: ItemName, + Names: ItemsName, + TFName: ItemsTFName, + MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" + + "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" + + ItemDocsSPNSupport, + }, + PropertiesSchema: propertiesSchema, + PropertiesSetter: propertiesSetter, + ItemListGetter: itemListGetter, } - return fabricitem.NewDataSourceFabricItems(config) + return fabricitem.NewDataSourceFabricItemsProperties(config) } diff --git a/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go b/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go index 2610fab4..daefe6d1 100644 --- a/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go +++ b/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go @@ -22,11 +22,11 @@ var ( func TestUnit_SparkJobDefinitionsDataSource(t 
*testing.T) { workspaceID := testhelp.RandomUUID() - entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) + entity := fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) fakes.FakeServer.Upsert(entity) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{ // error - no attributes diff --git a/internal/services/sparkjobdefinition/models.go b/internal/services/sparkjobdefinition/models.go new file mode 100644 index 00000000..c91c16c8 --- /dev/null +++ b/internal/services/sparkjobdefinition/models.go @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package sparkjobdefinition + +import ( + fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition" + + "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes" +) + +type sparkJobDefinitionPropertiesModel struct { + OneLakeRootPath customtypes.URL `tfsdk:"onelake_root_path"` +} + +func (to *sparkJobDefinitionPropertiesModel) set(from *fabsparkjobdefinition.Properties) { + to.OneLakeRootPath = customtypes.NewURLPointerValue(from.OneLakeRootPath) +} diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition.go b/internal/services/sparkjobdefinition/resource_spark_job_definition.go index b9eb2837..e09f01b4 100644 --- a/internal/services/sparkjobdefinition/resource_spark_job_definition.go +++ b/internal/services/sparkjobdefinition/resource_spark_job_definition.go @@ -4,36 +4,91 @@ package sparkjobdefinition import ( + "context" + + supertypes 
"github.com/FrangipaneTeam/terraform-plugin-framework-supertypes" "github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/microsoft/fabric-sdk-go/fabric" + fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition" "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem" ) -func NewResourceSparkJobDefinition() resource.Resource { - config := fabricitem.ResourceFabricItemDefinition{ - Type: ItemType, - Name: ItemName, - NameRenameAllowed: true, - TFName: ItemTFName, - MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" + - "Use this resource to manage a [" + ItemName + "](" + ItemDocsURL + ").\n\n" + - ItemDocsSPNSupport, - DisplayNameMaxLength: 123, - DescriptionMaxLength: 256, - FormatTypeDefault: ItemFormatTypeDefault, - FormatTypes: ItemFormatTypes, - DefinitionPathDocsURL: ItemDefinitionPathDocsURL, - DefinitionPathKeys: ItemDefinitionPaths, - DefinitionPathKeysValidator: []validator.Map{ - mapvalidator.SizeAtMost(1), - mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)), +func NewResourceSparkJobDefinition(ctx context.Context) resource.Resource { + propertiesSchema := schema.SingleNestedAttribute{ + MarkdownDescription: "The " + ItemName + " properties.", + Computed: true, + CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx), + Attributes: map[string]schema.Attribute{ + "onelake_root_path": schema.StringAttribute{ + MarkdownDescription: "OneLake path to the Spark Job Definition root directory.", + Computed: true, + }, + }, + } + + propertiesSetter := func(ctx context.Context, from 
*fabsparkjobdefinition.Properties, to *fabricitem.ResourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics { + properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx) + + if from != nil { + propertiesModel := &sparkJobDefinitionPropertiesModel{} + propertiesModel.set(from) + + diags := properties.Set(ctx, propertiesModel) + if diags.HasError() { + return diags + } + } + + to.Properties = properties + + return nil + } + + itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties], fabricItem *fabricitem.FabricItemProperties[fabsparkjobdefinition.Properties]) error { + client := fabsparkjobdefinition.NewClientFactoryWithClient(fabricClient).NewItemsClient() + + respGet, err := client.GetSparkJobDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil) + if err != nil { + return err + } + + fabricItem.Set(respGet.SparkJobDefinition) + + return nil + } + + config := fabricitem.ResourceFabricItemDefinitionProperties[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]{ + ResourceFabricItemDefinition: fabricitem.ResourceFabricItemDefinition{ + Type: ItemType, + Name: ItemName, + NameRenameAllowed: true, + TFName: ItemTFName, + MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" + + "Use this resource to manage a [" + ItemName + "](" + ItemDocsURL + ").\n\n" + + ItemDocsSPNSupport, + DisplayNameMaxLength: 123, + DescriptionMaxLength: 256, + FormatTypeDefault: ItemFormatTypeDefault, + FormatTypes: ItemFormatTypes, + DefinitionPathDocsURL: ItemDefinitionPathDocsURL, + DefinitionPathKeys: ItemDefinitionPaths, + DefinitionPathKeysValidator: []validator.Map{ + mapvalidator.SizeAtMost(1), + mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)), + }, + 
DefinitionRequired: false, + DefinitionEmpty: ItemDefinitionEmpty, }, - DefinitionRequired: false, - DefinitionEmpty: ItemDefinitionEmpty, + PropertiesSchema: propertiesSchema, + PropertiesSetter: propertiesSetter, + ItemGetter: itemGetter, } - return fabricitem.NewResourceFabricItemDefinition(config) + return fabricitem.NewResourceFabricItemDefinitionProperties(config) } diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go index f00f0c76..eba7fd27 100644 --- a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go +++ b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go @@ -112,11 +112,11 @@ func TestUnit_SparkJobDefinitionResource_Attributes(t *testing.T) { func TestUnit_SparkJobDefinitionResource_ImportState(t *testing.T) { workspaceID := testhelp.RandomUUID() - entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) + entity := fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) fakes.FakeServer.Upsert(entity) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) testCase := at.JoinConfigs( testHelperLocals, @@ -182,14 +182,14 @@ func TestUnit_SparkJobDefinitionResource_ImportState(t *testing.T) { func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) { workspaceID := testhelp.RandomUUID() - entityExist := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) - entityBefore := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) - entityAfter := fakes.NewRandomItemWithWorkspace(itemType, workspaceID) + entityExist := fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) + entityBefore := 
fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) + entityAfter := fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) fakes.FakeServer.Upsert(entityExist) fakes.FakeServer.Upsert(entityAfter) - fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID)) + fakes.FakeServer.Upsert(fakes.NewRandomSparkJobDefinitionWithWorkspace(workspaceID)) resource.Test(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{ // error - create - existing entity diff --git a/internal/testhelp/fakes/fabric_sparkjobdefinition.go b/internal/testhelp/fakes/fabric_sparkjobdefinition.go new file mode 100644 index 00000000..325eed76 --- /dev/null +++ b/internal/testhelp/fakes/fabric_sparkjobdefinition.go @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation +// SPDX-License-Identifier: MPL-2.0 + +package fakes + +import ( + "net/http" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" + fabfake "github.com/microsoft/fabric-sdk-go/fabric/fake" + fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition" + + "github.com/microsoft/terraform-provider-fabric/internal/testhelp" +) + +type operationsSparkJobDefinition struct{} + +// CreateDefinition implements concreteDefinitionOperations. +func (o *operationsSparkJobDefinition) CreateDefinition(data fabsparkjobdefinition.CreateSparkJobDefinitionRequest) *fabsparkjobdefinition.PublicDefinition { + return data.Definition +} + +// TransformDefinition implements concreteDefinitionOperations. 
+func (o *operationsSparkJobDefinition) TransformDefinition(entity *fabsparkjobdefinition.PublicDefinition) fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionDefinitionResponse { + return fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionDefinitionResponse{ + Response: fabsparkjobdefinition.Response{ + Definition: entity, + }, + } +} + +// UpdateDefinition implements concreteDefinitionOperations. +func (o *operationsSparkJobDefinition) UpdateDefinition(_ *fabsparkjobdefinition.PublicDefinition, data fabsparkjobdefinition.UpdateSparkJobDefinitionDefinitionRequest) *fabsparkjobdefinition.PublicDefinition { + return data.Definition +} + +// CreateWithParentID implements concreteOperations. +func (o *operationsSparkJobDefinition) CreateWithParentID(parentID string, data fabsparkjobdefinition.CreateSparkJobDefinitionRequest) fabsparkjobdefinition.SparkJobDefinition { + entity := NewRandomSparkJobDefinitionWithWorkspace(parentID) + entity.DisplayName = data.DisplayName + entity.Description = data.Description + + return entity +} + +// Filter implements concreteOperations. +func (o *operationsSparkJobDefinition) Filter(entities []fabsparkjobdefinition.SparkJobDefinition, parentID string) []fabsparkjobdefinition.SparkJobDefinition { + ret := make([]fabsparkjobdefinition.SparkJobDefinition, 0) + + for _, entity := range entities { + if *entity.WorkspaceID == parentID { + ret = append(ret, entity) + } + } + + return ret +} + +// GetID implements concreteOperations. +func (o *operationsSparkJobDefinition) GetID(entity fabsparkjobdefinition.SparkJobDefinition) string { + return generateID(*entity.WorkspaceID, *entity.ID) +} + +// TransformCreate implements concreteOperations. 
+func (o *operationsSparkJobDefinition) TransformCreate(entity fabsparkjobdefinition.SparkJobDefinition) fabsparkjobdefinition.ItemsClientCreateSparkJobDefinitionResponse { + return fabsparkjobdefinition.ItemsClientCreateSparkJobDefinitionResponse{ + SparkJobDefinition: entity, + } +} + +// TransformGet implements concreteOperations. +func (o *operationsSparkJobDefinition) TransformGet(entity fabsparkjobdefinition.SparkJobDefinition) fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionResponse { + return fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionResponse{ + SparkJobDefinition: entity, + } +} + +// TransformList implements concreteOperations. +func (o *operationsSparkJobDefinition) TransformList(entities []fabsparkjobdefinition.SparkJobDefinition) fabsparkjobdefinition.ItemsClientListSparkJobDefinitionsResponse { + return fabsparkjobdefinition.ItemsClientListSparkJobDefinitionsResponse{ + SparkJobDefinitions: fabsparkjobdefinition.SparkJobDefinitions{ + Value: entities, + }, + } +} + +// TransformUpdate implements concreteOperations. +func (o *operationsSparkJobDefinition) TransformUpdate(entity fabsparkjobdefinition.SparkJobDefinition) fabsparkjobdefinition.ItemsClientUpdateSparkJobDefinitionResponse { + return fabsparkjobdefinition.ItemsClientUpdateSparkJobDefinitionResponse{ + SparkJobDefinition: entity, + } +} + +// Update implements concreteOperations. +func (o *operationsSparkJobDefinition) Update(base fabsparkjobdefinition.SparkJobDefinition, data fabsparkjobdefinition.UpdateSparkJobDefinitionRequest) fabsparkjobdefinition.SparkJobDefinition { + base.Description = data.Description + base.DisplayName = data.DisplayName + + return base +} + +// Validate implements concreteOperations. 
+func (o *operationsSparkJobDefinition) Validate(newEntity fabsparkjobdefinition.SparkJobDefinition, existing []fabsparkjobdefinition.SparkJobDefinition) (int, error) { + for _, entity := range existing { + if *entity.DisplayName == *newEntity.DisplayName { + return http.StatusConflict, fabfake.SetResponseError(http.StatusConflict, fabcore.ErrItem.ItemDisplayNameAlreadyInUse.Error(), fabcore.ErrItem.ItemDisplayNameAlreadyInUse.Error()) + } + } + + return http.StatusCreated, nil +} + +func configureSparkJobDefinition(server *fakeServer) fabsparkjobdefinition.SparkJobDefinition { + type concreteEntityOperations interface { + parentIDOperations[ + fabsparkjobdefinition.SparkJobDefinition, + fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionResponse, + fabsparkjobdefinition.ItemsClientUpdateSparkJobDefinitionResponse, + fabsparkjobdefinition.ItemsClientCreateSparkJobDefinitionResponse, + fabsparkjobdefinition.ItemsClientListSparkJobDefinitionsResponse, + fabsparkjobdefinition.CreateSparkJobDefinitionRequest, + fabsparkjobdefinition.UpdateSparkJobDefinitionRequest] + } + + type concreteDefinitionOperations interface { + definitionOperations[ + fabsparkjobdefinition.PublicDefinition, + fabsparkjobdefinition.CreateSparkJobDefinitionRequest, + fabsparkjobdefinition.UpdateSparkJobDefinitionDefinitionRequest, + fabsparkjobdefinition.ItemsClientGetSparkJobDefinitionDefinitionResponse, + fabsparkjobdefinition.ItemsClientUpdateSparkJobDefinitionDefinitionResponse] + } + + var entityOperations concreteEntityOperations = &operationsSparkJobDefinition{} + + var definitionOperations concreteDefinitionOperations = &operationsSparkJobDefinition{} + + handler := newTypedHandler(server, entityOperations) + + configureEntityWithParentID( + handler, + entityOperations, + &server.ServerFactory.SparkJobDefinition.ItemsServer.GetSparkJobDefinition, + &server.ServerFactory.SparkJobDefinition.ItemsServer.UpdateSparkJobDefinition, + 
&server.ServerFactory.SparkJobDefinition.ItemsServer.BeginCreateSparkJobDefinition, + &server.ServerFactory.SparkJobDefinition.ItemsServer.NewListSparkJobDefinitionsPager, + &server.ServerFactory.SparkJobDefinition.ItemsServer.DeleteSparkJobDefinition) + + configureDefinitions( + handler, + entityOperations, + definitionOperations, + &server.ServerFactory.SparkJobDefinition.ItemsServer.BeginCreateSparkJobDefinition, + &server.ServerFactory.SparkJobDefinition.ItemsServer.BeginGetSparkJobDefinitionDefinition, + &server.ServerFactory.SparkJobDefinition.ItemsServer.BeginUpdateSparkJobDefinitionDefinition) + + return fabsparkjobdefinition.SparkJobDefinition{} +} + +func NewRandomSparkJobDefinition() fabsparkjobdefinition.SparkJobDefinition { + return fabsparkjobdefinition.SparkJobDefinition{ + ID: to.Ptr(testhelp.RandomUUID()), + DisplayName: to.Ptr(testhelp.RandomName()), + Description: to.Ptr(testhelp.RandomName()), + WorkspaceID: to.Ptr(testhelp.RandomUUID()), + Type: to.Ptr(fabsparkjobdefinition.ItemTypeSparkJobDefinition), + Properties: &fabsparkjobdefinition.Properties{ + OneLakeRootPath: to.Ptr(testhelp.RandomURI()), + }, + } +} + +func NewRandomSparkJobDefinitionWithWorkspace(workspaceID string) fabsparkjobdefinition.SparkJobDefinition { + result := NewRandomSparkJobDefinition() + result.WorkspaceID = &workspaceID + + return result +} + +func NewRandomSparkJobDefinitionDefinition() fabsparkjobdefinition.PublicDefinition { + defPart := fabsparkjobdefinition.PublicDefinitionPart{ + PayloadType: to.Ptr(fabsparkjobdefinition.PayloadTypeInlineBase64), + Path: to.Ptr("SparkJobDefinitionV1.json"), + Payload: 
to.Ptr("ew0KICAiZXhlY3V0YWJsZUZpbGUiOiBudWxsLA0KICAiZGVmYXVsdExha2Vob3VzZUFydGlmYWN0SWQiOiBudWxsLA0KICAibWFpbkNsYXNzIjogbnVsbCwNCiAgImFkZGl0aW9uYWxMYWtlaG91c2VJZHMiOiBbXSwNCiAgInJldHJ5UG9saWN5IjogbnVsbCwNCiAgImNvbW1hbmRMaW5lQXJndW1lbnRzIjogbnVsbCwNCiAgImFkZGl0aW9uYWxMaWJyYXJ5VXJpcyI6IG51bGwsDQogICJsYW5ndWFnZSI6IG51bGwsDQogICJlbnZpcm9ubWVudEFydGlmYWN0SWQiOiBudWxsDQp9"), + } + + var defParts []fabsparkjobdefinition.PublicDefinitionPart + + defParts = append(defParts, defPart) + + return fabsparkjobdefinition.PublicDefinition{ + Parts: defParts, + } +} diff --git a/internal/testhelp/fakes/fake_server.go b/internal/testhelp/fakes/fake_server.go index 31146846..b6f671e9 100644 --- a/internal/testhelp/fakes/fake_server.go +++ b/internal/testhelp/fakes/fake_server.go @@ -40,6 +40,7 @@ func newFakeServer() *fakeServer { handleEntity(server, configureNotebook) handleEntity(server, configureReport) handleEntity(server, configureSemanticModel) + handleEntity(server, configureSparkJobDefinition) handleEntity(server, configureWarehouse) handleEntity(server, configureWorkspace) diff --git a/internal/testhelp/fakes/fake_typedhandler.go b/internal/testhelp/fakes/fake_typedhandler.go index c5dfb78d..7913ce07 100644 --- a/internal/testhelp/fakes/fake_typedhandler.go +++ b/internal/testhelp/fakes/fake_typedhandler.go @@ -5,6 +5,8 @@ package fakes import ( "context" + "reflect" + "strings" azfake "github.com/Azure/azure-sdk-for-go/sdk/azcore/fake" fabcore "github.com/microsoft/fabric-sdk-go/fabric/core" @@ -101,9 +103,17 @@ func generateID(parentID, childID string) string { func (h *typedHandler[TEntity]) Elements() []TEntity { ret := make([]TEntity, 0) - for _, element := range h.elements { - if element, ok := element.(TEntity); ok { - ret = append(ret, element) + // if it is a FabricItem, return all the elements as fabric items + if h.entityTypeIsFabricItem() { + for _, element := range h.elements { + item := asFabricItem(element) + ret = append(ret, h.getFabricItemAsTEntity(item)) 
+ } + } else { + for _, element := range h.elements { + if element, ok := element.(TEntity); ok { + ret = append(ret, element) + } } } @@ -119,6 +129,12 @@ func (h *typedHandler[TEntity]) Delete(id string) { if h.identifier.GetID(typedElement) != id { newElements = append(newElements, element) } + } else if h.entityTypeCanBeConvertedToFabricItem() { + // if it wasn't found, try to find it as fabric item + item := asFabricItem(element) + if !strings.HasSuffix(id, *item.ID) { + newElements = append(newElements, element) + } } else { newElements = append(newElements, element) } @@ -140,18 +156,58 @@ func (h *typedHandler[TEntity]) Upsert(element TEntity) { // Get gets an element by ID. func (h *typedHandler[TEntity]) Get(id string) TEntity { - pointer := h.getPointer(id) + // check if TEntity is FabricItem + if h.entityTypeIsFabricItem() { + for _, element := range h.elements { + item := asFabricItem(element) + if strings.HasSuffix(id, *item.ID) { + //nolint + return element.(TEntity) + } + } - if pointer == nil { panic("Element not found") // lintignore:R009 } - return *pointer + // if it is not a FabricItem, find the element by ID + pointer := h.getPointer(id) + if pointer != nil { + return *pointer + } + + // if it still wasn't found, try to find it if they were inserted as fabric items + if h.entityTypeCanBeConvertedToFabricItem() { + for _, element := range h.elements { + item := asFabricItem(element) + if strings.HasSuffix(id, *item.ID) { + return h.getFabricItemAsTEntity(item) + } + } + } + + // if that didn't work, panic + panic("Element not found") // lintignore:R009 } // Contains returns true if the element exists. 
func (h *typedHandler[TEntity]) Contains(id string) bool { - return h.getPointer(id) != nil + found := h.getPointer(id) != nil + + if found { + return true + } + + // if it wasn't found, try to find it as fabric item + if h.entityTypeCanBeConvertedToFabricItem() { + for _, element := range h.elements { + item := asFabricItem(element) + if strings.HasSuffix(id, *item.ID) { + return true + } + } + } + + return false } // getPointer gets a pointer to an element by ID. @@ -166,3 +222,74 @@ func (h *typedHandler[TEntity]) getPointer(id string) *TEntity { return nil } + +func (h *typedHandler[TEntity]) getFabricItemAsTEntity(item fabcore.Item) TEntity { + var entity TEntity + + h.setReflectedStringPropertyValue(&entity, "ID", *item.ID) + h.setReflectedStringPropertyValue(&entity, "WorkspaceID", *item.WorkspaceID) + h.setReflectedStringPropertyValue(&entity, "DisplayName", *item.DisplayName) + h.setReflectedStringPropertyValue(&entity, "Description", *item.Description) + h.setReflectedStringPropertyValue(&entity, "Type", string(*item.Type)) + + return entity +} + +// asFabricItem converts an element to a fabric item. +func asFabricItem(element any) fabcore.Item { + itemType := fabcore.ItemType(*getReflectedStringPropertyValue(element, "Type")) + + item := fabcore.Item{ + Type: &itemType, + Description: getReflectedStringPropertyValue(element, "Description"), + DisplayName: getReflectedStringPropertyValue(element, "DisplayName"), + ID: getReflectedStringPropertyValue(element, "ID"), + WorkspaceID: getReflectedStringPropertyValue(element, "WorkspaceID"), + } + + return item +} + +// getReflectedStringPropertyValue gets a string property value from a reflected object. 
+func getReflectedStringPropertyValue(element any, propertyName string) *string { + reflectedValue := reflect.ValueOf(element) + propertyValue := reflectedValue.FieldByName(propertyName) + + str := propertyValue.Elem().String() + + return &str +} + +// setReflectedStringPropertyValue sets a string property value on a reflected object. +func (h *typedHandler[TEntity]) setReflectedStringPropertyValue(entity *TEntity, propertyName, value string) { + reflectedValue := reflect.ValueOf(entity).Elem() + propertyValue := reflectedValue.FieldByName(propertyName) + + // create a new pointer to the type of the property + ptr := reflect.New(propertyValue.Type().Elem()) + ptr.Elem().SetString(value) + + // set the value as a pointer + propertyValue.Set(ptr) +} + +func (h *typedHandler[TEntity]) entityTypeIsFabricItem() bool { + var entity TEntity + + return reflect.TypeOf(entity) == reflect.TypeOf(fabcore.Item{}) +} + +func (h *typedHandler[TEntity]) entityTypeCanBeConvertedToFabricItem() bool { + var entity TEntity + + requiredPropertyNames := []string{"ID", "WorkspaceID", "DisplayName", "Description", "Type"} + + for _, propertyName := range requiredPropertyNames { + // check if the property exists + if !reflect.ValueOf(entity).FieldByName(propertyName).IsValid() { + return false + } + } + + return true +}