diff --git a/.changes/unreleased/added-20241223-094847.yaml b/.changes/unreleased/added-20241223-094847.yaml
new file mode 100644
index 00000000..3e7c6a5c
--- /dev/null
+++ b/.changes/unreleased/added-20241223-094847.yaml
@@ -0,0 +1,7 @@
+kind: added
+body: |
+ Added support for multi-format Notebook Resource/Data-Source.
+ By using the `format` attribute, you can now define the format of the Notebook Resource/Data-Source. Accepted values are `ipynb` and `py`.
+time: 2024-12-23T09:48:47.1324573-08:00
+custom:
+ Issue: "168"
diff --git a/.changes/unreleased/added-20241223-094848.yaml b/.changes/unreleased/added-20241223-094848.yaml
new file mode 100644
index 00000000..d1873e39
--- /dev/null
+++ b/.changes/unreleased/added-20241223-094848.yaml
@@ -0,0 +1,7 @@
+kind: added
+body: |
+ Added support for multi-format Report Resource/Data-Source.
+ By using the `format` attribute, you can now define the format of the Report Resource/Data-Source. Accepted values are `PBIR` and `PBIR-Legacy`.
+time: 2024-12-23T09:48:48.1324573-08:00
+custom:
+ Issue: "171"
diff --git a/.changes/unreleased/added-20241223-094849.yaml b/.changes/unreleased/added-20241223-094849.yaml
new file mode 100644
index 00000000..8b79eae7
--- /dev/null
+++ b/.changes/unreleased/added-20241223-094849.yaml
@@ -0,0 +1,7 @@
+kind: added
+body: |
+ Added support for multi-format Semantic Model Resource/Data-Source.
+ By using the `format` attribute, you can now define the format of the Semantic Model Resource/Data-Source. Accepted values are `TMSL` and `TMDL`.
+time: 2024-12-23T09:48:49.1324573-08:00
+custom:
+ Issue: "171"
diff --git a/.changes/unreleased/breaking-20241223-094847.yaml b/.changes/unreleased/breaking-20241223-094847.yaml
new file mode 100644
index 00000000..e4d13a4e
--- /dev/null
+++ b/.changes/unreleased/breaking-20241223-094847.yaml
@@ -0,0 +1,7 @@
+kind: breaking
+body: |
+ The `format` attribute is now REQUIRED for Resources/Data-Sources with definition support.
+ Currently applicable to the following Resources/Data-Sources: Report, Notebook, Semantic Model, and Spark Job Definition.
+time: 2024-12-23T09:48:47.1324573-08:00
+custom:
+ Issue: "171"
diff --git a/docs/data-sources/eventhouse.md b/docs/data-sources/eventhouse.md
index 2e060250..1444594d 100644
--- a/docs/data-sources/eventhouse.md
+++ b/docs/data-sources/eventhouse.md
@@ -66,6 +66,7 @@ output "example_definition_content_object" {
### Optional
- `display_name` (String) The Eventhouse display name.
+- `format` (String) The Eventhouse format. Possible values: `Default`
- `id` (String) The Eventhouse ID.
- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
@@ -75,9 +76,8 @@ output "example_definition_content_object" {
### Read-Only
-- `definition` (Attributes Map) Definition parts. Possible path keys: `EventhouseProperties.json`. (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Possible path keys: **Default** format: `EventhouseProperties.json` (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Eventhouse description.
-- `format` (String) The Eventhouse format. Possible values: `NotApplicable`
- `properties` (Attributes) The Eventhouse properties. (see [below for nested schema](#nestedatt--properties))
diff --git a/docs/data-sources/notebook.md b/docs/data-sources/notebook.md
index bcbc6358..a3b3552a 100644
--- a/docs/data-sources/notebook.md
+++ b/docs/data-sources/notebook.md
@@ -36,6 +36,7 @@ data "fabric_notebook" "example_by_id" {
data "fabric_notebook" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "ipynb"
output_definition = true
}
@@ -68,6 +69,7 @@ output "example_definition_content_object" {
### Optional
- `display_name` (String) The Notebook display name.
+- `format` (String) The Notebook format. Possible values: `ipynb`, `py`
- `id` (String) The Notebook ID.
- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
@@ -77,9 +79,8 @@ output "example_definition_content_object" {
### Read-Only
-- `definition` (Attributes Map) Definition parts. Possible path keys: `notebook-content.ipynb`. (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Possible path keys: **ipynb** format: `notebook-content.ipynb` **py** format: `notebook-content.py` (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Notebook description.
-- `format` (String) The Notebook format. Possible values: `ipynb`.
diff --git a/docs/data-sources/report.md b/docs/data-sources/report.md
index d1a463b1..c820d3f6 100644
--- a/docs/data-sources/report.md
+++ b/docs/data-sources/report.md
@@ -29,6 +29,7 @@ data "fabric_report" "example" {
data "fabric_report" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "PBIR-Legacy"
output_definition = true
}
@@ -56,6 +57,7 @@ output "example_definition_report_object" {
### Optional
+- `format` (String) The Report format. Possible values: `PBIR`, `PBIR-Legacy`
- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
!> Your terraform state file may grow a lot if you output definition content. Only use it when you must use data from the definition.
@@ -64,10 +66,9 @@ output "example_definition_report_object" {
### Read-Only
-- `definition` (Attributes Map) Definition parts. Possible path keys: `report.json`, `definition.pbir`, `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`. (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Possible path keys: **PBIR** format: `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`, `definition.pbir`, `definition/pages/*.json`, `definition/report.json`, `definition/version.json` **PBIR-Legacy** format: `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`, `definition.pbir`, `report.json` (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Report description.
- `display_name` (String) The Report display name.
-- `format` (String) The Report format. Possible values: `PBIR-Legacy`.
diff --git a/docs/data-sources/semantic_model.md b/docs/data-sources/semantic_model.md
index ddd44af7..ed4b7951 100644
--- a/docs/data-sources/semantic_model.md
+++ b/docs/data-sources/semantic_model.md
@@ -29,6 +29,7 @@ data "fabric_semantic_model" "example" {
data "fabric_semantic_model" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "TMSL"
output_definition = true
}
@@ -53,6 +54,7 @@ output "example_definition_bim_object" {
### Optional
+- `format` (String) The Semantic Model format. Possible values: `TMDL`, `TMSL`
- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
!> Your terraform state file may grow a lot if you output definition content. Only use it when you must use data from the definition.
@@ -61,10 +63,9 @@ output "example_definition_bim_object" {
### Read-Only
-- `definition` (Attributes Map) Definition parts. Possible path keys: `model.bim`, `definition.pbism`, `diagramLayout.json`. (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Possible path keys: **TMDL** format: `definition.pbism`, `definition/database.tmdl`, `definition/model.tmdl`, `definition/tables/*.tmdl`, `diagramLayout.json` **TMSL** format: `definition.pbism`, `diagramLayout.json`, `model.bim` (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Semantic Model description.
- `display_name` (String) The Semantic Model display name.
-- `format` (String) The Semantic Model format. Possible values: `TMSL`.
diff --git a/docs/data-sources/spark_job_definition.md b/docs/data-sources/spark_job_definition.md
index 935ea756..3d6cc75e 100644
--- a/docs/data-sources/spark_job_definition.md
+++ b/docs/data-sources/spark_job_definition.md
@@ -36,6 +36,7 @@ data "fabric_spark_job_definition" "example_by_name" {
data "fabric_spark_job_definition" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "SparkJobDefinitionV1"
output_definition = true
}
@@ -68,6 +69,7 @@ output "example_definition_content_object" {
### Optional
- `display_name` (String) The Spark Job Definition display name.
+- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`
- `id` (String) The Spark Job Definition ID.
- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
@@ -77,9 +79,8 @@ output "example_definition_content_object" {
### Read-Only
-- `definition` (Attributes Map) Definition parts. Possible path keys: `SparkJobDefinitionV1.json`. (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Possible path keys: **SparkJobDefinitionV1** format: `SparkJobDefinitionV1.json` (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Spark Job Definition description.
-- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`.
- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties))
diff --git a/docs/resources/data_pipeline.md b/docs/resources/data_pipeline.md
index e7e11fc9..533ecace 100644
--- a/docs/resources/data_pipeline.md
+++ b/docs/resources/data_pipeline.md
@@ -70,14 +70,14 @@ resource "fabric_data_pipeline" "example_definition_update" {
### Optional
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `pipeline-content.json`. Read more about [Data Pipeline definition part paths](https://learn.microsoft.com/fabric/data-factory/pipeline-rest-api). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Data Pipeline definition part paths](https://learn.microsoft.com/fabric/data-factory/pipeline-rest-api). Accepted path keys: **Default** format: `pipeline-content.json` (see [below for nested schema](#nestedatt--definition))
- `definition_update_enabled` (Boolean) Update definition on change of source content. Default: `true`.
- `description` (String) The Data Pipeline description.
+- `format` (String) The Data Pipeline format. Possible values: `Default`
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
-- `format` (String) The Data Pipeline format. Possible values: `NotApplicable`
- `id` (String) The Data Pipeline ID.
diff --git a/docs/resources/eventhouse.md b/docs/resources/eventhouse.md
index 4b2dae98..74938e9e 100644
--- a/docs/resources/eventhouse.md
+++ b/docs/resources/eventhouse.md
@@ -64,14 +64,14 @@ resource "fabric_eventhouse" "example_definition_update" {
### Optional
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `EventhouseProperties.json`. Read more about [Eventhouse definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/eventhouse-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Eventhouse definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/eventhouse-definition). Accepted path keys: **Default** format: `EventhouseProperties.json` (see [below for nested schema](#nestedatt--definition))
- `definition_update_enabled` (Boolean) Update definition on change of source content. Default: `true`.
- `description` (String) The Eventhouse description.
+- `format` (String) The Eventhouse format. Possible values: `Default`
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
-- `format` (String) The Eventhouse format. Possible values: `NotApplicable`
- `id` (String) The Eventhouse ID.
- `properties` (Attributes) The Eventhouse properties. (see [below for nested schema](#nestedatt--properties))
diff --git a/docs/resources/notebook.md b/docs/resources/notebook.md
index ae607466..1cb89c82 100644
--- a/docs/resources/notebook.md
+++ b/docs/resources/notebook.md
@@ -31,6 +31,7 @@ resource "fabric_notebook" "example_definition_bootstrap" {
description = "example with definition bootstrapping"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "ipynb"
definition = {
"notebook-content.ipynb" = {
source = "${local.path}/notebook.ipynb.tmpl"
@@ -43,6 +44,7 @@ resource "fabric_notebook" "example_definition_update" {
display_name = "example"
description = "example with definition update when source or tokens changed"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "ipynb"
definition = {
"notebook-content.ipynb" = {
source = "${local.path}/notebook.ipynb.tmpl"
@@ -65,14 +67,14 @@ resource "fabric_notebook" "example_definition_update" {
### Optional
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `notebook-content.ipynb`. Read more about [Notebook definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/notebook-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Notebook definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/notebook-definition). Accepted path keys: **ipynb** format: `notebook-content.ipynb` **py** format: `notebook-content.py` (see [below for nested schema](#nestedatt--definition))
- `definition_update_enabled` (Boolean) Update definition on change of source content. Default: `true`.
- `description` (String) The Notebook description.
+- `format` (String) The Notebook format. Possible values: `ipynb`, `py`
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
-- `format` (String) The Notebook format. Possible values: `ipynb`.
- `id` (String) The Notebook ID.
diff --git a/docs/resources/report.md b/docs/resources/report.md
index 84123323..87b978e7 100644
--- a/docs/resources/report.md
+++ b/docs/resources/report.md
@@ -24,6 +24,7 @@ resource "fabric_report" "example_bootstrap" {
display_name = "example"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "PBIR-Legacy"
definition = {
"report.json" = {
source = "${local.path}/report.json"
@@ -44,6 +45,7 @@ resource "fabric_report" "example_bootstrap" {
resource "fabric_report" "example_update" {
display_name = "example with update"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "PBIR-Legacy"
definition = {
"report.json" = {
source = "${local.path}/report.json"
@@ -66,8 +68,9 @@ resource "fabric_report" "example_update" {
### Required
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `report.json`, `definition.pbir`, `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`. Read more about [Report definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/report-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Report definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/report-definition). Accepted path keys: **PBIR** format: `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`, `definition.pbir`, `definition/pages/*.json`, `definition/report.json`, `definition/version.json` **PBIR-Legacy** format: `StaticResources/RegisteredResources/*`, `StaticResources/SharedResources/*`, `definition.pbir`, `report.json` (see [below for nested schema](#nestedatt--definition))
- `display_name` (String) The Report display name.
+- `format` (String) The Report format. Possible values: `PBIR`, `PBIR-Legacy`
- `workspace_id` (String) The Workspace ID.
### Optional
@@ -78,7 +81,6 @@ resource "fabric_report" "example_update" {
### Read-Only
-- `format` (String) The Report format. Possible values: `PBIR-Legacy`.
- `id` (String) The Report ID.
diff --git a/docs/resources/semantic_model.md b/docs/resources/semantic_model.md
index f3aa4e58..b706caf1 100644
--- a/docs/resources/semantic_model.md
+++ b/docs/resources/semantic_model.md
@@ -24,6 +24,7 @@ resource "fabric_semantic_model" "example_bootstrap" {
display_name = "example"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "TMSL"
definition = {
"model.bim" = {
source = "${local.path}/model.bim.tmpl"
@@ -38,6 +39,7 @@ resource "fabric_semantic_model" "example_bootstrap" {
resource "fabric_semantic_model" "example_update" {
display_name = "example with update"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "TMSL"
definition = {
"model.bim" = {
source = "${local.path}/model.bim.tmpl"
@@ -57,8 +59,9 @@ resource "fabric_semantic_model" "example_update" {
### Required
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `model.bim`, `definition.pbism`, `diagramLayout.json`. Read more about [Semantic Model definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/semantic-model-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Semantic Model definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/semantic-model-definition). Accepted path keys: **TMDL** format: `definition.pbism`, `definition/database.tmdl`, `definition/model.tmdl`, `definition/tables/*.tmdl`, `diagramLayout.json` **TMSL** format: `definition.pbism`, `diagramLayout.json`, `model.bim` (see [below for nested schema](#nestedatt--definition))
- `display_name` (String) The Semantic Model display name.
+- `format` (String) The Semantic Model format. Possible values: `TMDL`, `TMSL`
- `workspace_id` (String) The Workspace ID.
### Optional
@@ -69,7 +72,6 @@ resource "fabric_semantic_model" "example_update" {
### Read-Only
-- `format` (String) The Semantic Model format. Possible values: `TMSL`.
- `id` (String) The Semantic Model ID.
diff --git a/docs/resources/spark_job_definition.md b/docs/resources/spark_job_definition.md
index 047c6665..50a53016 100644
--- a/docs/resources/spark_job_definition.md
+++ b/docs/resources/spark_job_definition.md
@@ -31,6 +31,7 @@ resource "fabric_spark_job_definition" "example_definition_bootstrap" {
description = "example with definition bootstrapping"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "SparkJobDefinitionV1"
definition = {
"SparkJobDefinitionV1.json" = {
source = "${local.path}/SparkJobDefinitionV1.json.tmpl"
@@ -43,6 +44,7 @@ resource "fabric_spark_job_definition" "example_definition_update" {
display_name = "example3"
description = "example with definition update when source or tokens changed"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "SparkJobDefinitionV1"
definition = {
"SparkJobDefinitionV1.json" = {
source = "${local.path}/SparkJobDefinitionV1.json.tmpl"
@@ -65,14 +67,14 @@ resource "fabric_spark_job_definition" "example_definition_update" {
### Optional
-- `definition` (Attributes Map) Definition parts. Accepted path keys: `SparkJobDefinitionV1.json`. Read more about [Spark Job Definition definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/spark-job-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition` (Attributes Map) Definition parts. Read more about [Spark Job Definition definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/spark-job-definition). Accepted path keys: **SparkJobDefinitionV1** format: `SparkJobDefinitionV1.json` (see [below for nested schema](#nestedatt--definition))
- `definition_update_enabled` (Boolean) Update definition on change of source content. Default: `true`.
- `description` (String) The Spark Job Definition description.
+- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
-- `format` (String) The Spark Job Definition format. Possible values: `SparkJobDefinitionV1`.
- `id` (String) The Spark Job Definition ID.
- `properties` (Attributes) The Spark Job Definition properties. (see [below for nested schema](#nestedatt--properties))
diff --git a/examples/data-sources/fabric_notebook/data-source.tf b/examples/data-sources/fabric_notebook/data-source.tf
index f3faf46b..e7210117 100644
--- a/examples/data-sources/fabric_notebook/data-source.tf
+++ b/examples/data-sources/fabric_notebook/data-source.tf
@@ -15,6 +15,7 @@ data "fabric_notebook" "example_by_id" {
data "fabric_notebook" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "ipynb"
output_definition = true
}
diff --git a/examples/data-sources/fabric_report/data-source.tf b/examples/data-sources/fabric_report/data-source.tf
index 677ba505..6fa4b5e6 100644
--- a/examples/data-sources/fabric_report/data-source.tf
+++ b/examples/data-sources/fabric_report/data-source.tf
@@ -8,6 +8,7 @@ data "fabric_report" "example" {
data "fabric_report" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "PBIR-Legacy"
output_definition = true
}
diff --git a/examples/data-sources/fabric_semantic_model/data-source.tf b/examples/data-sources/fabric_semantic_model/data-source.tf
index bc1d2b3a..a965feed 100644
--- a/examples/data-sources/fabric_semantic_model/data-source.tf
+++ b/examples/data-sources/fabric_semantic_model/data-source.tf
@@ -8,6 +8,7 @@ data "fabric_semantic_model" "example" {
data "fabric_semantic_model" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "TMSL"
output_definition = true
}
diff --git a/examples/data-sources/fabric_spark_job_definition/data-source.tf b/examples/data-sources/fabric_spark_job_definition/data-source.tf
index 0540e7a1..a3aaa5f6 100644
--- a/examples/data-sources/fabric_spark_job_definition/data-source.tf
+++ b/examples/data-sources/fabric_spark_job_definition/data-source.tf
@@ -15,6 +15,7 @@ data "fabric_spark_job_definition" "example_by_name" {
data "fabric_spark_job_definition" "example_definition" {
id = "11111111-1111-1111-1111-111111111111"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "SparkJobDefinitionV1"
output_definition = true
}
diff --git a/examples/resources/fabric_notebook/resource.tf b/examples/resources/fabric_notebook/resource.tf
index 2f6e58dd..80f6fc2f 100644
--- a/examples/resources/fabric_notebook/resource.tf
+++ b/examples/resources/fabric_notebook/resource.tf
@@ -10,6 +10,7 @@ resource "fabric_notebook" "example_definition_bootstrap" {
description = "example with definition bootstrapping"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "ipynb"
definition = {
"notebook-content.ipynb" = {
source = "${local.path}/notebook.ipynb.tmpl"
@@ -22,6 +23,7 @@ resource "fabric_notebook" "example_definition_update" {
display_name = "example"
description = "example with definition update when source or tokens changed"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "ipynb"
definition = {
"notebook-content.ipynb" = {
source = "${local.path}/notebook.ipynb.tmpl"
diff --git a/examples/resources/fabric_report/resource.tf b/examples/resources/fabric_report/resource.tf
index 846b03f5..4faf413e 100644
--- a/examples/resources/fabric_report/resource.tf
+++ b/examples/resources/fabric_report/resource.tf
@@ -3,6 +3,7 @@ resource "fabric_report" "example_bootstrap" {
display_name = "example"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "PBIR-Legacy"
definition = {
"report.json" = {
source = "${local.path}/report.json"
@@ -23,6 +24,7 @@ resource "fabric_report" "example_bootstrap" {
resource "fabric_report" "example_update" {
display_name = "example with update"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "PBIR-Legacy"
definition = {
"report.json" = {
source = "${local.path}/report.json"
diff --git a/examples/resources/fabric_semantic_model/resource.tf b/examples/resources/fabric_semantic_model/resource.tf
index 8d536ef3..abe5e017 100644
--- a/examples/resources/fabric_semantic_model/resource.tf
+++ b/examples/resources/fabric_semantic_model/resource.tf
@@ -3,6 +3,7 @@ resource "fabric_semantic_model" "example_bootstrap" {
display_name = "example"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "TMSL"
definition = {
"model.bim" = {
source = "${local.path}/model.bim.tmpl"
@@ -17,6 +18,7 @@ resource "fabric_semantic_model" "example_bootstrap" {
resource "fabric_semantic_model" "example_update" {
display_name = "example with update"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "TMSL"
definition = {
"model.bim" = {
source = "${local.path}/model.bim.tmpl"
diff --git a/examples/resources/fabric_spark_job_definition/resource.tf b/examples/resources/fabric_spark_job_definition/resource.tf
index e71733e3..511033ca 100644
--- a/examples/resources/fabric_spark_job_definition/resource.tf
+++ b/examples/resources/fabric_spark_job_definition/resource.tf
@@ -10,6 +10,7 @@ resource "fabric_spark_job_definition" "example_definition_bootstrap" {
description = "example with definition bootstrapping"
workspace_id = "00000000-0000-0000-0000-000000000000"
definition_update_enabled = false
+ format = "SparkJobDefinitionV1"
definition = {
"SparkJobDefinitionV1.json" = {
source = "${local.path}/SparkJobDefinitionV1.json.tmpl"
@@ -22,6 +23,7 @@ resource "fabric_spark_job_definition" "example_definition_update" {
display_name = "example3"
description = "example with definition update when source or tokens changed"
workspace_id = "00000000-0000-0000-0000-000000000000"
+ format = "SparkJobDefinitionV1"
definition = {
"SparkJobDefinitionV1.json" = {
source = "${local.path}/SparkJobDefinitionV1.json.tmpl"
diff --git a/internal/framework/validators/patterns_if_attribute_is_one_of.go b/internal/framework/validators/patterns_if_attribute_is_one_of.go
new file mode 100644
index 00000000..21c69a0a
--- /dev/null
+++ b/internal/framework/validators/patterns_if_attribute_is_one_of.go
@@ -0,0 +1,179 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package validators
+
+import (
+ "context"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/tfsdk"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+var _ validator.String = PatternsIfAttributeIsOneOfValidator{}
+
+type PatternsIfAttributeIsOneOfValidator struct {
+ pathExpression path.Expression
+ exceptedValues []attr.Value
+ patterns []string
+ message string
+}
+
+type PatternsIfAttributeIsOneOfRequest struct {
+ Config tfsdk.Config
+ ConfigValue types.String
+ Path path.Path
+ PathExpression path.Expression
+ ExceptedValues []attr.Value
+}
+
+type PatternsIfAttributeIsOneOfResponse struct {
+ Diagnostics diag.Diagnostics
+}
+
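+// PatternsIfAttributeIsOneOf returns a string validator that enforces the given literal
+// patterns only when the attribute referenced by the path expression equals one of the
+// expected values; otherwise the value is left unvalidated. A `*` in a pattern acts as a
+// wildcard, and a non-empty message replaces the default diagnostic description.
+// Illustrative usage (attribute names and values below are examples, not a specific schema):
+//
+//	PatternsIfAttributeIsOneOf(
+//		path.MatchRoot("format"),
+//		[]attr.Value{types.StringValue("ipynb")},
+//		[]string{"notebook-content.ipynb"},
+//		"",
+//	)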
+func PatternsIfAttributeIsOneOf(p path.Expression, exceptedValue []attr.Value, patterns []string, message string) PatternsIfAttributeIsOneOfValidator {
+ return PatternsIfAttributeIsOneOfValidator{
+ pathExpression: p,
+ exceptedValues: exceptedValue,
+ patterns: patterns,
+ message: message,
+ }
+}
+
+func (v PatternsIfAttributeIsOneOfValidator) Description(_ context.Context) string {
+ if v.message != "" {
+ return v.message
+ }
+
+ return fmt.Sprintf("value must match expression patterns '%s'", strings.Join(v.patterns, ", "))
+}
+
+func (v PatternsIfAttributeIsOneOfValidator) MarkdownDescription(ctx context.Context) string {
+ return v.Description(ctx)
+}
+
+func (v PatternsIfAttributeIsOneOfValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) {
+ validateReq := PatternsIfAttributeIsOneOfRequest{
+ Config: req.Config,
+ ConfigValue: req.ConfigValue,
+ Path: req.Path,
+ PathExpression: req.PathExpression,
+ }
+ validateResp := &PatternsIfAttributeIsOneOfResponse{}
+
+ v.Validate(ctx, validateReq, validateResp)
+
+ resp.Diagnostics.Append(validateResp.Diagnostics...)
+}
+
+func (v PatternsIfAttributeIsOneOfValidator) Validate(ctx context.Context, req PatternsIfAttributeIsOneOfRequest, resp *PatternsIfAttributeIsOneOfResponse) {
+ // If attribute configuration is unknown, there is nothing else to validate
+ if req.ConfigValue.IsUnknown() {
+ return
+ }
+
+ paths, diags := req.Config.PathMatches(ctx, req.PathExpression.Merge(v.pathExpression))
+ resp.Diagnostics.Append(diags...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if len(paths) == 0 {
+ resp.Diagnostics.AddError(
+ fmt.Sprintf("Invalid configuration for attribute %s", req.Path),
+ "Path must be set",
+ )
+
+ return
+ }
+
+ for _, p := range paths {
+ var mpVal attr.Value
+
+ diags = req.Config.GetAttribute(ctx, p, &mpVal)
+ if diags.HasError() {
+ resp.Diagnostics.AddError(
+ fmt.Sprintf("Invalid configuration for attribute %s", req.Path),
+ fmt.Sprintf("Unable to retrieve attribute path: %q", p),
+ )
+
+ return
+ }
+
+ // If the target attribute configuration is unknown or null, there is nothing else to validate
+ if mpVal.IsNull() || mpVal.IsUnknown() {
+ return
+ }
+
+ for _, expectedValue := range v.exceptedValues {
+ // If the value of the target attribute is equal to one of the expected values, we need to validate the value of the current attribute
+ if mpVal.Equal(expectedValue) {
+ if req.ConfigValue.IsNull() {
+ resp.Diagnostics.Append(
+ validatordiag.InvalidAttributeValueDiagnostic(
+ req.Path,
+ "is empty, "+v.Description(ctx),
+ req.ConfigValue.ValueString(),
+ ),
+ )
+
+ return
+ }
+
+ re, err := v.convertPatternsToRegexp(v.patterns)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ fmt.Sprintf("Invalid configuration for attribute %s", req.Path),
+ fmt.Sprintf("Unable to compile regular expression: %q", err),
+ )
+
+ return
+ }
+
+ value := req.ConfigValue.ValueString()
+
+ if !re.MatchString(value) {
+ resp.Diagnostics.Append(validatordiag.InvalidAttributeValueMatchDiagnostic(
+ req.Path,
+ v.Description(ctx),
+ value,
+ ))
+ }
+ }
+ }
+ }
+}
+
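+// convertPatternsToRegexp compiles the literal patterns into a single anchored
+// alternation of the form ^(p1|p2|...)$: regex metacharacters are quoted first,
+// then any `*` wildcard is expanded to `.+` so patterns like `test/*.json` match
+// arbitrary file names.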
+func (v PatternsIfAttributeIsOneOfValidator) convertPatternsToRegexp(patterns []string) (*regexp.Regexp, error) {
+ p := make([]string, 0)
+
+ p = append(p, "^(")
+
+ for i, pattern := range patterns {
+ p = append(p, regexp.QuoteMeta(pattern))
+ if i != len(patterns)-1 {
+ p = append(p, "|")
+ }
+ }
+
+ p = append(p, ")$")
+
+ out := strings.Join(p, "")
+ out = strings.ReplaceAll(out, `\*`, ".+")
+
+ re, err := regexp.Compile(out)
+ if err != nil {
+ return nil, err
+ }
+
+ return re, nil
+}
diff --git a/internal/framework/validators/patterns_if_attribute_is_one_of_test.go b/internal/framework/validators/patterns_if_attribute_is_one_of_test.go
new file mode 100644
index 00000000..7cde214e
--- /dev/null
+++ b/internal/framework/validators/patterns_if_attribute_is_one_of_test.go
@@ -0,0 +1,292 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package validators_test
+
+import (
+ "context"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/tfsdk"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/validators"
+)
+
+func TestUnit_PatternsIfAttributeIsOneOfValidator(t *testing.T) { //nolint:maintidx
+ t.Parallel()
+
+ type testCase struct {
+ req validators.PatternsIfAttributeIsOneOfRequest
+ in path.Expression
+ inPath path.Path
+ exceptedValues []attr.Value
+ patterns []string
+ message string
+ expError bool
+ expErrorMessage string
+ }
+
+ testCases := map[string]testCase{
+ "multi-not-match": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringValue("foo value"),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, "foo value"),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"foo", "bar", "baz", "test/*.json"},
+ message: "",
+ expError: true,
+ expErrorMessage: `value must match expression patterns 'foo, bar, baz, test/*.json'`,
+ },
+ "multi-match": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringValue("test/foo.json"),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, "foo value"),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"foo", "bar", "baz", "test/*.json"},
+ message: "",
+ expError: false,
+ },
+ "one-not-match": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringValue("foo value"),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, "foo value"),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"baz"},
+ message: "",
+ expError: true,
+ expErrorMessage: `value must match expression patterns 'baz'`,
+ },
+ "one-match": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringValue("foo value"),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, "foo value"),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"foo value"},
+ message: "",
+ expError: false,
+ },
+ "custom-msg-err": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringValue("foo value"),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, "foo value"),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"baz"},
+ message: "message value",
+ expError: true,
+ expErrorMessage: "message value",
+ },
+ "self-is-null": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringNull(),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, nil),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"baz"},
+ message: "",
+ expError: true,
+ expErrorMessage: `is empty, value must match expression patterns 'baz'`,
+ },
+ "self-is-unknown": {
+ req: validators.PatternsIfAttributeIsOneOfRequest{
+ ConfigValue: types.StringUnknown(),
+ Path: path.Root("foo"),
+ PathExpression: path.MatchRoot("foo"),
+ Config: tfsdk.Config{
+ Schema: schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "foo": schema.StringAttribute{},
+ "bar": schema.StringAttribute{},
+ },
+ },
+ Raw: tftypes.NewValue(tftypes.Object{
+ AttributeTypes: map[string]tftypes.Type{
+ "foo": tftypes.String,
+ "bar": tftypes.String,
+ },
+ }, map[string]tftypes.Value{
+ "foo": tftypes.NewValue(tftypes.String, nil),
+ "bar": tftypes.NewValue(tftypes.String, "bar value"),
+ }),
+ },
+ },
+ in: path.MatchRoot("bar"),
+ inPath: path.Root("foo"),
+ exceptedValues: []attr.Value{
+ types.StringValue("bar value"),
+ },
+ patterns: []string{"baz"},
+ message: "",
+ expError: false,
+ },
+ }
+
+ for name, test := range testCases {
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ resp := &validators.PatternsIfAttributeIsOneOfResponse{}
+
+ validators.PatternsIfAttributeIsOneOf(test.in, test.exceptedValues, test.patterns, test.message).Validate(context.TODO(), test.req, resp)
+
+ if test.expError && resp.Diagnostics.HasError() {
+ d1 := validatordiag.InvalidAttributeValueDiagnostic(test.inPath, test.expErrorMessage, test.req.ConfigValue.ValueString())
+ d2 := validatordiag.InvalidAttributeValueMatchDiagnostic(test.inPath, test.expErrorMessage, test.req.ConfigValue.ValueString())
+
+ if !resp.Diagnostics.Contains(d1) && !resp.Diagnostics.Contains(d2) {
+ t.Fatalf("expected error(s) to contain (%s), got none. Error message is: (%s)", test.expErrorMessage, resp.Diagnostics.Errors())
+ }
+ }
+
+ if !test.expError && resp.Diagnostics.HasError() {
+ t.Fatalf("unexpected error(s): %s", resp)
+ }
+
+ if test.expError && !resp.Diagnostics.HasError() {
+ t.Fatal("expected error(s), got none")
+ }
+ })
+ }
+}
diff --git a/internal/pkg/fabricitem/base.go b/internal/pkg/fabricitem/base.go
index bcb57633..6cd2749e 100644
--- a/internal/pkg/fabricitem/base.go
+++ b/internal/pkg/fabricitem/base.go
@@ -4,7 +4,7 @@
package fabricitem
const (
- DefinitionFormatNotApplicable = "NotApplicable"
- PreviewDataSource = "\n\n~> This data-source is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration."
- PreviewResource = "\n\n~> This resource is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration."
+ DefinitionFormatDefault = "Default"
+ PreviewDataSource = "\n\n~> This data-source is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration."
+ PreviewResource = "\n\n~> This resource is in **preview**. To access it, you must explicitly enable the `preview` mode in the provider level configuration."
)
diff --git a/internal/pkg/fabricitem/data_item_definition.go b/internal/pkg/fabricitem/data_item_definition.go
index c7c73f0f..df02542a 100644
--- a/internal/pkg/fabricitem/data_item_definition.go
+++ b/internal/pkg/fabricitem/data_item_definition.go
@@ -35,9 +35,7 @@ type DataSourceFabricItemDefinition struct {
TFName string
MarkdownDescription string
IsDisplayNameUnique bool
- FormatTypeDefault string
- FormatTypes []string
- DefinitionPathKeys []string
+ DefinitionFormats []DefinitionFormat
IsPreview bool
}
@@ -130,12 +128,6 @@ func (d *DataSourceFabricItemDefinition) Read(ctx context.Context, req datasourc
return
}
- data.Format = types.StringNull()
-
- if d.FormatTypeDefault != "" {
- data.Format = types.StringValue(d.FormatTypeDefault)
- }
-
resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
if data.OutputDefinition.IsNull() || data.OutputDefinition.IsUnknown() {
@@ -213,7 +205,11 @@ func (d *DataSourceFabricItemDefinition) getDefinition(ctx context.Context, mode
respGetOpts := &fabcore.ItemsClientBeginGetItemDefinitionOptions{}
if !model.Format.IsNull() {
- respGetOpts.Format = model.Format.ValueStringPointer()
+ apiFormat := getDefinitionFormatAPI(d.DefinitionFormats, model.Format.ValueString())
+
+ if apiFormat != "" {
+ respGetOpts.Format = &apiFormat
+ }
}
respGet, err := d.client.GetItemDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), respGetOpts)
diff --git a/internal/pkg/fabricitem/data_item_definition_properties.go b/internal/pkg/fabricitem/data_item_definition_properties.go
index 2ff9fa1a..bad705aa 100644
--- a/internal/pkg/fabricitem/data_item_definition_properties.go
+++ b/internal/pkg/fabricitem/data_item_definition_properties.go
@@ -126,12 +126,6 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx
return
}
- data.Format = types.StringNull()
-
- if d.FormatTypeDefault != "" {
- data.Format = types.StringValue(d.FormatTypeDefault)
- }
-
resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
if data.OutputDefinition.IsNull() || data.OutputDefinition.IsUnknown() {
@@ -205,7 +199,11 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getDefini
respGetOpts := &fabcore.ItemsClientBeginGetItemDefinitionOptions{}
if !model.Format.IsNull() {
- respGetOpts.Format = model.Format.ValueStringPointer()
+ apiFormat := getDefinitionFormatAPI(d.DefinitionFormats, model.Format.ValueString())
+
+ if apiFormat != "" {
+ respGetOpts.Format = &apiFormat
+ }
}
respGet, err := d.client.GetItemDefinition(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), respGetOpts)
diff --git a/internal/pkg/fabricitem/data_schema.go b/internal/pkg/fabricitem/data_schema.go
index 9746769f..42f96e39 100644
--- a/internal/pkg/fabricitem/data_schema.go
+++ b/internal/pkg/fabricitem/data_schema.go
@@ -8,8 +8,14 @@ import (
"fmt"
"github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
supertypes "github.com/orange-cloudavenue/terraform-plugin-framework-supertypes"
+ superstringvalidator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/stringvalidator"
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
@@ -27,7 +33,7 @@ func getDataSourceFabricItemSchema(ctx context.Context, d DataSourceFabricItem)
func getDataSourceFabricItemDefinitionSchema(ctx context.Context, d DataSourceFabricItemDefinition) schema.Schema {
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
- for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) {
+ for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.DefinitionFormats) {
attributes[key] = value
}
@@ -51,7 +57,7 @@ func getDataSourceFabricItemDefinitionPropertiesSchema[Ttfprop, Titemprop any](c
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
attributes["properties"] = getDataSourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, d.Name, d.PropertiesAttributes)
- for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) {
+ for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.DefinitionFormats) {
attributes[key] = value
}
@@ -105,21 +111,25 @@ func getDataSourceFabricItemBaseAttributes(ctx context.Context, itemName string,
}
// Helper function to get Fabric Item data-source definition attributes.
-func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name string, formatTypes, definitionPathKeys []string) map[string]schema.Attribute {
+func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name string, definitionFormats []DefinitionFormat) map[string]schema.Attribute {
attributes := make(map[string]schema.Attribute)
- if len(formatTypes) > 0 {
- attributes["format"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s format. Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false)),
- Computed: true,
- }
- } else {
- attributes["format"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", name, DefinitionFormatNotApplicable),
- Computed: true,
- }
+ formatTypes := getDefinitionFormats(definitionFormats)
+ definitionFormatsDocs := getDefinitionFormatsPathsDocs(definitionFormats)
+
+ // format attribute
+ attrFormat := schema.StringAttribute{}
+
+ attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: %s", name, utils.ConvertStringSlicesToString(formatTypes, true, true))
+ attrFormat.Optional = true
+ attrFormat.Validators = []validator.String{
+ stringvalidator.OneOf(formatTypes...),
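+ // `format` must be provided whenever `output_definition` is set to true.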
+ superstringvalidator.RequireIfAttributeIsOneOf(path.MatchRoot("output_definition"), []attr.Value{types.BoolValue(true)}),
}
+ attributes["format"] = attrFormat
+
+ // output_definition attribute
attributes["output_definition"] = schema.BoolAttribute{
MarkdownDescription: "Output definition parts as gzip base64 content? Default: `false`\n\n" +
"!> Your terraform state file may grow a lot if you output definition content. Only use it when you must use data from the definition.",
@@ -141,11 +151,7 @@ func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name strin
},
}
- if len(definitionPathKeys) > 0 {
- attrDefinition.MarkdownDescription = "Definition parts. Possible path keys: " + utils.ConvertStringSlicesToString(definitionPathKeys, true, false) + "."
- } else {
- attrDefinition.MarkdownDescription = "Definition parts."
- }
+ attrDefinition.MarkdownDescription = "Definition parts. Possible path keys: " + definitionFormatsDocs
attributes["definition"] = attrDefinition
diff --git a/internal/pkg/fabricitem/definition.go b/internal/pkg/fabricitem/definition.go
new file mode 100644
index 00000000..9b41c721
--- /dev/null
+++ b/internal/pkg/fabricitem/definition.go
@@ -0,0 +1,114 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "maps"
+ "slices"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ superstringvalidator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/stringvalidator"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+)
+
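+// DefinitionFormat describes one supported definition format of a Fabric item:
+// the Terraform-facing format Type, the corresponding value sent to the Fabric API,
+// and the accepted definition part Paths. Illustrative value only (actual entries are
+// declared per item type and the API string may differ from Type):
+//
+//	DefinitionFormat{Type: "ipynb", API: "ipynb", Paths: []string{"notebook-content.ipynb"}}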
+type DefinitionFormat struct {
+ Type string
+ API string
+ Paths []string
+}
+
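+// getDefinitionFormats returns the Terraform-facing format types, sorted alphabetically.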
+func getDefinitionFormats(values []DefinitionFormat) []string {
+ results := make([]string, len(values))
+
+ for i, value := range values {
+ results[i] = value.Type
+ }
+
+ return slices.Sorted(slices.Values(results))
+}
+
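+// getDefinitionFormatsPaths maps each format type to its accepted definition part paths, with paths sorted alphabetically.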
+func getDefinitionFormatsPaths(values []DefinitionFormat) map[string][]string {
+ results := make(map[string][]string, len(values))
+
+ for v := range slices.Values(values) {
+ results[v.Type] = slices.Sorted(slices.Values(v.Paths))
+ }
+
+ sortedResults := make(map[string][]string, len(results))
+ for _, k := range slices.Sorted(maps.Keys(results)) {
+ sortedResults[k] = results[k]
+ }
+
+ return sortedResults
+}
+
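+// getDefinitionFormatsPathsDocs renders the per-format path lists for schema documentation,
+// e.g. (illustrative): **ipynb** format: `notebook-content.ipynb` **py** format: `notebook-content.py`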
+func getDefinitionFormatsPathsDocs(values []DefinitionFormat) string {
+ elements := getDefinitionFormatsPaths(values)
+
+ var results string
+
+ i := 0
+
+ for _, k := range slices.Sorted(maps.Keys(elements)) {
+ results += "**" + k + "** format: "
+ results += utils.ConvertStringSlicesToString(elements[k], true, true)
+
+ if i != len(elements)-1 {
+ results += " "
+ }
+
+ i++
+ }
+
+ return results
+}
+
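+// GetDefinitionFormatPaths returns the accepted definition part paths for the given format type, or nil if the format is unknown.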
+func GetDefinitionFormatPaths(values []DefinitionFormat, format string) []string {
+ for _, value := range values {
+ if value.Type == format {
+ return value.Paths
+ }
+ }
+
+ return nil
+}
+
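+// getDefinitionFormatAPI maps the Terraform-facing format type to the format value expected by the Fabric API, or "" if the format is unknown.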
+func getDefinitionFormatAPI(values []DefinitionFormat, format string) string {
+ for _, value := range values {
+ if value.Type == format {
+ return value.API
+ }
+ }
+
+ return ""
+}
+
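+// DefinitionPathKeysValidator builds one string validator per format: when the root
+// `format` attribute equals that format's type, the definition part path key must be
+// one of that format's accepted paths.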
+func DefinitionPathKeysValidator(values []DefinitionFormat) []validator.String {
+ results := make([]validator.String, 0, len(values))
+
+ for _, value := range values {
+ paths := []superstringvalidator.OneOfWithDescriptionIfAttributeIsOneOfValues{}
+
+ for _, p := range value.Paths {
+ paths = append(paths, superstringvalidator.OneOfWithDescriptionIfAttributeIsOneOfValues{
+ Value: p,
+ Description: p,
+ })
+ }
+
+ stringValidator := superstringvalidator.OneOfWithDescriptionIfAttributeIsOneOf(
+ path.MatchRoot("format"),
+ []attr.Value{types.StringValue(value.Type)},
+ paths...,
+ )
+
+ results = append(results, stringValidator)
+ }
+
+ return results
+}
diff --git a/internal/pkg/fabricitem/models_resource_item.go b/internal/pkg/fabricitem/models_resource_item.go
index fc98cfca..fa8cca46 100644
--- a/internal/pkg/fabricitem/models_resource_item.go
+++ b/internal/pkg/fabricitem/models_resource_item.go
@@ -40,11 +40,11 @@ func (to *requestCreateFabricItem) setType(v fabcore.ItemType) {
to.Type = &v
}
-func (to *requestCreateFabricItem) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool) diag.Diagnostics {
+func (to *requestCreateFabricItem) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool, definitionFormats []DefinitionFormat) diag.Diagnostics {
if !definition.IsNull() && !definition.IsUnknown() {
var def fabricItemDefinition
- def.setFormat(format)
+ def.setFormat(format, definitionFormats)
if diags := def.setParts(ctx, definition, "", []string{}, definitionUpdateEnabled, false); diags.HasError() {
return diags
@@ -104,9 +104,9 @@ func fabricItemCheckUpdate(planDisplayName, planDescription, stateDisplayName, s
return !reflect.DeepEqual(reqUpdatePlan.UpdateItemRequest, reqUpdateState.UpdateItemRequest)
}
-func fabricItemCheckUpdateDefinition(ctx context.Context, planDefinition, stateDefinition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], planFormat types.String, planDefinitionUpdateEnabled types.Bool, definitionEmpty string, definitionPaths []string, reqUpdate *requestUpdateFabricItemDefinition) (bool, diag.Diagnostics) {
+func fabricItemCheckUpdateDefinition(ctx context.Context, planDefinition, stateDefinition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], planFormat types.String, planDefinitionUpdateEnabled types.Bool, definitionEmpty string, definitionFormats []DefinitionFormat, reqUpdate *requestUpdateFabricItemDefinition) (bool, diag.Diagnostics) {
if !planDefinition.Equal(stateDefinition) && planDefinitionUpdateEnabled.ValueBool() {
- if diags := reqUpdate.setDefinition(ctx, planDefinition, planFormat, planDefinitionUpdateEnabled, definitionEmpty, definitionPaths); diags.HasError() {
+ if diags := reqUpdate.setDefinition(ctx, planDefinition, planFormat, planDefinitionUpdateEnabled, definitionEmpty, definitionFormats); diags.HasError() {
return false, diags
}
diff --git a/internal/pkg/fabricitem/models_resource_item_definition.go b/internal/pkg/fabricitem/models_resource_item_definition.go
index ccb25ba2..1c622fb3 100644
--- a/internal/pkg/fabricitem/models_resource_item_definition.go
+++ b/internal/pkg/fabricitem/models_resource_item_definition.go
@@ -35,9 +35,13 @@ type fabricItemDefinition struct {
fabcore.ItemDefinition
}
-func (to *fabricItemDefinition) setFormat(v types.String) {
- if v.ValueString() != DefinitionFormatNotApplicable && v.ValueString() != "" {
- to.Format = v.ValueStringPointer()
+func (to *fabricItemDefinition) setFormat(v types.String, definitionFormats []DefinitionFormat) {
+ if v.ValueString() != DefinitionFormatDefault && v.ValueString() != "" {
+ apiFormat := getDefinitionFormatAPI(definitionFormats, v.ValueString())
+
+ if apiFormat != "" {
+ to.Format = &apiFormat
+ }
}
}
@@ -62,7 +66,7 @@ func (to *fabricItemDefinition) setParts(ctx context.Context, definition superty
}
to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{
- Path: azto.Ptr(definitionPaths[0]),
+ Path: &definitionPaths[0],
Payload: &content,
PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64),
})
@@ -78,7 +82,7 @@ func (to *fabricItemDefinition) setParts(ctx context.Context, definition superty
}
to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{
- Path: azto.Ptr(defPartKey),
+ Path: &defPartKey,
Payload: payloadB64,
PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64),
})
@@ -92,12 +96,14 @@ type requestUpdateFabricItemDefinition struct {
fabcore.UpdateItemDefinitionRequest
}
-func (to *requestUpdateFabricItemDefinition) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool, definitionEmpty string, definitionPaths []string) diag.Diagnostics {
+func (to *requestUpdateFabricItemDefinition) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool, definitionEmpty string, definitionFormats []DefinitionFormat) diag.Diagnostics {
var def fabricItemDefinition
- def.setFormat(format)
+ def.setFormat(format, definitionFormats)
+
+ definitionPathKeys := GetDefinitionFormatPaths(definitionFormats, format.ValueString())
- if diags := def.setParts(ctx, definition, definitionEmpty, definitionPaths, definitionUpdateEnabled, true); diags.HasError() {
+ if diags := def.setParts(ctx, definition, definitionEmpty, definitionPathKeys, definitionUpdateEnabled, true); diags.HasError() {
return diags
}
diff --git a/internal/pkg/fabricitem/resource_item_config_definition_properties.go b/internal/pkg/fabricitem/resource_item_config_definition_properties.go
index fa8d73d9..2370d5d7 100644
--- a/internal/pkg/fabricitem/resource_item_config_definition_properties.go
+++ b/internal/pkg/fabricitem/resource_item_config_definition_properties.go
@@ -74,7 +74,7 @@ func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfcon
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -166,7 +166,7 @@ func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfcon
reqCreate.setDescription(plan.Description)
reqCreate.setType(r.Type)
- if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
+ if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionFormats)...); resp.Diagnostics.HasError() {
return
}
@@ -292,7 +292,7 @@ func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfcon
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
diff --git a/internal/pkg/fabricitem/resource_item_definition.go b/internal/pkg/fabricitem/resource_item_definition.go
index e843c81c..e1996f66 100644
--- a/internal/pkg/fabricitem/resource_item_definition.go
+++ b/internal/pkg/fabricitem/resource_item_definition.go
@@ -41,13 +41,11 @@ type ResourceFabricItemDefinition struct {
MarkdownDescription string
DisplayNameMaxLength int
DescriptionMaxLength int
- FormatTypeDefault string
- FormatTypes []string
DefinitionPathDocsURL string
- DefinitionPathKeys []string
DefinitionPathKeysValidator []validator.Map
DefinitionRequired bool
DefinitionEmpty string
+ DefinitionFormats []DefinitionFormat
IsPreview bool
}
@@ -81,7 +79,7 @@ func (r *ResourceFabricItemDefinition) ModifyPlan(ctx context.Context, req resou
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -160,7 +158,7 @@ func (r *ResourceFabricItemDefinition) Create(ctx context.Context, req resource.
reqCreate.setDescription(plan.Description)
reqCreate.setType(r.Type)
- if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
+ if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionFormats)...); resp.Diagnostics.HasError() {
return
}
@@ -272,7 +270,7 @@ func (r *ResourceFabricItemDefinition) Update(ctx context.Context, req resource.
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
diff --git a/internal/pkg/fabricitem/resource_item_definition_properties.go b/internal/pkg/fabricitem/resource_item_definition_properties.go
index a31e65ca..d112d66d 100644
--- a/internal/pkg/fabricitem/resource_item_definition_properties.go
+++ b/internal/pkg/fabricitem/resource_item_definition_properties.go
@@ -69,7 +69,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -148,7 +148,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx
reqCreate.setDescription(plan.Description)
reqCreate.setType(r.Type)
- if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
+ if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionFormats)...); resp.Diagnostics.HasError() {
return
}
@@ -267,7 +267,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx
var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionFormats, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
diff --git a/internal/pkg/fabricitem/resource_schema.go b/internal/pkg/fabricitem/resource_schema.go
index dc07252e..ed073fea 100644
--- a/internal/pkg/fabricitem/resource_schema.go
+++ b/internal/pkg/fabricitem/resource_schema.go
@@ -22,6 +22,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
supertypes "github.com/orange-cloudavenue/terraform-plugin-framework-supertypes"
+ superstringvalidator "github.com/orange-cloudavenue/terraform-plugin-framework-validators/stringvalidator"
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/framework/planmodifiers"
@@ -40,7 +41,7 @@ func getResourceFabricItemSchema(ctx context.Context, r ResourceFabricItem) sche
func getResourceFabricItemDefinitionSchema(ctx context.Context, r ResourceFabricItemDefinition) schema.Schema {
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.DefinitionPathDocsURL, r.DefinitionFormats, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
attributes[key] = value
}
@@ -64,7 +65,7 @@ func getResourceFabricItemDefinitionPropertiesSchema[Ttfprop, Titemprop any](ctx
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.DefinitionPathDocsURL, r.DefinitionFormats, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
attributes[key] = value
}
@@ -98,7 +99,7 @@ func getResourceFabricItemConfigDefinitionPropertiesSchema[Ttfprop, Titemprop, T
attributes["configuration"] = attrConfiguration
attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, true) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.DefinitionPathDocsURL, r.DefinitionFormats, r.DefinitionPathKeysValidator, r.DefinitionRequired, true) {
attributes[key] = value
}
@@ -188,7 +189,7 @@ func getResourceFabricItemBaseAttributes(ctx context.Context, name string, displ
}
// Helper function to get Fabric Item definition attributes.
-func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, formatTypeDefault string, formatTypes []string, definitionPathDocsURL string, definitionPathKeys []string, definitionPathKeysValidator []validator.Map, definitionRequired, alongConfiguration bool) map[string]schema.Attribute { //revive:disable-line:flag-parameter,argument-limit
+func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, definitionPathDocsURL string, definitionFormats []DefinitionFormat, definitionPathKeysValidator []validator.Map, definitionRequired, alongConfiguration bool) map[string]schema.Attribute { //revive:disable-line:flag-parameter,argument-limit
attributes := make(map[string]schema.Attribute)
attrDefinitionUpdateEnabled := schema.BoolAttribute{}
@@ -206,27 +207,31 @@ func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, format
attributes["definition_update_enabled"] = attrDefinitionUpdateEnabled
+ formatTypes := getDefinitionFormats(definitionFormats)
+ definitionFormatsDocs := getDefinitionFormatsPathsDocs(definitionFormats)
+
attrFormat := schema.StringAttribute{}
- attrFormat.Computed = true
- if len(formatTypes) > 0 {
- attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false))
- attrFormat.Default = stringdefault.StaticString(formatTypeDefault)
+ attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: %s", name, utils.ConvertStringSlicesToString(formatTypes, true, true))
+ attrFormat.Validators = []validator.String{
+ stringvalidator.OneOf(utils.ConvertEnumsToStringSlices(formatTypes, true)...),
+ superstringvalidator.RequireIfAttributeIsSet(path.MatchRoot("definition")),
+ }
+
+ if definitionRequired {
+ attrFormat.Required = true
} else {
- attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: `%s`", name, DefinitionFormatNotApplicable)
- attrFormat.Default = stringdefault.StaticString(DefinitionFormatNotApplicable)
+ attrFormat.Optional = true
}
if alongConfiguration {
- attrFormat.Validators = []validator.String{
- stringvalidator.ConflictsWith(path.MatchRoot("configuration")),
- }
+ attrFormat.Validators = append(attrFormat.Validators, stringvalidator.ConflictsWith(path.MatchRoot("configuration")))
}
attributes["format"] = attrFormat
attrDefinition := schema.MapNestedAttribute{}
- attrDefinition.MarkdownDescription = fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL)
+ attrDefinition.MarkdownDescription = fmt.Sprintf("Definition parts. Read more about [%s definition part paths](%s). Accepted path keys: %s", name, definitionPathDocsURL, definitionFormatsDocs)
attrDefinition.CustomType = supertypes.NewMapNestedObjectTypeOf[resourceFabricItemDefinitionPartModel](ctx)
attrDefinition.Validators = definitionPathKeysValidator
attrDefinition.NestedObject = getResourceFabricItemDefinitionPartSchema(ctx)
diff --git a/internal/pkg/utils/utils.go b/internal/pkg/utils/utils.go
index 6832ce75..398058d6 100644
--- a/internal/pkg/utils/utils.go
+++ b/internal/pkg/utils/utils.go
@@ -7,6 +7,7 @@ import (
"crypto/sha256"
"encoding/hex"
"fmt"
+ "maps"
"slices"
"strings"
)
@@ -48,7 +49,7 @@ func ConvertEnumsToStringSlices[T any](values []T, sorting bool) []string { //re
}
if sorting {
- slices.Sort(result)
+ result = slices.Sorted(slices.Values(result))
}
return result
@@ -76,19 +77,12 @@ func ConvertStringSlicesToString[T any](values []T, backticks, sorting bool, sep
 // SortMapStringByKeys sorts a string-keyed map by its keys.
func SortMapStringByKeys[T any](m map[string]T) map[string]T {
- sortedKeys := make([]string, 0, len(m))
- for k := range m {
- sortedKeys = append(sortedKeys, k)
+ result := make(map[string]T, len(m))
+ for _, k := range slices.Sorted(maps.Keys(m)) {
+ result[k] = m[k]
}
- slices.Sort(sortedKeys)
-
- sortedMap := make(map[string]T)
- for _, k := range sortedKeys {
- sortedMap[k] = m[k]
- }
-
- return sortedMap
+ return result
}
func Sha256(content string) string {
diff --git a/internal/provider/utils/values.go b/internal/provider/utils/values.go
index cf6877df..5fe05cad 100644
--- a/internal/provider/utils/values.go
+++ b/internal/provider/utils/values.go
@@ -16,10 +16,10 @@ import (
func GetValueOrFileValue(attValue, attFile string, value, file types.String) (string, error) {
valueResult := value.ValueString()
- if path := file.ValueString(); path != "" {
- fileRaw, err := os.ReadFile(path)
+ if p := file.ValueString(); p != "" {
+ fileRaw, err := os.ReadFile(p)
if err != nil {
- return "", fmt.Errorf("reading '%s' from file %q: %w", attFile, path, err)
+ return "", fmt.Errorf("reading '%s' from file %q: %w", attFile, p, err)
}
fileResult := strings.TrimSpace(string(fileRaw))
@@ -36,10 +36,10 @@ func GetValueOrFileValue(attValue, attFile string, value, file types.String) (st
func GetCertOrFileCert(attValue, attFile string, value, file types.String) (string, error) {
valueResult := strings.TrimSpace(value.ValueString())
- if path := file.ValueString(); path != "" {
- b64, err := auth.ConvertFileToBase64(path)
+ if p := file.ValueString(); p != "" {
+ b64, err := auth.ConvertFileToBase64(p)
if err != nil {
- return "", fmt.Errorf("reading '%s' from file %q: %w", attFile, path, err)
+ return "", fmt.Errorf("reading '%s' from file %q: %w", attFile, p, err)
}
fileResult := strings.TrimSpace(b64)
diff --git a/internal/services/datapipeline/base.go b/internal/services/datapipeline/base.go
index 9468b355..1f20d249 100644
--- a/internal/services/datapipeline/base.go
+++ b/internal/services/datapipeline/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -22,4 +23,10 @@ const (
ItemPreview = true
)
-var ItemDefinitionPaths = []string{"pipeline-content.json"} //nolint:gochecknoglobals
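+// itemDefinitionFormats declares the single Data Pipeline definition format; an empty API value means no explicit format is sent to the service.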
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: fabricitem.DefinitionFormatDefault,
+ API: "",
+ Paths: []string{"pipeline-content.json"},
+ },
+}
diff --git a/internal/services/datapipeline/resource_data_pipeline.go b/internal/services/datapipeline/resource_data_pipeline.go
index cea02ff1..61e3899c 100644
--- a/internal/services/datapipeline/resource_data_pipeline.go
+++ b/internal/services/datapipeline/resource_data_pipeline.go
@@ -23,16 +23,14 @@ func NewResourceDataPipeline() resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: "",
- FormatTypes: []string{},
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPaths,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtMost(1),
- mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)),
+ mapvalidator.KeysAre(stringvalidator.OneOf(fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, fabricitem.DefinitionFormatDefault)...)),
},
DefinitionRequired: false,
DefinitionEmpty: ItemDefinitionEmpty,
+ DefinitionFormats: itemDefinitionFormats,
IsPreview: ItemPreview,
}
diff --git a/internal/services/eventhouse/base.go b/internal/services/eventhouse/base.go
index 8d30857e..a7a36c08 100644
--- a/internal/services/eventhouse/base.go
+++ b/internal/services/eventhouse/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,12 +18,14 @@ const (
ItemType = fabcore.ItemTypeEventhouse
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse"
- ItemFormatTypeDefault = ""
ItemDefinitionEmpty = `{}`
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/eventhouse-definition"
)
-var (
- ItemFormatTypes = []string{} //nolint:gochecknoglobals
- ItemDefinitionPaths = []string{"EventhouseProperties.json"} //nolint:gochecknoglobals
-)
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: fabricitem.DefinitionFormatDefault,
+ API: "",
+ Paths: []string{"EventhouseProperties.json"},
+ },
+}
diff --git a/internal/services/eventhouse/data_eventhouse.go b/internal/services/eventhouse/data_eventhouse.go
index 78e43261..170a94e1 100644
--- a/internal/services/eventhouse/data_eventhouse.go
+++ b/internal/services/eventhouse/data_eventhouse.go
@@ -78,9 +78,7 @@ func NewDataSourceEventhouse(ctx context.Context) datasource.DataSource {
"Use this data source to fetch an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
IsDisplayNameUnique: true,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
- DefinitionPathKeys: ItemDefinitionPaths,
+ DefinitionFormats: itemDefinitionFormats,
},
PropertiesAttributes: getDataSourceEventhousePropertiesAttributes(ctx),
PropertiesSetter: propertiesSetter,
diff --git a/internal/services/eventhouse/data_eventhouse_test.go b/internal/services/eventhouse/data_eventhouse_test.go
index 36b46e05..f78abd04 100644
--- a/internal/services/eventhouse/data_eventhouse_test.go
+++ b/internal/services/eventhouse/data_eventhouse_test.go
@@ -237,6 +237,7 @@ func TestAcc_EventhouseDataSource(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"id": entityID,
+ "format": "Default",
"output_definition": true,
},
),
diff --git a/internal/services/eventhouse/resource_eventhouse.go b/internal/services/eventhouse/resource_eventhouse.go
index 91ad8da0..e27ce9d8 100644
--- a/internal/services/eventhouse/resource_eventhouse.go
+++ b/internal/services/eventhouse/resource_eventhouse.go
@@ -7,7 +7,6 @@ import (
"context"
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
@@ -61,16 +60,14 @@ func NewResourceEventhouse(ctx context.Context) resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPaths,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtMost(1),
- mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)),
+ mapvalidator.KeysAre(fabricitem.DefinitionPathKeysValidator(itemDefinitionFormats)...),
},
DefinitionRequired: false,
DefinitionEmpty: ItemDefinitionEmpty,
+ DefinitionFormats: itemDefinitionFormats,
},
PropertiesAttributes: getResourceEventhousePropertiesAttributes(ctx),
PropertiesSetter: propertiesSetter,
diff --git a/internal/services/eventhouse/resource_eventhouse_test.go b/internal/services/eventhouse/resource_eventhouse_test.go
index 1997e80d..9c805476 100644
--- a/internal/services/eventhouse/resource_eventhouse_test.go
+++ b/internal/services/eventhouse/resource_eventhouse_test.go
@@ -58,6 +58,7 @@ func TestUnit_EventhouseResource_Attributes(t *testing.T) {
map[string]any{
"workspace_id": "invalid uuid",
"display_name": "test",
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -74,6 +75,7 @@ func TestUnit_EventhouseResource_Attributes(t *testing.T) {
"workspace_id": "00000000-0000-0000-0000-000000000000",
"display_name": "test",
"unexpected_attr": "test",
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -88,6 +90,7 @@ func TestUnit_EventhouseResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"display_name": "test",
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -102,6 +105,7 @@ func TestUnit_EventhouseResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"workspace_id": "00000000-0000-0000-0000-000000000000",
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -125,6 +129,7 @@ func TestUnit_EventhouseResource_ImportState(t *testing.T) {
map[string]any{
"workspace_id": *entity.WorkspaceID,
"display_name": *entity.DisplayName,
+ "format": "Default",
"definition": testHelperDefinition,
},
))
@@ -202,6 +207,7 @@ func TestUnit_EventhouseResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityExist.WorkspaceID,
"display_name": *entityExist.DisplayName,
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -217,6 +223,7 @@ func TestUnit_EventhouseResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityBefore.DisplayName,
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -239,6 +246,7 @@ func TestUnit_EventhouseResource_CRUD(t *testing.T) {
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityAfter.DisplayName,
"description": *entityAfter.Description,
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -324,6 +332,7 @@ func TestAcc_EventhouseDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityCreateDisplayName,
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
@@ -347,6 +356,7 @@ func TestAcc_EventhouseDefinitionResource_CRUD(t *testing.T) {
"workspace_id": workspaceID,
"display_name": entityUpdateDisplayName,
"description": entityUpdateDescription,
+ "format": "Default",
"definition": testHelperDefinition,
},
)),
diff --git a/internal/services/kqldatabase/base.go b/internal/services/kqldatabase/base.go
index e54e5de9..c82c7a7b 100644
--- a/internal/services/kqldatabase/base.go
+++ b/internal/services/kqldatabase/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,11 +18,14 @@ const (
ItemType = fabcore.ItemTypeKQLDatabase
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/create-database"
- ItemFormatTypeDefault = ""
+ ItemFormatTypeDefault = fabricitem.DefinitionFormatDefault
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/kql-database-definition"
)
-var (
- ItemFormatTypes = []string{""} //nolint:gochecknoglobals
- ItemDefinitionPaths = []string{"DatabaseProperties.json", "DatabaseSchema.kql"} //nolint:gochecknoglobals
-)
+// var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+// {
+// Type: fabricitem.DefinitionFormatDefault,
+// API: "",
+// Paths: []string{"DatabaseProperties.json", "DatabaseSchema.kql"},
+// },
+// }
diff --git a/internal/services/notebook/base.go b/internal/services/notebook/base.go
index 881e382d..61f26998 100644
--- a/internal/services/notebook/base.go
+++ b/internal/services/notebook/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,12 +18,19 @@ const (
ItemType = fabcore.ItemTypeNotebook
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook"
- ItemFormatTypeDefault = "ipynb"
ItemDefinitionEmptyIPYNB = `{"cells":[{"cell_type":"code","metadata":{},"source":["# Welcome to your notebook"]}],"metadata":{"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":5}`
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/notebook-definition"
)
-var (
- ItemFormatTypes = []string{"ipynb"} //nolint:gochecknoglobals
- ItemDefinitionPathsIPYNB = []string{"notebook-content.ipynb"} //nolint:gochecknoglobals
-)
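+// itemDefinitionFormats declares the supported Notebook definition formats: "ipynb" is passed to the API as-is, while "py" relies on the service default (empty API format).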
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: "ipynb",
+ API: "ipynb",
+ Paths: []string{"notebook-content.ipynb"},
+ },
+ {
+ Type: "py",
+ API: "",
+ Paths: []string{"notebook-content.py"},
+ },
+}
diff --git a/internal/services/notebook/data_notebook.go b/internal/services/notebook/data_notebook.go
index d3c9e299..8fedbe98 100644
--- a/internal/services/notebook/data_notebook.go
+++ b/internal/services/notebook/data_notebook.go
@@ -18,9 +18,7 @@ func NewDataSourceNotebook() datasource.DataSource {
"Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
IsDisplayNameUnique: true,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
- DefinitionPathKeys: ItemDefinitionPathsIPYNB,
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewDataSourceFabricItemDefinition(config)
diff --git a/internal/services/notebook/data_notebook_test.go b/internal/services/notebook/data_notebook_test.go
index cd7a7816..3bd2700d 100644
--- a/internal/services/notebook/data_notebook_test.go
+++ b/internal/services/notebook/data_notebook_test.go
@@ -173,6 +173,7 @@ func TestAcc_NotebookDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckNoResourceAttr(testDataSourceItemFQN, "definition"),
),
},
// read by id - not found
@@ -200,6 +201,7 @@ func TestAcc_NotebookDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckNoResourceAttr(testDataSourceItemFQN, "definition"),
),
},
// read by name - not found
@@ -213,5 +215,55 @@ func TestAcc_NotebookDataSource(t *testing.T) {
),
ExpectError: regexp.MustCompile(common.ErrorReadHeader),
},
+ // read by id with definition - default
+ {
+ Config: at.CompileConfig(
+ testDataSourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "id": entityID,
+ "output_definition": true,
+ },
+ ),
+ ExpectError: regexp.MustCompile("Invalid configuration for attribute format"),
+ },
+ // read by id with definition - py
+ {
+ Config: at.CompileConfig(
+ testDataSourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "id": entityID,
+ "output_definition": true,
+ "format": "py",
+ },
+ ),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "workspace_id", workspaceID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "definition.notebook-content.py.content"),
+ ),
+ },
+ // read by id with definition - ipynb
+ {
+ Config: at.CompileConfig(
+ testDataSourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "id": entityID,
+ "output_definition": true,
+ "format": "ipynb",
+ },
+ ),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "workspace_id", workspaceID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "definition.notebook-content.ipynb.content"),
+ ),
+ },
}))
}
diff --git a/internal/services/notebook/resource_notebook.go b/internal/services/notebook/resource_notebook.go
index 48e72528..78e1527a 100644
--- a/internal/services/notebook/resource_notebook.go
+++ b/internal/services/notebook/resource_notebook.go
@@ -5,7 +5,6 @@ package notebook
import (
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
@@ -23,16 +22,14 @@ func NewResourceNotebook() resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPathsIPYNB,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtMost(1),
- mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPathsIPYNB...)),
+ mapvalidator.KeysAre(fabricitem.DefinitionPathKeysValidator(itemDefinitionFormats)...),
},
DefinitionRequired: false,
DefinitionEmpty: ItemDefinitionEmptyIPYNB,
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewResourceFabricItemDefinition(config)
diff --git a/internal/services/notebook/resource_notebook_test.go b/internal/services/notebook/resource_notebook_test.go
index 0839a038..32b1bd2f 100644
--- a/internal/services/notebook/resource_notebook_test.go
+++ b/internal/services/notebook/resource_notebook_test.go
@@ -28,12 +28,18 @@ var testHelperLocals = at.CompileLocalsConfig(map[string]any{
"path": testhelp.GetFixturesDirPath("notebook"),
})
-var testHelperDefinition = map[string]any{
+var testHelperDefinitionIPYNB = map[string]any{
`"notebook-content.ipynb"`: map[string]any{
"source": "${local.path}/notebook.ipynb.tmpl",
},
}
+var testHelperDefinitionPY = map[string]any{
+ `"notebook-content.py"`: map[string]any{
+ "source": "${local.path}/notebook.py.tmpl",
+ },
+}
+
func TestUnit_NotebookResource_Attributes(t *testing.T) {
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -58,7 +64,8 @@ func TestUnit_NotebookResource_Attributes(t *testing.T) {
map[string]any{
"workspace_id": "invalid uuid",
"display_name": "test",
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
ExpectError: regexp.MustCompile(customtypes.UUIDTypeErrorInvalidStringHeader),
@@ -74,7 +81,8 @@ func TestUnit_NotebookResource_Attributes(t *testing.T) {
"workspace_id": "00000000-0000-0000-0000-000000000000",
"display_name": "test",
"unexpected_attr": "test",
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
ExpectError: regexp.MustCompile(`An argument named "unexpected_attr" is not expected here`),
@@ -88,7 +96,8 @@ func TestUnit_NotebookResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"display_name": "test",
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found.`),
@@ -102,7 +111,8 @@ func TestUnit_NotebookResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"workspace_id": "00000000-0000-0000-0000-000000000000",
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
ExpectError: regexp.MustCompile(`The argument "display_name" is required, but no definition was found.`),
@@ -125,7 +135,8 @@ func TestUnit_NotebookResource_ImportState(t *testing.T) {
map[string]any{
"workspace_id": *entity.WorkspaceID,
"display_name": *entity.DisplayName,
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
))
@@ -202,7 +213,8 @@ func TestUnit_NotebookResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityExist.WorkspaceID,
"display_name": *entityExist.DisplayName,
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
ExpectError: regexp.MustCompile(common.ErrorCreateHeader),
@@ -217,7 +229,8 @@ func TestUnit_NotebookResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityBefore.DisplayName,
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
@@ -236,7 +249,8 @@ func TestUnit_NotebookResource_CRUD(t *testing.T) {
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityAfter.DisplayName,
"description": *entityAfter.Description,
- "definition": testHelperDefinition,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
@@ -268,7 +282,110 @@ func TestAcc_NotebookResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityCreateDisplayName,
- "definition": testHelperDefinition,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ ),
+ },
+ // Update and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityUpdateDisplayName,
+ "description": entityUpdateDescription,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ ),
+ },
+ },
+ ))
+}
+
+func TestAcc_NotebookDefinitionIPYNBResource_CRUD(t *testing.T) {
+ workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspaceID := workspace["id"].(string)
+
+ entityCreateDisplayName := testhelp.RandomName()
+ entityUpdateDisplayName := testhelp.RandomName()
+ entityUpdateDescription := testhelp.RandomName()
+
+ resource.Test(t, testhelp.NewTestAccCase(t, &testResourceItemFQN, nil, []resource.TestStep{
+ // Create and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityCreateDisplayName,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ ),
+ },
+ // Update and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityUpdateDisplayName,
+ "description": entityUpdateDescription,
+ "format": "ipynb",
+ "definition": testHelperDefinitionIPYNB,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ ),
+ },
+ },
+ ))
+}
+
+func TestAcc_NotebookDefinitionPYResource_CRUD(t *testing.T) {
+ workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspaceID := workspace["id"].(string)
+
+ entityCreateDisplayName := testhelp.RandomName()
+ entityUpdateDisplayName := testhelp.RandomName()
+ entityUpdateDescription := testhelp.RandomName()
+
+ resource.Test(t, testhelp.NewTestAccCase(t, &testResourceItemFQN, nil, []resource.TestStep{
+ // Create and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityCreateDisplayName,
+ "format": "py",
+ "definition": testHelperDefinitionPY,
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
@@ -288,7 +405,8 @@ func TestAcc_NotebookResource_CRUD(t *testing.T) {
"workspace_id": workspaceID,
"display_name": entityUpdateDisplayName,
"description": entityUpdateDescription,
- "definition": testHelperDefinition,
+ "format": "py",
+ "definition": testHelperDefinitionPY,
},
)),
Check: resource.ComposeAggregateTestCheckFunc(
diff --git a/internal/services/report/base.go b/internal/services/report/base.go
index 84e3c701..8dd5e0f3 100644
--- a/internal/services/report/base.go
+++ b/internal/services/report/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,11 +18,18 @@ const (
ItemType = fabcore.ItemTypeReport
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/power-bi/developer/projects/projects-report"
- ItemFormatTypeDefault = "PBIR-Legacy"
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/report-definition"
)
-var (
- ItemFormatTypes = []string{"PBIR-Legacy"} //nolint:gochecknoglobals
- ItemDefinitionPathsPBIRLegacy = []string{"report.json", "definition.pbir", "StaticResources/RegisteredResources/*", "StaticResources/SharedResources/*"} //nolint:gochecknoglobals
-)
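+// itemDefinitionFormats declares the supported Report definition formats: PBIR-Legacy uses the flat report.json layout, while PBIR uses the folder-based definition/ layout.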
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: "PBIR-Legacy",
+ API: "PBIR-Legacy",
+ Paths: []string{"report.json", "definition.pbir", "StaticResources/RegisteredResources/*", "StaticResources/SharedResources/*"},
+ },
+ {
+ Type: "PBIR",
+ API: "PBIR",
+ Paths: []string{"definition/report.json", "definition/version.json", "definition.pbir", "definition/pages/*.json", "StaticResources/RegisteredResources/*", "StaticResources/SharedResources/*"},
+ },
+}
diff --git a/internal/services/report/data_report.go b/internal/services/report/data_report.go
index fd6e4002..208cf5a9 100644
--- a/internal/services/report/data_report.go
+++ b/internal/services/report/data_report.go
@@ -18,9 +18,7 @@ func NewDataSourceReport() datasource.DataSource {
"Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
IsDisplayNameUnique: false,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
- DefinitionPathKeys: ItemDefinitionPathsPBIRLegacy,
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewDataSourceFabricItemDefinition(config)
diff --git a/internal/services/report/data_report_test.go b/internal/services/report/data_report_test.go
index 3a2b32c2..791d89b5 100644
--- a/internal/services/report/data_report_test.go
+++ b/internal/services/report/data_report_test.go
@@ -156,6 +156,7 @@ func TestAcc_ReportDataSource(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"id": entityID,
+ "format": "PBIR-Legacy",
"output_definition": true,
},
),
diff --git a/internal/services/report/resource_report.go b/internal/services/report/resource_report.go
index 37935f31..99e7d649 100644
--- a/internal/services/report/resource_report.go
+++ b/internal/services/report/resource_report.go
@@ -4,13 +4,14 @@
package report
import (
- "regexp"
-
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ fwvalidators "github.com/microsoft/terraform-provider-fabric/internal/framework/validators"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
)
@@ -26,21 +27,27 @@ func NewResourceReport() resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPathsPBIRLegacy,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtLeast(3),
mapvalidator.KeysAre(
- stringvalidator.RegexMatches(
- regexp.MustCompile(`^(report\.json|definition\.pbir|StaticResources/RegisteredResources/.*|StaticResources/SharedResources/.*)$`),
- "Definition path must match one of the following: "+utils.ConvertStringSlicesToString(ItemDefinitionPathsPBIRLegacy, true, false),
+ fwvalidators.PatternsIfAttributeIsOneOf(
+ path.MatchRoot("format"),
+ []attr.Value{types.StringValue("PBIR-Legacy")},
+ fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "PBIR-Legacy"),
+ "Definition path must match one of the following: "+utils.ConvertStringSlicesToString(fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "PBIR-Legacy"), true, false),
+ ),
+ fwvalidators.PatternsIfAttributeIsOneOf(
+ path.MatchRoot("format"),
+ []attr.Value{types.StringValue("PBIR")},
+ fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "PBIR"),
+ "Definition path must match one of the following: "+utils.ConvertStringSlicesToString(fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "PBIR"), true, false),
),
),
},
DefinitionRequired: true,
DefinitionEmpty: "",
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewResourceFabricItemDefinition(config)
diff --git a/internal/services/report/resource_report_test.go b/internal/services/report/resource_report_test.go
index ff56b02b..328193b5 100644
--- a/internal/services/report/resource_report_test.go
+++ b/internal/services/report/resource_report_test.go
@@ -68,6 +68,7 @@ func TestUnit_ReportResource_Attributes(t *testing.T) {
map[string]any{
"workspace_id": "invalid uuid",
"display_name": "test",
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -84,6 +85,7 @@ func TestUnit_ReportResource_Attributes(t *testing.T) {
"workspace_id": "00000000-0000-0000-0000-000000000000",
"display_name": "test",
"unexpected_attr": "test",
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -98,6 +100,7 @@ func TestUnit_ReportResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"display_name": "test",
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -112,6 +115,7 @@ func TestUnit_ReportResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"workspace_id": "00000000-0000-0000-0000-000000000000",
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -149,6 +153,7 @@ func TestUnit_ReportResource_ImportState(t *testing.T) {
map[string]any{
"workspace_id": *entity.WorkspaceID,
"display_name": *entity.DisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
))
@@ -231,6 +236,7 @@ func TestUnit_ReportResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityExist.WorkspaceID,
"display_name": *entityExist.DisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -246,6 +252,7 @@ func TestUnit_ReportResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityBefore.DisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -265,6 +272,7 @@ func TestUnit_ReportResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityAfter.DisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -300,6 +308,7 @@ func TestAcc_ReportResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityCreateDisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
@@ -319,6 +328,7 @@ func TestAcc_ReportResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityUpdateDisplayName,
+ "format": "PBIR-Legacy",
"definition": testHelperDefinition,
},
)),
diff --git a/internal/services/semanticmodel/base.go b/internal/services/semanticmodel/base.go
index b28d72dd..ee8fa797 100644
--- a/internal/services/semanticmodel/base.go
+++ b/internal/services/semanticmodel/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,11 +18,18 @@ const (
ItemType = fabcore.ItemTypeSemanticModel
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/power-bi/developer/projects/projects-dataset"
- ItemFormatTypeDefault = "TMSL"
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/semantic-model-definition"
)
-var (
- ItemFormatTypes = []string{"TMSL"} //nolint:gochecknoglobals
- ItemDefinitionPathsTMSL = []string{"model.bim", "definition.pbism", "diagramLayout.json"} //nolint:gochecknoglobals
-)
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: "TMSL",
+ API: "TMSL",
+ Paths: []string{"model.bim", "definition.pbism", "diagramLayp.json"},
+ },
+ {
+ Type: "TMDL",
+ API: "TMDL",
+ Paths: []string{"definition/database.tmdl", "definition/model.tmdl", "definition.pbism", "diagramLayp.json", "definition/tables/*.tmdl"},
+ },
+}
diff --git a/internal/services/semanticmodel/data_semantic_model.go b/internal/services/semanticmodel/data_semantic_model.go
index b0ed23bb..b67e03ed 100644
--- a/internal/services/semanticmodel/data_semantic_model.go
+++ b/internal/services/semanticmodel/data_semantic_model.go
@@ -18,9 +18,7 @@ func NewDataSourceSemanticModel() datasource.DataSource {
"Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
IsDisplayNameUnique: false,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
- DefinitionPathKeys: ItemDefinitionPathsTMSL,
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewDataSourceFabricItemDefinition(config)
diff --git a/internal/services/semanticmodel/data_semantic_model_test.go b/internal/services/semanticmodel/data_semantic_model_test.go
index 302f8865..0ea4792e 100644
--- a/internal/services/semanticmodel/data_semantic_model_test.go
+++ b/internal/services/semanticmodel/data_semantic_model_test.go
@@ -156,6 +156,7 @@ func TestAcc_SemanticModelDataSource(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"id": entityID,
+ "format": "TMSL",
"output_definition": true,
},
),
diff --git a/internal/services/semanticmodel/resource_semantic_model.go b/internal/services/semanticmodel/resource_semantic_model.go
index 8cfcfafa..df2a61ff 100644
--- a/internal/services/semanticmodel/resource_semantic_model.go
+++ b/internal/services/semanticmodel/resource_semantic_model.go
@@ -5,11 +5,15 @@ package semanticmodel
import (
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ fwvalidators "github.com/microsoft/terraform-provider-fabric/internal/framework/validators"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
)
func NewResourceSemanticModel() resource.Resource {
@@ -23,16 +27,27 @@ func NewResourceSemanticModel() resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPathsTMSL,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtLeast(2),
- mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPathsTMSL...)),
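+ // Validate part path keys against the accepted paths of the chosen format (TMSL or TMDL).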
+ mapvalidator.KeysAre(
+ fwvalidators.PatternsIfAttributeIsOneOf(
+ path.MatchRoot("format"),
+ []attr.Value{types.StringValue("TMSL")},
+ fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "TMSL"),
+ "Definition path must match one of the following: "+utils.ConvertStringSlicesToString(fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "TMSL"), true, false),
+ ),
+ fwvalidators.PatternsIfAttributeIsOneOf(
+ path.MatchRoot("format"),
+ []attr.Value{types.StringValue("TMDL")},
+ fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "TMDL"),
+ "Definition path must match one of the following: "+utils.ConvertStringSlicesToString(fabricitem.GetDefinitionFormatPaths(itemDefinitionFormats, "TMDL"), true, false),
+ ),
+ ),
},
DefinitionRequired: true,
DefinitionEmpty: "",
+ DefinitionFormats: itemDefinitionFormats,
}
return fabricitem.NewResourceFabricItemDefinition(config)
diff --git a/internal/services/semanticmodel/resource_semantic_model_test.go b/internal/services/semanticmodel/resource_semantic_model_test.go
index 40af2ada..d35a51cb 100644
--- a/internal/services/semanticmodel/resource_semantic_model_test.go
+++ b/internal/services/semanticmodel/resource_semantic_model_test.go
@@ -64,6 +64,7 @@ func TestUnit_SemanticModelResource_Attributes(t *testing.T) {
map[string]any{
"workspace_id": "invalid uuid",
"display_name": "test",
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -80,6 +81,7 @@ func TestUnit_SemanticModelResource_Attributes(t *testing.T) {
"workspace_id": "00000000-0000-0000-0000-000000000000",
"display_name": "test",
"unexpected_attr": "test",
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -94,6 +96,7 @@ func TestUnit_SemanticModelResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"display_name": "test",
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -108,6 +111,7 @@ func TestUnit_SemanticModelResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"workspace_id": "00000000-0000-0000-0000-000000000000",
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -145,6 +149,7 @@ func TestUnit_SemanticModelResource_ImportState(t *testing.T) {
map[string]any{
"workspace_id": *entity.WorkspaceID,
"display_name": *entity.DisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
))
@@ -222,6 +227,7 @@ func TestUnit_SemanticModelResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityExist.WorkspaceID,
"display_name": *entityExist.DisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -237,6 +243,7 @@ func TestUnit_SemanticModelResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityBefore.DisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -256,6 +263,7 @@ func TestUnit_SemanticModelResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityAfter.DisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -286,6 +294,7 @@ func TestAcc_SemanticModelResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityCreateDisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
@@ -305,6 +314,7 @@ func TestAcc_SemanticModelResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityUpdateDisplayName,
+ "format": "TMSL",
"definition": testHelperDefinition,
},
)),
diff --git a/internal/services/sparkjobdefinition/base.go b/internal/services/sparkjobdefinition/base.go
index 5a9ed42a..9c4d024b 100644
--- a/internal/services/sparkjobdefinition/base.go
+++ b/internal/services/sparkjobdefinition/base.go
@@ -7,6 +7,7 @@ import (
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
"github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
const (
@@ -17,12 +18,14 @@ const (
ItemType = fabcore.ItemTypeSparkJobDefinition
ItemDocsSPNSupport = common.DocsSPNSupported
ItemDocsURL = "https://learn.microsoft.com/fabric/data-engineering/spark-job-definition"
- ItemFormatTypeDefault = "SparkJobDefinitionV1"
ItemDefinitionEmpty = `{"executableFile":null,"defaultLakehouseArtifactId":null,"mainClass":null,"additionalLakehouseIds":[],"retryPolicy":null,"commandLineArguments":null,"additionalLibraryUris":null,"language":null,"environmentArtifactId":null}`
ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/spark-job-definition"
)
-var (
- ItemFormatTypes = []string{"SparkJobDefinitionV1"} //nolint:gochecknoglobals
- ItemDefinitionPaths = []string{"SparkJobDefinitionV1.json"} //nolint:gochecknoglobals
-)
+var itemDefinitionFormats = []fabricitem.DefinitionFormat{ //nolint:gochecknoglobals
+ {
+ Type: "SparkJobDefinitionV1",
+ API: "SparkJobDefinitionV1",
+ Paths: []string{"SparkJobDefinitionV1.json"},
+ },
+}
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition.go b/internal/services/sparkjobdefinition/data_spark_job_definition.go
index 386396e5..9a382a9e 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definition.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definition.go
@@ -78,9 +78,7 @@ func NewDataSourceSparkJobDefinition() datasource.DataSource {
"Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
IsDisplayNameUnique: true,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
- DefinitionPathKeys: ItemDefinitionPaths,
+ DefinitionFormats: itemDefinitionFormats,
},
PropertiesAttributes: getDataSourceSparkJobDefinitionPropertiesAttributes(),
PropertiesSetter: propertiesSetter,
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
index cb6d1700..d2e952c0 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
@@ -227,6 +227,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"id": entityID,
+ "format": "SparkJobDefinitionV1",
"output_definition": true,
},
),
diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition.go b/internal/services/sparkjobdefinition/resource_spark_job_definition.go
index 3c22c21c..a6c1aad0 100644
--- a/internal/services/sparkjobdefinition/resource_spark_job_definition.go
+++ b/internal/services/sparkjobdefinition/resource_spark_job_definition.go
@@ -7,7 +7,6 @@ import (
"context"
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
@@ -61,16 +60,14 @@ func NewResourceSparkJobDefinition() resource.Resource {
ItemDocsSPNSupport,
DisplayNameMaxLength: 123,
DescriptionMaxLength: 256,
- FormatTypeDefault: ItemFormatTypeDefault,
- FormatTypes: ItemFormatTypes,
DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
- DefinitionPathKeys: ItemDefinitionPaths,
DefinitionPathKeysValidator: []validator.Map{
mapvalidator.SizeAtMost(1),
- mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)),
+ mapvalidator.KeysAre(fabricitem.DefinitionPathKeysValidator(itemDefinitionFormats)...),
},
DefinitionRequired: false,
DefinitionEmpty: ItemDefinitionEmpty,
+ DefinitionFormats: itemDefinitionFormats,
},
PropertiesAttributes: getResourceSparkJobDefinitionPropertiesAttributes(),
PropertiesSetter: propertiesSetter,
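
fabricitem.DefinitionPathKeysValidator is only visible at its call site above; because its result is spread into mapvalidator.KeysAre, it must return a []validator.String. A plausible implementation, assuming it simply flattens the per-format path keys into a single OneOf check, is sketched below; the real helper in internal/pkg/fabricitem may differ.

package fabricitem

import (
	"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
)

// DefinitionPathKeysValidator collects every allowed definition part path
// across the supported formats and restricts definition map keys to that set.
func DefinitionPathKeysValidator(formats []DefinitionFormat) []validator.String {
	paths := make([]string, 0, len(formats))
	for _, format := range formats {
		paths = append(paths, format.Paths...)
	}

	return []validator.String{stringvalidator.OneOf(paths...)}
}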
diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
index 057529af..75c7ff0e 100644
--- a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
+++ b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
@@ -58,6 +58,7 @@ func TestUnit_SparkJobDefinitionResource_Attributes(t *testing.T) {
map[string]any{
"workspace_id": "invalid uuid",
"display_name": "test",
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -74,6 +75,7 @@ func TestUnit_SparkJobDefinitionResource_Attributes(t *testing.T) {
"workspace_id": "00000000-0000-0000-0000-000000000000",
"display_name": "test",
"unexpected_attr": "test",
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -88,6 +90,7 @@ func TestUnit_SparkJobDefinitionResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"display_name": "test",
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -102,6 +105,7 @@ func TestUnit_SparkJobDefinitionResource_Attributes(t *testing.T) {
testResourceItemHeader,
map[string]any{
"workspace_id": "00000000-0000-0000-0000-000000000000",
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -125,6 +129,7 @@ func TestUnit_SparkJobDefinitionResource_ImportState(t *testing.T) {
map[string]any{
"workspace_id": *entity.WorkspaceID,
"display_name": *entity.DisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
))
@@ -202,6 +207,7 @@ func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityExist.WorkspaceID,
"display_name": *entityExist.DisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -217,6 +223,7 @@ func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityBefore.DisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -236,6 +243,7 @@ func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": *entityBefore.WorkspaceID,
"display_name": *entityAfter.DisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -267,6 +275,7 @@ func TestAcc_SparkJobDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityCreateDisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
@@ -287,6 +296,7 @@ func TestAcc_SparkJobDefinitionResource_CRUD(t *testing.T) {
map[string]any{
"workspace_id": workspaceID,
"display_name": entityUpdateDisplayName,
+ "format": "SparkJobDefinitionV1",
"definition": testHelperDefinition,
},
)),
diff --git a/internal/services/workspace/resource_workspace_git.go b/internal/services/workspace/resource_workspace_git.go
index ea1f2e35..2b1dc366 100644
--- a/internal/services/workspace/resource_workspace_git.go
+++ b/internal/services/workspace/resource_workspace_git.go
@@ -336,7 +336,7 @@ func (r *resourceWorkspaceGit) Read(ctx context.Context, req resource.ReadReques
return
}

- resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
+ resp.Diagnostics.Append(resp.State.Set(ctx, state)...)

tflog.Debug(ctx, "READ", map[string]any{
"action": "end",
diff --git a/internal/testhelp/fixtures/notebook/notebook.py.tmpl b/internal/testhelp/fixtures/notebook/notebook.py.tmpl
new file mode 100644
index 00000000..6e270f50
--- /dev/null
+++ b/internal/testhelp/fixtures/notebook/notebook.py.tmpl
@@ -0,0 +1,22 @@
+# Fabric notebook source
+
+# METADATA ********************
+
+# META {
+# META "kernel_info": {
+# META "name": "synapse_pyspark"
+# META },
+# META "dependencies": {}
+# META }
+
+# CELL ********************
+
+# Welcome to your new notebook
+# Type here in the cell editor to add code!
+
+# METADATA ********************
+
+# META {
+# META "language": "python",
+# META "language_group": "synapse_pyspark"
+# META }