Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changes/unreleased/added-20260223-073505.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
kind: added
body: Add export and import format support for Semantic Models and Spark Job Definitions
time: 2026-02-23T07:35:05.735812862Z
custom:
Author: ohadedry
AuthorLink: https://github.com/ohadedry
2 changes: 1 addition & 1 deletion docs/commands/fs/export.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ fab export <path> -o <output_path> [-a] [--format <format>] [-f]
- `<path>`: Path to the item to export.
- `-o, --output <output_path>`: Output directory path.
- `-a, --all`: Export all items. Optional.
- `--format <format>`: Format of the export. Supported for items allowing multiple export formats, currently only Notebooks (`.ipynb`, `.py`). Optional.
- `--format <format>`: Format of the export. Supported for items allowing multiple export formats, currently Notebooks (`.ipynb`, `.py`), Semantic Models (`TMDL`, `TMSL`), and Spark Job Definitions (`SparkJobDefinitionV1`, `SparkJobDefinitionV2`). Optional.
- `-f, --force`: Force export without confirmation. Optional.

**Example:**
Expand Down
2 changes: 1 addition & 1 deletion docs/commands/fs/import.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ fab import <path> -i <input_path> [--format <format>] [-f]

- `<path>`: Path to import to.
- `-i, --input <input_path>`: Input path.
- `--format <format>`: Format of the input. Supported only for Notebooks (`.ipynb`, `.py`). Optional.
- `--format <format>`: Format of the input. Supported only for Notebooks (`.ipynb`, `.py`), Semantic Models (`TMDL`, `TMSL`), and Spark Job Definitions (`SparkJobDefinitionV1`, `SparkJobDefinitionV2`). Optional.
- `-f, --force`: Force import without confirmation. Optional.

**Example:**
Expand Down
3 changes: 0 additions & 3 deletions docs/examples/item_examples.md
Original file line number Diff line number Diff line change
Expand Up @@ -345,9 +345,6 @@ Import an item definition from a local directory into the workspace.
fab import ws1.Workspace/nb1_imported.Notebook -i /tmp/exports/nb1.Notebook
```

**Supported Import Formats:** `.ipynb` (default) and `.py`.


### Start/Stop Mirrored Databases

#### Start Mirrored Database
Expand Down
30 changes: 21 additions & 9 deletions src/fabric_cli/commands/fs/impor/fab_fs_import_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from fabric_cli.client.fab_api_types import ApiResponse
from fabric_cli.core import fab_constant, fab_logger
from fabric_cli.core.fab_exceptions import FabricCLIError
from fabric_cli.core.fab_types import ItemType
from fabric_cli.core.fab_types import ItemType, definition_format_mapping
from fabric_cli.core.hiearchy.fab_hiearchy import Item
from fabric_cli.utils import fab_cmd_import_utils as utils_import
from fabric_cli.utils import fab_mem_store as utils_mem_store
Expand All @@ -20,10 +20,21 @@ def import_single_item(item: Item, args: Namespace) -> None:
_input_format = None
if args.format:
_input_format = args.format
if _input_format not in (".py", ".ipynb"):
if item.item_type in definition_format_mapping:
valid_formats = []
valid_formats = list(
definition_format_mapping[item.item_type].keys())
if _input_format not in valid_formats:
available_formats = [
k for k in valid_formats if k != "default"]
raise FabricCLIError(
f"Invalid format. Only {', '.join(available_formats)} are supported.",
fab_constant.ERROR_INVALID_INPUT,
)
else:
raise FabricCLIError(
"Invalid format. Only '.py' and '.ipynb' are supported.",
fab_constant.ERROR_INVALID_INPUT,
f"Import format not supported for item type '{item.item_type}'",
fab_constant.ERROR_NOT_SUPPORTED,
)

args.ws_id = item.workspace.id
Expand Down Expand Up @@ -64,11 +75,12 @@ def import_single_item(item: Item, args: Namespace) -> None:
else:
_import_update_item(args, payload)

utils_ui.print_output_format(args, message=f"'{item.name}' imported"
)
utils_ui.print_output_format(
args, message=f"'{item.name}' imported")
else:
# Create
utils_ui.print_grey(f"Importing '{_input_path}' → '{item.path}'...")
utils_ui.print_grey(
f"Importing '{_input_path}' → '{item.path}'...")

# Environment item type, not supporting definition yet
if item.item_type == ItemType.ENVIRONMENT:
Expand All @@ -77,8 +89,8 @@ def import_single_item(item: Item, args: Namespace) -> None:
response = _import_create_item(args, payload)

if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{item.name}' imported"
)
utils_ui.print_output_format(
args, message=f"'{item.name}' imported")
data = json.loads(response.text)
item._id = data["id"]

Expand Down
12 changes: 11 additions & 1 deletion src/fabric_cli/core/fab_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,6 +404,7 @@ class SQLDatabaseFolders(Enum):
FILES = "Files"
CODE = "Code"


class CosmosDBDatabaseFolders(Enum):
TABLES = "Tables"
FILES = "Files"
Expand Down Expand Up @@ -576,12 +577,21 @@ class MirroredDatabaseFolders(Enum):
# Item Payload definition

definition_format_mapping = {
ItemType.SPARK_JOB_DEFINITION: {"default": "?format=SparkJobDefinitionV1"},
ItemType.SPARK_JOB_DEFINITION: {
"default": "?format=SparkJobDefinitionV1",
"SparkJobDefinitionV1": "?format=SparkJobDefinitionV1",
"SparkJobDefinitionV2": "?format=SparkJobDefinitionV2",
},
ItemType.NOTEBOOK: {
"default": "?format=ipynb",
".py": "?format=fabricGitSource",
".ipynb": "?format=ipynb",
},
ItemType.SEMANTIC_MODEL: {
"default": "",
"TMDL": "?format=TMDL",
"TMSL": "?format=TMSL",
},
ItemType.COSMOS_DB_DATABASE: {"default": ""},
ItemType.USER_DATA_FUNCTION: {"default": ""},
ItemType.GRAPH_QUERY_SET: {"default": ""},
Expand Down
22 changes: 20 additions & 2 deletions src/fabric_cli/core/hiearchy/fab_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,11 @@ def get_payload(self, definition, input_format=None) -> dict:
"folderId": self.folder_id,
"displayName": self.short_name,
"definition": {
"format": "SparkJobDefinitionV1",
"format": (
"SparkJobDefinitionV1"
if input_format is None
else input_format
),
"parts": definition["parts"],
},
}
Expand All @@ -102,9 +106,23 @@ def get_payload(self, definition, input_format=None) -> dict:
)
},
}
case ItemType.SEMANTIC_MODEL:
return {
"type": str(self.item_type),
"description": "Imported from fab",
"folderId": self.folder_id,
"displayName": self.short_name,
"definition": (
definition
if input_format is None
else {
"format": input_format,
"parts": definition["parts"],
}
),
}
case (
ItemType.REPORT
| ItemType.SEMANTIC_MODEL
| ItemType.KQL_DASHBOARD
| ItemType.DATA_PIPELINE
| ItemType.KQL_QUERYSET
Expand Down
2 changes: 1 addition & 1 deletion src/fabric_cli/parsers/fab_fs_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,7 +415,7 @@ def register_import_parser(subparsers: _SubParsersAction) -> None:
import_parser.add_argument(
"--format",
metavar="",
help="Input format. Optional, supported for notebooks (.ipynb, .py)",
help="Input format. Optional",
)
import_parser.add_argument(
"-f", "--force", required=False, action="store_true", help="Force. Optional"
Expand Down
32 changes: 20 additions & 12 deletions tests/test_commands/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,17 @@
ItemType.GRAPH_QUERY_SET
])

export_item_format_parameters = pytest.mark.parametrize("item_type,export_format,expected_file_extension", [
(ItemType.NOTEBOOK, ".py", ".py"),
(ItemType.NOTEBOOK, ".ipynb", ".ipynb")
])
# Export-format cases: (item type, requested format, expected file
# extensions in the output, expected subfolders in the output).
_EXPORT_FORMAT_CASES = [
    (ItemType.NOTEBOOK, ".py", [".py"], []),
    (ItemType.NOTEBOOK, ".ipynb", [".ipynb"], []),
    (ItemType.SPARK_JOB_DEFINITION, "SparkJobDefinitionV1", [".json"], []),
    (ItemType.SPARK_JOB_DEFINITION, "SparkJobDefinitionV2", [".json"], []),
    (ItemType.SEMANTIC_MODEL, "TMDL", [".pbism"], ["definition"]),
    (ItemType.SEMANTIC_MODEL, "TMSL", [".pbism", ".bim"], []),
]

export_item_format_parameters = pytest.mark.parametrize(
    "item_type,export_format,expected_file_extensions,expected_folders",
    _EXPORT_FORMAT_CASES,
)

export_item_default_format_parameters = pytest.mark.parametrize("item_type,expected_file_count", [
(ItemType.NOTEBOOK, 2), # Default format for notebook is ipynb
Expand All @@ -102,14 +109,15 @@
(ItemType.GRAPH_QUERY_SET, 2)
])

export_item_invalid_format_parameters = pytest.mark.parametrize("item_type,invalid_format,expected_error_suffix", [
# (ItemType.NOTEBOOK, ".txt", "Only the following formats are supported: .py, .ipynb"),
(ItemType.SPARK_JOB_DEFINITION, ".txt", "No formats are supported"),
(ItemType.DATA_PIPELINE, ".txt", "No formats are supported"),
(ItemType.MIRRORED_DATABASE, ".txt", "No formats are supported"),
(ItemType.COSMOS_DB_DATABASE, ".txt", "No formats are supported"),
(ItemType.USER_DATA_FUNCTION, ".txt", "No formats are supported"),
(ItemType.GRAPH_QUERY_SET, ".txt", "No formats are supported")
# Every item type below must reject ".txt" as an export format.
export_item_invalid_format_parameters = pytest.mark.parametrize(
    "item_type,invalid_format",
    [
        (unsupported_type, ".txt")
        for unsupported_type in (
            ItemType.NOTEBOOK,
            ItemType.SPARK_JOB_DEFINITION,
            ItemType.SEMANTIC_MODEL,
            ItemType.DATA_PIPELINE,
            ItemType.MIRRORED_DATABASE,
            ItemType.COSMOS_DB_DATABASE,
            ItemType.USER_DATA_FUNCTION,
            ItemType.GRAPH_QUERY_SET,
        )
    ],
)

assign_entity_item_not_supported_failure_parameters = pytest.mark.parametrize("entity_type,factory_key,path_template", [
Expand Down
Loading