Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 9 additions & 8 deletions src/fabric_cicd/_common/_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,19 +136,20 @@ def exception_handler(exception_type: type[BaseException], exception: BaseExcept
sys.__excepthook__(exception_type, exception, traceback)


def print_header(message: str) -> None:
def log_header(logger: logging.Logger, message: str) -> None:
"""
Prints a header message with a decorative line above and below it.
Logs a header message with a decorative line above and below it.

Args:
message: The header message to print.
logger: The logger to use for logging the header message.
message: The header message to log.
"""
line_separator = "#" * 100
formatted_message = f"########## {message}"
formatted_message = f"{formatted_message} {line_separator[len(formatted_message) + 1 :]}"

print() # Print a blank line before the header
print(f"{Fore.GREEN}{Style.BRIGHT}{line_separator}{Style.RESET_ALL}")
print(f"{Fore.GREEN}{Style.BRIGHT}{formatted_message}{Style.RESET_ALL}")
print(f"{Fore.GREEN}{Style.BRIGHT}{line_separator}{Style.RESET_ALL}")
print()
logger.info("") # Log a blank line before the header
logger.info(f"{Fore.GREEN}{Style.BRIGHT}{line_separator}{Style.RESET_ALL}")
logger.info(f"{Fore.GREEN}{Style.BRIGHT}{formatted_message}{Style.RESET_ALL}")
logger.info(f"{Fore.GREEN}{Style.BRIGHT}{line_separator}{Style.RESET_ALL}")
logger.info("")
6 changes: 3 additions & 3 deletions src/fabric_cicd/fabric_workspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from fabric_cicd._common._exceptions import FailedPublishedItemStatusError, InputError, ParameterFileError, ParsingError
from fabric_cicd._common._fabric_endpoint import FabricEndpoint, _generate_fabric_credential, _is_fabric_runtime
from fabric_cicd._common._item import Item
from fabric_cicd._common._logging import print_header
from fabric_cicd._common._logging import log_header

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -241,7 +241,7 @@ def _refresh_parameter_file(self) -> None:
"""Load parameters if file is present."""
from fabric_cicd._parameter._parameter import Parameter

print_header("Validating Parameter File")
log_header(logger, "Validating Parameter File")

# Initialize the parameter dict and Parameter object
self.environment_parameter = {}
Expand Down Expand Up @@ -826,7 +826,7 @@ def _publish_folders(self) -> None:
"""Publishes all folders from the repository."""
# Sort folders by the number of '/' in their paths (ascending order)
sorted_folders = sorted(self.repository_folders.keys(), key=lambda path: path.count("/"))
print_header("Publishing Workspace Folders")
log_header(logger, "Publishing Workspace Folders")
logger.info("Publishing Workspace Folders")
for folder_path in sorted_folders:
if folder_path in self.deployed_folders:
Expand Down
58 changes: 29 additions & 29 deletions src/fabric_cicd/publish.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
load_config_file,
)
from fabric_cicd._common._exceptions import FailedPublishedItemStatusError, InputError
from fabric_cicd._common._logging import print_header
from fabric_cicd._common._logging import log_header
from fabric_cicd._common._validate_input import (
validate_environment,
validate_fabric_workspace_obj,
Expand Down Expand Up @@ -217,84 +217,84 @@ def _should_publish_item_type(item_type: str) -> bool:
)

if _should_publish_item_type("VariableLibrary"):
print_header("Publishing Variable Libraries")
log_header(logger, "Publishing Variable Libraries")
items.publish_variablelibraries(fabric_workspace_obj)
if _should_publish_item_type("Warehouse"):
print_header("Publishing Warehouses")
log_header(logger, "Publishing Warehouses")
items.publish_warehouses(fabric_workspace_obj)
if _should_publish_item_type("MirroredDatabase"):
print_header("Publishing Mirrored Databases")
log_header(logger, "Publishing Mirrored Databases")
items.publish_mirroreddatabase(fabric_workspace_obj)
if _should_publish_item_type("Lakehouse"):
print_header("Publishing Lakehouses")
log_header(logger, "Publishing Lakehouses")
items.publish_lakehouses(fabric_workspace_obj)
if _should_publish_item_type("SQLDatabase"):
print_header("Publishing SQL Databases")
log_header(logger, "Publishing SQL Databases")
items.publish_sqldatabases(fabric_workspace_obj)
if _should_publish_item_type("Environment"):
print_header("Publishing Environments")
log_header(logger, "Publishing Environments")
items.publish_environments(fabric_workspace_obj)
if _should_publish_item_type("UserDataFunction"):
print_header("Publishing User Data Functions")
log_header(logger, "Publishing User Data Functions")
items.publish_userdatafunctions(fabric_workspace_obj)
if _should_publish_item_type("Eventhouse"):
print_header("Publishing Eventhouses")
log_header(logger, "Publishing Eventhouses")
items.publish_eventhouses(fabric_workspace_obj)
if _should_publish_item_type("SparkJobDefinition"):
print_header("Publishing Spark Job Definitions")
log_header(logger, "Publishing Spark Job Definitions")
items.publish_sparkjobdefinitions(fabric_workspace_obj)
if _should_publish_item_type("Notebook"):
print_header("Publishing Notebooks")
log_header(logger, "Publishing Notebooks")
items.publish_notebooks(fabric_workspace_obj)
if _should_publish_item_type("SemanticModel"):
print_header("Publishing Semantic Models")
log_header(logger, "Publishing Semantic Models")
items.publish_semanticmodels(fabric_workspace_obj)
if _should_publish_item_type("Report"):
print_header("Publishing Reports")
log_header(logger, "Publishing Reports")
items.publish_reports(fabric_workspace_obj)
if _should_publish_item_type("CopyJob"):
print_header("Publishing Copy Jobs")
log_header(logger, "Publishing Copy Jobs")
items.publish_copyjobs(fabric_workspace_obj)
if _should_publish_item_type("KQLDatabase"):
print_header("Publishing KQL Databases")
log_header(logger, "Publishing KQL Databases")
items.publish_kqldatabases(fabric_workspace_obj)
if _should_publish_item_type("KQLQueryset"):
print_header("Publishing KQL Querysets")
log_header(logger, "Publishing KQL Querysets")
items.publish_kqlquerysets(fabric_workspace_obj)
if _should_publish_item_type("Reflex"):
print_header("Publishing Activators")
log_header(logger, "Publishing Activators")
items.publish_activators(fabric_workspace_obj)
if _should_publish_item_type("Eventstream"):
print_header("Publishing Eventstreams")
log_header(logger, "Publishing Eventstreams")
items.publish_eventstreams(fabric_workspace_obj)
if _should_publish_item_type("KQLDashboard"):
print_header("Publishing KQL Dashboards")
log_header(logger, "Publishing KQL Dashboards")
items.publish_kqldashboard(fabric_workspace_obj)
if _should_publish_item_type("Dataflow"):
print_header("Publishing Dataflows")
log_header(logger, "Publishing Dataflows")
items.publish_dataflows(fabric_workspace_obj)
if _should_publish_item_type("DataPipeline"):
print_header("Publishing Data Pipelines")
log_header(logger, "Publishing Data Pipelines")
items.publish_datapipelines(fabric_workspace_obj)
if _should_publish_item_type("GraphQLApi"):
print_header("Publishing GraphQL APIs")
log_header(logger, "Publishing GraphQL APIs")
items.publish_graphqlapis(fabric_workspace_obj)
if _should_publish_item_type("ApacheAirflowJob"):
print_header("Publishing Apache Airflow Jobs")
log_header(logger, "Publishing Apache Airflow Jobs")
items.publish_apacheairflowjobs(fabric_workspace_obj)
if _should_publish_item_type("MountedDataFactory"):
print_header("Publishing Mounted Data Factories")
log_header(logger, "Publishing Mounted Data Factories")
items.publish_mounteddatafactories(fabric_workspace_obj)
if _should_publish_item_type("DataAgent"):
print_header("Publishing Data Agents")
log_header(logger, "Publishing Data Agents")
items.publish_dataagents(fabric_workspace_obj)
if _should_publish_item_type("MLExperiment"):
print_header("Publishing ML Experiments")
log_header(logger, "Publishing ML Experiments")
items.publish_mlexperiments(fabric_workspace_obj)

# Check Environment Publish
if _should_publish_item_type("Environment"):
print_header("Checking Environment Publish State")
log_header(logger, "Checking Environment Publish State")
items.check_environment_publish_state(fabric_workspace_obj)

# Return response data if feature flag is enabled and responses were collected
Expand Down Expand Up @@ -364,7 +364,7 @@ def unpublish_all_orphan_items(

fabric_workspace_obj._refresh_deployed_items()
fabric_workspace_obj._refresh_repository_items()
print_header("Unpublishing Orphaned Items")
log_header(logger, "Unpublishing Orphaned Items")

if items_to_include:
if (
Expand Down Expand Up @@ -525,7 +525,7 @@ def deploy_with_config(
msg = "Config file-based deployment is currently an experimental feature. Both 'enable_experimental_features' and 'enable_config_deploy' feature flags must be set."
raise InputError(msg, logger)

print_header("Config-Based Deployment")
log_header(logger, "Config-Based Deployment")
logger.info(f"Loading configuration from {config_file_path} for environment '{environment}'")

# Validate environment
Expand Down
Loading