Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions docker/compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,8 @@ services:
- ~/.ssh:/root/.ssh:ro
networks:
- bag3d-network
extra_hosts:
- "3dbag.docker.internal:host-gateway"
depends_on:
data-postgresql:
condition: service_healthy
Expand Down
8 changes: 7 additions & 1 deletion makefile
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,12 @@ test_integration:
docker compose -p $(COMPOSE_PROJECT_NAME) exec bag3d-floors-estimation pytest /opt/3dbag-pipeline/packages/floors_estimation/tests/test_integration.py -v -s --run-all || FAILED=1; \
exit $$FAILED

# Run the deploy-release integration test inside the bag3d-core container.
# The `|| FAILED=1` capture keeps `set -e` from aborting the recipe on a
# pytest failure, so the final `exit $$FAILED` reports the test status to make.
test_deploy:
	@set -e; \
	FAILED=0; \
	docker compose -p $(COMPOSE_PROJECT_NAME) exec bag3d-core pytest /opt/3dbag-pipeline/packages/core/tests/test_integration.py -v -s --run-all --run-deploy -k 'test_integration_deploy_release' || FAILED=1; \
	exit $$FAILED

test_all:
@set -e; \
FAILED=0; \
Expand All @@ -107,7 +113,7 @@ include .env
# Fetch and unpack the pipeline test data (v13) into $(BAG3D_TEST_DATA).
# The target directory is recreated from scratch so stale files from a
# previous test-data version cannot leak into the new fixture set.
download:
	rm -rf $(BAG3D_TEST_DATA)
	mkdir -p $(BAG3D_TEST_DATA)
	cd $(BAG3D_TEST_DATA) ; curl -O https://data.3dbag.nl/testdata/pipeline/test_data_v13.zip ; unzip -q test_data_v13.zip ; rm test_data_v13.zip


install_uv:
Expand Down
50 changes: 47 additions & 3 deletions packages/common/src/bag3d/common/resources/server_transfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,58 @@
class ServerTransferResource(ConfigurableResource):
    """
    A resource for transferring files to other servers.

    Attributes:
        host: Optional[str]
            The hostname or IP address of the remote server.
        port: Optional[int]
            The port to connect to on the remote server.
        user: Optional[str]
            The username to use for authentication.
        password: Optional[str]
            The password to use for authentication (if not using key).
        key_filename: Optional[str]
            The path to the private key file for key-based authentication.
        target_dir: Optional[str]
            The default target directory on the remote server for file transfers.
        public_dir: Optional[str]
            The 3DBAG public directory on the remote server.
    """

    host: Optional[str] = None
    port: Optional[int] = None
    user: Optional[str] = None
    password: Optional[str] = None
    key_filename: Optional[str] = None
    target_dir: Optional[str] = None
    public_dir: Optional[str] = None

    @property
    def connection(self) -> Connection:
        """Build a new Fabric :class:`Connection` to the configured host.

        Key-based authentication takes precedence over password
        authentication when both are configured. A fresh Connection is
        returned on every access so callers can use it as a context
        manager (``with self.connection as c: ...``).
        """
        connect_kwargs = {}
        if self.key_filename:
            connect_kwargs["key_filename"] = self.key_filename
        elif self.password:
            connect_kwargs["password"] = self.password
        return Connection(
            host=self.host,
            port=self.port,
            user=self.user,
            connect_kwargs=connect_kwargs,
        )

    def transfer_file(self, local_path, remote_path) -> bool:
        """Transfer a file to the remote server and verify the upload.

        Args:
            local_path: Path of the local file to upload.
            remote_path: Destination path on the remote server.

        Returns:
            True if the file exists on the remote server after upload.
        """
        with self.connection as conn:
            # Upload the file
            conn.put(local_path, remote_path)

            # Verify the file was uploaded. ``warn=True`` keeps Fabric from
            # raising on a non-zero exit so we can return a boolean instead.
            result = conn.run(f"test -f {remote_path}", warn=True, hide=True)
            return result.ok

    def file_exists(self, remote_path) -> bool:
        """Check if a file exists on the remote server.

        Args:
            remote_path: Path to check on the remote server.

        Returns:
            True if ``remote_path`` is an existing regular file.
        """
        with self.connection as conn:
            result = conn.run(f"test -f {remote_path}", warn=True, hide=True)
            return result.ok
88 changes: 43 additions & 45 deletions packages/core/src/bag3d/core/assets/deploy/servers.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,38 +8,39 @@

from bag3d.common.utils.database import load_sql
from bag3d.common.types import PostgresTableIdentifier
from bag3d.common.resources import ServerTransferResource
from bag3d.common.resources import ServerTransferResource, DatabaseResource
from dagster import get_dagster_logger


logger = get_dagster_logger("deploy")


@asset(
ins={"reconstruction_output_multitiles_nl": AssetIn(key_prefix="export")},
ins={"metadata": AssetIn(key_prefix="export")},
deps=[
AssetKey(("export", "geopackage_nl")),
AssetKey(("export", "export_index")),
AssetKey(("export", "metadata")),
AssetKey(("export", "compressed_tiles")),
AssetKey(("export", "compressed_tiles_validation")),
AssetKey(("export", "reconstruction_output_multitiles_nl")),
AssetKey(("export", "reconstruction_output_3dtiles_lod12_nl")),
AssetKey(("export", "reconstruction_output_3dtiles_lod13_nl")),
AssetKey(("export", "reconstruction_output_3dtiles_lod22_nl")),
],
required_resource_keys={"version"},
)
def compressed_export_nl(context, reconstruction_output_multitiles_nl):
def compressed_export_nl(context, metadata):
"""Create a compressed tar.gz archive containing the complete 3D BAG export.
The archive will be named `export_<version>.tar.gz`.

Args:
context: Dagster execution context
reconstruction_output_multitiles_nl: Path to the exported data directory
metadata: Path to the 3DBAG metadata file

Returns:
Output: Path to the created export_{version}.tar.gz file with size metadata
"""
export_dir = reconstruction_output_multitiles_nl
export_dir = metadata.parent
version = context.resources.version.version
output_tarfile = export_dir.parent / f"export_{version}.tar.gz"
with tarfile.open(output_tarfile, "w:gz") as tar:
Expand All @@ -56,7 +57,7 @@ def transfer_to_server(
compressed_export_nl: Path,
metadata: Path,
target_dir: str,
) -> str:
) -> tuple[Path, Path]:
"""Transfer and extract export file to a remote server.

Args:
Expand All @@ -66,7 +67,7 @@ def transfer_to_server(
target_dir: Base directory on remote server for deployment

Returns:
str: Path to the deployment directory on the remote server
(Path to the deployment directory on the remote server, Path to the compressed export on the remote server)

Raises:
AssertionError: If SSH commands fail during transfer or extraction
Expand All @@ -76,11 +77,11 @@ def transfer_to_server(
with metadata.open("r") as fo:
metadata_json = json.load(fo)
version = metadata_json["identificationInfo"]["citation"]["edition"]
deploy_dir = f"{target_dir}/{version}"
deploy_dir = Path(target_dir) / version
compressed_file = Path(target_dir) / compressed_export_nl.name

try:
with server.connect as c:
with server.connection as c:
# test connection
result = c.run("echo connected", hide=True)
assert result.ok, "Connection command failed"
Expand All @@ -106,50 +107,47 @@ def transfer_to_server(
except Exception as e:
logger.error(f"SSH connection failed: {e}")
raise
return deploy_dir
return deploy_dir, compressed_file


@asset(
    ins={"metadata": AssetIn(key_prefix="export")},
)
def transfer_to_podzilla(
    compressed_export_nl: Path, metadata: Path, podzilla_server: ServerTransferResource
):
    """Transfer the 3D BAG export to the podzilla server for API access.

    Args:
        compressed_export_nl: Path to the ``export_<version>.tar.gz`` archive.
        metadata: Path to the 3DBAG metadata file (used to derive the version).
        podzilla_server: Pythonic resource describing the podzilla connection.

    Returns:
        Whatever :func:`transfer_to_server` returns for the podzilla target.
    """
    # The resource is injected as a Pythonic resource parameter, so it is
    # used directly instead of going through ``context.resources``.
    return transfer_to_server(
        podzilla_server,
        compressed_export_nl,
        metadata,
        podzilla_server.target_dir,
    )


@asset(
    ins={"metadata": AssetIn(key_prefix="export")},
)
def transfer_to_godzilla(
    compressed_export_nl: Path, metadata: Path, godzilla_server: ServerTransferResource
):
    """Transfer the 3D BAG export to the godzilla server for public downloads and webservices.

    Args:
        compressed_export_nl: Path to the ``export_<version>.tar.gz`` archive.
        metadata: Path to the 3DBAG metadata file (used to derive the version).
        godzilla_server: Pythonic resource describing the godzilla connection.

    Returns:
        Whatever :func:`transfer_to_server` returns for the godzilla target.
    """
    # The resource is injected as a Pythonic resource parameter, so it is
    # used directly instead of going through ``context.resources``.
    return transfer_to_server(
        godzilla_server,
        compressed_export_nl,
        metadata,
        godzilla_server.target_dir,
    )


@asset(
deps={AssetKey(("transfer_to_godzilla"))},
required_resource_keys={"db_connection", "godzilla_server"},
)
def webservice_godzilla(context, transfer_to_godzilla):
def webservice_godzilla(
transfer_to_godzilla,
db_connection: DatabaseResource,
godzilla_server: ServerTransferResource,
):
"""
Load the layers for WFS, WMS to the database on Godzilla.
The layers will be loaded into the schema `webservice_dev` and
Expand All @@ -158,8 +156,8 @@ def webservice_godzilla(context, transfer_to_godzilla):
"""
schema = "webservice_dev"
sql = f"drop schema if exists {schema} cascade; create schema {schema};"
with context.resources.godzilla_server.connect as c:
context.log.debug(sql)
with godzilla_server.connection as c:
logger.debug(sql)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'"
)
Expand All @@ -185,10 +183,10 @@ def webservice_godzilla(context, transfer_to_godzilla):
layer + "_tmp",
]
)
with context.resources.godzilla_server.connect as c:
context.log.debug(cmd)
with godzilla_server.connection as c:
logger.debug(cmd)
r = c.run(cmd)
context.log.debug(r.stdout)
logger.debug(r.stdout)

pand_table = PostgresTableIdentifier(schema, "pand_tmp")
lod12_2d_tmp = PostgresTableIdentifier(schema, "lod12_2d_tmp")
Expand All @@ -211,9 +209,9 @@ def webservice_godzilla(context, transfer_to_godzilla):
"lod22_2d": lod22_2d,
},
)
sql = context.resources.db_connection.connect.print_query(sql)
with context.resources.godzilla_server.connect as c:
context.log.debug(sql)
sql = db_connection.connect.print_query(sql)
with godzilla_server.connection as c:
logger.debug(sql)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'"
)
Expand All @@ -230,15 +228,15 @@ def webservice_godzilla(context, transfer_to_godzilla):
"validate_compressed_files": validate_compressed_files,
},
)
sql = context.resources.db_connection.connect.print_query(sql)
with context.resources.godzilla_server.connect as c:
context.log.debug(sql)
sql = db_connection.connect.print_query(sql)
with godzilla_server.connection as c:
logger.debug(sql)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'"
)

# Load the CSV files into the intermediary tables
with context.resources.godzilla_server.connect as c:
with godzilla_server.connection as c:
filepath = f"{deploy_dir}/export_index.csv"
copy_cmd = (
"\copy "
Expand All @@ -247,7 +245,7 @@ def webservice_godzilla(context, transfer_to_godzilla):
+ filepath
+ "' DELIMITER ',' CSV HEADER "
)
context.log.debug(f"{copy_cmd}")
logger.debug(f"{copy_cmd}")
c.run(
rf'psql --dbname baseregisters --port 5432 --host localhost --user etl -c "{copy_cmd}" '
)
Expand All @@ -259,7 +257,7 @@ def webservice_godzilla(context, transfer_to_godzilla):
+ filepath
+ "' DELIMITER ',' CSV HEADER "
)
context.log.debug(f"{copy_cmd}")
logger.debug(f"{copy_cmd}")
c.run(
rf'psql --dbname baseregisters --port 5432 --host localhost --user etl -c "{copy_cmd}" '
)
Expand All @@ -274,22 +272,22 @@ def webservice_godzilla(context, transfer_to_godzilla):
"validate_compressed_files": validate_compressed_files,
},
)
sql = context.resources.db_connection.connect.print_query(sql)
with context.resources.godzilla_server.connect as c:
context.log.debug(sql)
sql = db_connection.connect.print_query(sql)
with godzilla_server.connection as c:
logger.debug(sql)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'"
)

grant_usage = f"GRANT USAGE ON SCHEMA {schema} TO bag_geoserver;"
grant_select = f"GRANT SELECT ON ALL TABLES IN SCHEMA {schema} TO bag_geoserver;"

with context.resources.godzilla_server.connect as c:
context.log.debug(grant_usage)
with godzilla_server.connection as c:
logger.debug(grant_usage)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{grant_usage}'"
)
context.log.debug(grant_select)
logger.debug(grant_select)
c.run(
f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{grant_select}'"
)
Expand Down
Loading
Loading