diff --git a/docker/compose.yaml b/docker/compose.yaml index f9f00c30..622e76d3 100644 --- a/docker/compose.yaml +++ b/docker/compose.yaml @@ -88,6 +88,8 @@ services: - ~/.ssh:/root/.ssh:ro networks: - bag3d-network + extra_hosts: + - "3dbag.docker.internal:host-gateway" depends_on: data-postgresql: condition: service_healthy diff --git a/makefile b/makefile index b4c69d53..c4cca2a6 100644 --- a/makefile +++ b/makefile @@ -93,6 +93,12 @@ test_integration: docker compose -p $(COMPOSE_PROJECT_NAME) exec bag3d-floors-estimation pytest /opt/3dbag-pipeline/packages/floors_estimation/tests/test_integration.py -v -s --run-all || FAILED=1; \ exit $$FAILED +test_deploy: + @set -e; \ + FAILED=0; \ + docker compose -p $(COMPOSE_PROJECT_NAME) exec bag3d-core pytest /opt/3dbag-pipeline/packages/core/tests/test_integration.py -v -s --run-all --run-deploy -k 'test_integration_deploy_release' || FAILED=1; \ + exit $$FAILED + test_all: @set -e; \ FAILED=0; \ @@ -107,7 +113,7 @@ include .env download: rm -rf $(BAG3D_TEST_DATA) mkdir -p $(BAG3D_TEST_DATA) - cd $(BAG3D_TEST_DATA) ; curl -O https://data.3dbag.nl/testdata/pipeline/test_data_v12.zip ; unzip -q test_data_v12.zip ; rm test_data_v12.zip + cd $(BAG3D_TEST_DATA) ; curl -O https://data.3dbag.nl/testdata/pipeline/test_data_v13.zip ; unzip -q test_data_v13.zip ; rm test_data_v13.zip install_uv: diff --git a/packages/common/src/bag3d/common/resources/server_transfer.py b/packages/common/src/bag3d/common/resources/server_transfer.py index 277db6ac..35f65c54 100644 --- a/packages/common/src/bag3d/common/resources/server_transfer.py +++ b/packages/common/src/bag3d/common/resources/server_transfer.py @@ -8,14 +8,58 @@ class ServerTransferResource(ConfigurableResource): """ A resource for transferring files to other servers. + + Attributes: + host: Optional[str] + The hostname or IP address of the remote server. + port: Optional[int] + The port to connect to on the remote server. + user: Optional[str] + The username to use for authentication. + password: Optional[str] + The password to use for authentication (if not using key). + key_filename: Optional[str] + The path to the private key file for key-based authentication. + target_dir: Optional[str] + The default target directory on the remote server for file transfers. + public_dir: Optional[str] + The 3DBAG public directory on the remote server. 
""" host: Optional[str] = None + port: Optional[int] = None user: Optional[str] = None + password: Optional[str] = None + key_filename: Optional[str] = None target_dir: Optional[str] = None public_dir: Optional[str] = None @property - def connect(self): - conn = Connection(host=self.host, user=self.user) - return conn + def connection(self) -> Connection: + connect_kwargs = {} + if self.key_filename: + connect_kwargs["key_filename"] = self.key_filename + elif self.password: + connect_kwargs["password"] = self.password + return Connection( + host=self.host, + port=self.port, + user=self.user, + connect_kwargs=connect_kwargs, + ) + + def transfer_file(self, local_path, remote_path) -> bool: + """Transfer a file to remote server.""" + with self.connection as conn: + # Upload the file + conn.put(local_path, remote_path) + + # Verify the file was uploaded + result = conn.run(f"test -f {remote_path}", warn=True, hide=True) + return result.ok + + def file_exists(self, remote_path) -> bool: + """Check if file exists on remote server.""" + with self.connection as conn: + result = conn.run(f"test -f {remote_path}", warn=True, hide=True) + return result.ok diff --git a/packages/core/src/bag3d/core/assets/deploy/servers.py b/packages/core/src/bag3d/core/assets/deploy/servers.py index d143e210..5c7d46b4 100644 --- a/packages/core/src/bag3d/core/assets/deploy/servers.py +++ b/packages/core/src/bag3d/core/assets/deploy/servers.py @@ -8,7 +8,7 @@ from bag3d.common.utils.database import load_sql from bag3d.common.types import PostgresTableIdentifier -from bag3d.common.resources import ServerTransferResource +from bag3d.common.resources import ServerTransferResource, DatabaseResource from dagster import get_dagster_logger @@ -16,30 +16,31 @@ @asset( - ins={"reconstruction_output_multitiles_nl": AssetIn(key_prefix="export")}, + ins={"metadata": AssetIn(key_prefix="export")}, deps=[ AssetKey(("export", "geopackage_nl")), AssetKey(("export", "export_index")), - AssetKey(("export", "metadata")), AssetKey(("export", "compressed_tiles")), AssetKey(("export", "compressed_tiles_validation")), + AssetKey(("export", "reconstruction_output_multitiles_nl")), AssetKey(("export", "reconstruction_output_3dtiles_lod12_nl")), AssetKey(("export", "reconstruction_output_3dtiles_lod13_nl")), AssetKey(("export", "reconstruction_output_3dtiles_lod22_nl")), ], required_resource_keys={"version"}, ) -def compressed_export_nl(context, reconstruction_output_multitiles_nl): +def compressed_export_nl(context, metadata): """Create a compressed tar.gz archive containing the complete 3D BAG export. The archive will be named `export_.tar.gz`. + Args: context: Dagster execution context - reconstruction_output_multitiles_nl: Path to the exported data directory + metadata: Path to the 3DBAG metadata file Returns: Output: Path to the created export_{version}.tar.gz file with size metadata """ - export_dir = reconstruction_output_multitiles_nl + export_dir = metadata.parent version = context.resources.version.version output_tarfile = export_dir.parent / f"export_{version}.tar.gz" with tarfile.open(output_tarfile, "w:gz") as tar: @@ -56,7 +57,7 @@ def transfer_to_server( compressed_export_nl: Path, metadata: Path, target_dir: str, -) -> str: +) -> tuple[Path, Path]: """Transfer and extract export file to a remote server. 
Args: @@ -66,7 +67,7 @@ def transfer_to_server( target_dir: Base directory on remote server for deployment Returns: - str: Path to the deployment directory on the remote server + (Path to the deployment directory on the remote server, Path to the compressed export on the remote server) Raises: AssertionError: If SSH commands fail during transfer or extraction @@ -76,11 +77,11 @@ def transfer_to_server( with metadata.open("r") as fo: metadata_json = json.load(fo) version = metadata_json["identificationInfo"]["citation"]["edition"] - deploy_dir = f"{target_dir}/{version}" + deploy_dir = Path(target_dir) / version compressed_file = Path(target_dir) / compressed_export_nl.name try: - with server.connect as c: + with server.connection as c: # test connection result = c.run("echo connected", hide=True) assert result.ok, "Connection command failed" @@ -106,50 +107,47 @@ def transfer_to_server( except Exception as e: logger.error(f"SSH connection failed: {e}") raise - return deploy_dir + return deploy_dir, compressed_file @asset( ins={"metadata": AssetIn(key_prefix="export")}, - required_resource_keys={"podzilla_server"}, ) def transfer_to_podzilla( - context, - compressed_export_nl: Path, - metadata: Path, + compressed_export_nl: Path, metadata: Path, podzilla_server: ServerTransferResource ): """Transfer the 3D BAG export to the podzilla server for API access.""" return transfer_to_server( - context.resources.podzilla_server, + podzilla_server, compressed_export_nl, metadata, - context.resources.podzilla_server.target_dir, + podzilla_server.target_dir, ) @asset( ins={"metadata": AssetIn(key_prefix="export")}, - required_resource_keys={"godzilla_server"}, ) def transfer_to_godzilla( - context, - compressed_export_nl: Path, - metadata: Path, + compressed_export_nl: Path, metadata: Path, godzilla_server: ServerTransferResource ): """Transfer the 3D BAG export to the godzilla server for public downloads and webservices.""" return transfer_to_server( - context.resources.godzilla_server, + godzilla_server, compressed_export_nl, metadata, - context.resources.godzilla_server.target_dir, + godzilla_server.target_dir, ) @asset( deps={AssetKey(("transfer_to_godzilla"))}, - required_resource_keys={"db_connection", "godzilla_server"}, ) -def webservice_godzilla(context, transfer_to_godzilla): +def webservice_godzilla( + transfer_to_godzilla, + db_connection: DatabaseResource, + godzilla_server: ServerTransferResource, +): """ Load the layers for WFS, WMS to the database on Godzilla. 
The layers will be loaded into the schema `webservice_dev` and @@ -158,8 +156,8 @@ def webservice_godzilla(context, transfer_to_godzilla): """ schema = "webservice_dev" sql = f"drop schema if exists {schema} cascade; create schema {schema};" - with context.resources.godzilla_server.connect as c: - context.log.debug(sql) + with godzilla_server.connection as c: + logger.debug(sql) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'" ) @@ -185,10 +183,10 @@ def webservice_godzilla(context, transfer_to_godzilla): layer + "_tmp", ] ) - with context.resources.godzilla_server.connect as c: - context.log.debug(cmd) + with godzilla_server.connection as c: + logger.debug(cmd) r = c.run(cmd) - context.log.debug(r.stdout) + logger.debug(r.stdout) pand_table = PostgresTableIdentifier(schema, "pand_tmp") lod12_2d_tmp = PostgresTableIdentifier(schema, "lod12_2d_tmp") @@ -211,9 +209,9 @@ def webservice_godzilla(context, transfer_to_godzilla): "lod22_2d": lod22_2d, }, ) - sql = context.resources.db_connection.connect.print_query(sql) - with context.resources.godzilla_server.connect as c: - context.log.debug(sql) + sql = db_connection.connect.print_query(sql) + with godzilla_server.connection as c: + logger.debug(sql) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'" ) @@ -230,15 +228,15 @@ def webservice_godzilla(context, transfer_to_godzilla): "validate_compressed_files": validate_compressed_files, }, ) - sql = context.resources.db_connection.connect.print_query(sql) - with context.resources.godzilla_server.connect as c: - context.log.debug(sql) + sql = db_connection.connect.print_query(sql) + with godzilla_server.connection as c: + logger.debug(sql) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'" ) # Load the CSV files into the intermediary tables - with context.resources.godzilla_server.connect as c: + with godzilla_server.connection as c: filepath = f"{deploy_dir}/export_index.csv" copy_cmd = ( "\copy " @@ -247,7 +245,7 @@ def webservice_godzilla(context, transfer_to_godzilla): + filepath + "' DELIMITER ',' CSV HEADER " ) - context.log.debug(f"{copy_cmd}") + logger.debug(f"{copy_cmd}") c.run( rf'psql --dbname baseregisters --port 5432 --host localhost --user etl -c "{copy_cmd}" ' ) @@ -259,7 +257,7 @@ def webservice_godzilla(context, transfer_to_godzilla): + filepath + "' DELIMITER ',' CSV HEADER " ) - context.log.debug(f"{copy_cmd}") + logger.debug(f"{copy_cmd}") c.run( rf'psql --dbname baseregisters --port 5432 --host localhost --user etl -c "{copy_cmd}" ' ) @@ -274,9 +272,9 @@ def webservice_godzilla(context, transfer_to_godzilla): "validate_compressed_files": validate_compressed_files, }, ) - sql = context.resources.db_connection.connect.print_query(sql) - with context.resources.godzilla_server.connect as c: - context.log.debug(sql) + sql = db_connection.connect.print_query(sql) + with godzilla_server.connection as c: + logger.debug(sql) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{sql}'" ) @@ -284,12 +282,12 @@ def webservice_godzilla(context, transfer_to_godzilla): grant_usage = f"GRANT USAGE ON SCHEMA {schema} TO bag_geoserver;" grant_select = f"GRANT SELECT ON ALL TABLES IN SCHEMA {schema} TO bag_geoserver;" - with context.resources.godzilla_server.connect as c: - context.log.debug(grant_usage) + with godzilla_server.connection as c: + logger.debug(grant_usage) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c 
'{grant_usage}'" ) - context.log.debug(grant_select) + logger.debug(grant_select) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{grant_select}'" ) diff --git a/packages/core/src/bag3d/core/assets/export/archive.py b/packages/core/src/bag3d/core/assets/export/archive.py index 4092247d..4b810fe9 100644 --- a/packages/core/src/bag3d/core/assets/export/archive.py +++ b/packages/core/src/bag3d/core/assets/export/archive.py @@ -132,35 +132,6 @@ def create_path_layer(id_layer, path_tiles_dir): return path_lod12_2d -class CompressionConfig(Config): - concurrency: int - - -@asset( - deps={ - AssetKey("geopackage_nl"), - }, - required_resource_keys={"file_store", "version"}, -) -def compressed_tiles(context, config: CompressionConfig, export_index): - """Each format is gzipped individually in each tile, for better transfer over the - web. The OBJ files are collected into a single .zip file.""" - path_export_dir = bag3d_export_dir( - context.resources.file_store.file_store.data_dir, - version=context.resources.version.version, - ) - path_tiles_dir = path_export_dir.joinpath("tiles") - logger.info(f"Compressing files in {path_tiles_dir}") - with export_index.open("r") as fo: - csvreader = csv.reader(fo) - _ = next(csvreader) # skip header - tile_ids = tuple((row[0], path_tiles_dir) for row in csvreader) - - with ProcessPoolExecutor(max_workers=config.concurrency) as executor: - for result in executor.map(compress_files, tile_ids): - pass - - def compress_files(input): tile_id, path_tiles_dir = input logger.debug(f"Compressing tile {tile_id}") @@ -197,3 +168,31 @@ def compress_files(input): gpkg_file.unlink() else: logger.warning(f"GPKG file {gpkg_file} does not exist, skipping compression.") + + +class CompressionConfig(Config): + concurrency: int + + +@asset( + deps={ + AssetKey("geopackage_nl"), + }, + required_resource_keys={"file_store", "version"}, +) +def compressed_tiles(context, config: CompressionConfig, export_index): + """Each format is gzipped individually in each tile, for better transfer over the + web. The OBJ files are collected into a single .zip file.""" + path_export_dir = bag3d_export_dir( + context.resources.file_store.file_store.data_dir, + version=context.resources.version.version, + ) + path_tiles_dir = path_export_dir.joinpath("tiles") + with export_index.open("r") as fo: + csvreader = csv.reader(fo) + _ = next(csvreader) # skip header + tile_ids = tuple((row[0], path_tiles_dir) for row in csvreader) + + with ProcessPoolExecutor(max_workers=config.concurrency) as executor: + for result in executor.map(compress_files, tile_ids): + pass diff --git a/packages/core/src/bag3d/core/assets/export/metadata.py b/packages/core/src/bag3d/core/assets/export/metadata.py index 242ee566..6d99269b 100644 --- a/packages/core/src/bag3d/core/assets/export/metadata.py +++ b/packages/core/src/bag3d/core/assets/export/metadata.py @@ -161,7 +161,7 @@ def feature_evaluation(context): deps={AssetKey(("export", "reconstruction_output_multitiles_nl"))}, required_resource_keys={"file_store", "version"}, ) -def export_index(context): +def export_index(context) -> Path: """Index of the distribution tiles. 
Parses the quadtree.tsv file output by *tyler* and checks if all formats exist for diff --git a/packages/core/src/bag3d/core/assets/release/publish.py b/packages/core/src/bag3d/core/assets/release/publish.py index dfdece0e..cacc282e 100644 --- a/packages/core/src/bag3d/core/assets/release/publish.py +++ b/packages/core/src/bag3d/core/assets/release/publish.py @@ -13,31 +13,28 @@ @asset( - deps={AssetKey(("deploy", "transfer_to_godzilla"))}, ins={ "metadata": AssetIn(key_prefix="export"), - "compressed_export_nl": AssetIn(key_prefix="deploy"), + "transfer_to_godzilla": AssetIn(key_prefix="deploy"), }, required_resource_keys={"godzilla_server"}, ) def publish_data( context, - compressed_export_nl: Path, + transfer_to_godzilla: tuple[Path, Path], metadata: Path, ): """On godzilla, create symlink to the 'export' to the current version and add the current version to the tar.gz archive. """ - data_dir: str = context.resources.godzilla_server.target_dir - public_dir: str = context.resources.godzilla_server.target_dir + public_dir: str = context.resources.godzilla_server.public_dir + deploy_dir, compressed_file = transfer_to_godzilla with metadata.open("r") as fo: metadata_json = json.load(fo) version = metadata_json["identificationInfo"]["citation"]["edition"] - deploy_dir = f"{data_dir}/{version}" - compressed_file = Path(data_dir) / compressed_export_nl.name try: - with context.resources.godzilla_server.connect as c: + with context.resources.godzilla_server.connection as c: # test connection result = c.run("echo connected", hide=True) assert result.ok, "Connection command failed" @@ -56,6 +53,14 @@ def publish_data( result = c.run(f"ln -s {deploy_dir} {public_dir}/{version_nopoints}") assert result.ok, "Creating symlink failed" + logger.debug( + f"Setting published version {version_nopoints} to latest version" + ) + result = c.run(f"rm -f {public_dir}/latest") + assert result.ok, "Removing public/latest symlink failed" + result = c.run(f"ln -s {public_dir}/{version_nopoints} {public_dir}/latest") + assert result.ok, "Setting latest version failed" + logger.debug(f"Removing compressed file {compressed_file}") result = c.run(f"rm {compressed_file}") assert result.ok, "Removing compressed file failed" @@ -84,7 +89,7 @@ def publish_webservices(context): alter_dev_to_latest = f"ALTER SCHEMA {dev_schema} RENAME TO {latest_schema};" try: - with context.resources.godzilla_server.connect as c: + with context.resources.godzilla_server.connection as c: context.log.debug(alter_latest_to_archive) c.run( f"psql --dbname baseregisters --port 5432 --host localhost --user etl -c '{alter_latest_to_archive}'" diff --git a/packages/core/tests/conftest.py b/packages/core/tests/conftest.py index 801f196a..a99c07e8 100644 --- a/packages/core/tests/conftest.py +++ b/packages/core/tests/conftest.py @@ -27,21 +27,45 @@ @pytest.fixture(scope="session") -def godzilla_server(): - yield ServerTransferResource( - host="godzilla", - user="", - target_dir="/tmp", - public_dir="/tmp/3dbag_public", +def deployment_server(): + """Connection to the dockerized deployment setup. + The dockerized deployment setup is in the 3dbag-admin repo and it needs to be + managed manually, similar to the 3dbag-pipeline docker setup. + These credentials provide access to the ``deployment-server`` service of the + deployment setup. 
+ """ + server = ServerTransferResource( + host="3dbag.docker.internal", + port=2222, + user="deploy", + password="deploy", + target_dir="/data/3DBAG", + public_dir="/data/3DBAG/public", ) + yield server + # + # with server.connection as conn: + # conn.run(f"rm -rf {server.target_dir}") + # conn.run(f"rm -rf {server.public_dir}") + # conn.run(f"mkdir -p {server.target_dir}") + # conn.run(f"mkdir -p {server.public_dir}") + + +@pytest.fixture(scope="session") +def godzilla_server(deployment_server): + yield deployment_server + @pytest.fixture(scope="session") def podzilla_server(): yield ServerTransferResource( - host="podzilla", - user="gstavropoulou", + host="3dbag.docker.internal", + port=2222, + user="deploy", + password="deploy", target_dir="/tmp", + public_dir="/tmp/podzilla_public", ) @@ -190,6 +214,12 @@ def pytest_addoption(parser): parser.addoption( "--run-slow", action="store_true", default=False, help="run slow tests" ) + parser.addoption( + "--run-deploy", + action="store_true", + default=False, + help="run deployment tests that require the dockerized deployment setup", + ) parser.addoption( "--run-all", action="store_true", @@ -203,6 +233,9 @@ def pytest_configure(config): config.addinivalue_line( "markers", "needs_tools: mark test as needing local builds of tools" ) + config.addinivalue_line( + "markers", "needs_deploy: mark test as needing the dockerized deployment setup" + ) def pytest_collection_modifyitems(config, items): @@ -218,6 +251,14 @@ def pytest_collection_modifyitems(config, items): if "needs_tools" in item.keywords: item.add_marker(skip_needs_tools) + if not config.getoption("--run-deploy"): # pragma: no cover + skip_needs_deploy = pytest.mark.skip( + reason="needs the --run-deploy option to run" + ) + for item in items: + if "needs_deploy" in item.keywords: + item.add_marker(skip_needs_deploy) + @pytest.fixture(scope="session") def test_data_dir(): @@ -361,3 +402,189 @@ def handle_output(self, context, obj): # pragma: no cover key=AssetKey(["ahn", "metadata_ahn5"]), io_manager_def=MockIOManager(), ) + + +@pytest.fixture(scope="session") +def mock_asset_compressed_tiles(): + class MockIOManager(IOManager): + def load_input(self, context): + return None + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "compressed_tiles"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_compressed_tiles_validation(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "validate_compressed_files.csv" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "compressed_tiles_validation"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_export_index(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "export_index.csv" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "export_index"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_geopackage_nl(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, 
context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "3dbag_nl.gpkg.zip" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "geopackage_nl"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_metadata(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "metadata.json" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "metadata"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_reconstruction_output_3dtiles_lod12_nl(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "cesium3dtiles" + / "lod12" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "reconstruction_output_3dtiles_lod12_nl"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_reconstruction_output_3dtiles_lod13_nl(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "cesium3dtiles" + / "lod13" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "reconstruction_output_3dtiles_lod13_nl"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_reconstruction_output_3dtiles_lod22_nl(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "cesium3dtiles" + / "lod22" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "reconstruction_output_3dtiles_lod22_nl"]), + io_manager_def=MockIOManager(), + ) + + +@pytest.fixture(scope="session") +def mock_asset_reconstruction_output_multitiles_nl(test_data_dir): + class MockIOManager(IOManager): + def load_input(self, context): + return ( + test_data_dir + / "integration_deploy_release" + / "3DBAG" + / "export_test_version" + / "tiles" + ) + + def handle_output(self, context, obj): # pragma: no cover + raise NotImplementedError() + + return SourceAsset( + key=AssetKey(["export", "reconstruction_output_multitiles_nl"]), + io_manager_def=MockIOManager(), + ) diff --git a/packages/core/tests/test_assets_deploy.py b/packages/core/tests/test_assets_deploy.py index ab31b42e..c7245cfb 100644 --- a/packages/core/tests/test_assets_deploy.py +++ b/packages/core/tests/test_assets_deploy.py @@ -1,17 +1,10 @@ -from bag3d.core.assets.deploy.servers import ( - compressed_export_nl, - transfer_to_godzilla, - transfer_to_podzilla, -) +from bag3d.core.assets.deploy.servers import compressed_export_nl, transfer_to_server from pathlib import Path import pytest -@pytest.mark.skip( - reason="Skip until refactor so that it transfers to a docker container instead of our server, because we should not modify the 
state of the world outside the test environment"
-)
-@pytest.mark.needs_tools
-def test_transfer_to_podzilla(context, test_data_dir):
+@pytest.mark.skip("included in integration test")
+def test_transfer_to_server(context, deployment_server, test_data_dir):
     # Create deployment dir
     export_dir = test_data_dir / "deployment" / "3DBAG" / "export_test_version"
     export_dir.mkdir(parents=True, exist_ok=True)
@@ -34,79 +27,22 @@
         assert compressed_file.exists()  # Check that the file was created

         # Test the transfer to the deployment server
-        res = transfer_to_podzilla(
-            context,
-            compressed_file,
-            metadata_file,
+        res = transfer_to_server(
+            server=deployment_server,
+            compressed_export_nl=compressed_file,
+            metadata=metadata_file,
+            target_dir=deployment_server.target_dir,
         )
+        deploy_dir, _ = res  # transfer_to_server returns (deploy_dir, compressed_file)
         assert (
-            res == f"{context.resources.podzilla_server.target_dir}/test_version"
+            str(deploy_dir) == f"{deployment_server.target_dir}/test_version"
         )  # Check that the function returns a value
-    finally:
-        # Clean up the test files
-        compressed_file.unlink(missing_ok=True)
-        metadata_file.unlink(missing_ok=True)
-        empty_file.unlink(missing_ok=True)
-        export_dir.rmdir()
-        with context.resources.podzilla_server.connect as c:
-            c.run(
-                f"rm -rf {context.resources.podzilla_server.target_dir}/test_version",
-                warn=True,
-            )
-            c.run(
-                f"rm -f {context.resources.podzilla_server.target_dir}/export_test_version.tar.gz",
-                warn=True,
-            )
-
-
-@pytest.mark.skip(
-    reason="Skip until refactor so that it transfers to a docker container instead of our server, because we should not modify the state of the world outside the test environment"
-)
-@pytest.mark.needs_tools
-def test_transfer_to_godzilla(context, test_data_dir):
-    # Create deployment dir
-    export_dir = test_data_dir / "deployment" / "3DBAG" / "export_test_version"
-    export_dir.mkdir(parents=True, exist_ok=True)
-
-    # Create an empty file within the directory
-    empty_file = export_dir / "dummy.txt"
-    empty_file.touch()
-
-    # Create a mock metadata file
-    metadata_file = test_data_dir / "deployment" / "3DBAG" / "metadata.json"
-    metadata_file.touch()
-    metadata_file.write_text(
-        '{"identificationInfo": {"citation": {"edition": "test_version"}}}'
-    )
-    try:
-        # compress the export dir
-        res = compressed_export_nl(context, export_dir)
-
-        compressed_file = Path(res.metadata["path"].text)
-        assert compressed_file.exists()  # Check that the file was created
-
-        # Test the transfer to godzilla
-        res = transfer_to_godzilla(
-            context,
-            compressed_file,
-            metadata_file,
+        assert deployment_server.file_exists(
+            f"{deployment_server.target_dir}/test_version/dummy.txt"
         )
-        assert (
-            res == f"{context.resources.godzilla_server.target_dir}/test_version"
-        )  # Check that the function returns a value
     finally:
         # Clean up the test files
         compressed_file.unlink(missing_ok=True)
         metadata_file.unlink(missing_ok=True)
         empty_file.unlink(missing_ok=True)
         export_dir.rmdir()
-        with context.resources.godzilla_server.connect as c:
-            c.run(
-                f"rm -rf {context.resources.godzilla_server.target_dir}/test_version",
-                warn=True,
-            )
-            c.run(f"rm -rf {context.resources.godzilla_server.public_dir}", warn=True)
-            c.run(
-                f"rm -f {context.resources.godzilla_server.target_dir}/export_test_version.tar.gz",
-                warn=True,
-            )
diff --git a/packages/core/tests/test_integration.py b/packages/core/tests/test_integration.py
index 7508c624..d41e6ba0 100644
--- a/packages/core/tests/test_integration.py
+++ b/packages/core/tests/test_integration.py
@@ -13,7 +13,7 @@
 )
 from
bag3d.common.resources.files import FileStoreResource from bag3d.common.resources.version import VersionResource -from bag3d.core.assets import export, reconstruction, ahn +from bag3d.core.assets import export, reconstruction, ahn, deploy, release from bag3d.core.jobs import ( job_nl_export, job_nl_export_after_floors, @@ -23,6 +23,8 @@ job_ahn4, job_ahn5, job_ahn_metadata_index, + job_nl_deploy, + job_nl_release, ) from dagster import ( AssetKey, @@ -226,3 +228,71 @@ def test_integration_reconstruction_and_export( assert isinstance(result, ExecuteInProcessResult) assert result.success + + +@pytest.mark.needs_deploy +def test_integration_deploy_release( + test_data_dir, + godzilla_server, + podzilla_server, + database, + mock_asset_compressed_tiles, + mock_asset_compressed_tiles_validation, + mock_asset_export_index, + mock_asset_geopackage_nl, + mock_asset_metadata, + mock_asset_reconstruction_output_3dtiles_lod12_nl, + mock_asset_reconstruction_output_3dtiles_lod13_nl, + mock_asset_reconstruction_output_3dtiles_lod22_nl, + mock_asset_reconstruction_output_multitiles_nl, +): + """Can we deploy and release the 3DBAG, everything included?""" + + resources = { + "version": VersionResource("test_version"), + "godzilla_server": godzilla_server, + "podzilla_server": podzilla_server, + "db_connection": database, + } + + all_deploy_assets = load_assets_from_package_module( + deploy, key_prefix="deploy", group_name="deploy" + ) + + all_release_assets = load_assets_from_package_module( + release, key_prefix="release", group_name="release" + ) + + defs = Definitions( + resources=resources, + assets=[ + mock_asset_compressed_tiles, + mock_asset_compressed_tiles_validation, + mock_asset_export_index, + mock_asset_geopackage_nl, + mock_asset_metadata, + mock_asset_reconstruction_output_3dtiles_lod12_nl, + mock_asset_reconstruction_output_3dtiles_lod13_nl, + mock_asset_reconstruction_output_3dtiles_lod22_nl, + mock_asset_reconstruction_output_multitiles_nl, + *all_deploy_assets, + *all_release_assets, + ], + jobs=[job_nl_deploy, job_nl_release], + ) + + with DagsterInstance.ephemeral() as instance: + resolved_job = defs.get_job_def("nl_deploy") + result = resolved_job.execute_in_process(instance=instance, resources=resources) + + assert isinstance(result, ExecuteInProcessResult) + assert result.success + + resolved_job = defs.get_job_def("nl_release") + result = resolved_job.execute_in_process( + instance=instance, + resources=resources, + ) + + assert isinstance(result, ExecuteInProcessResult) + assert result.success
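For reference, a minimal sketch of how the extended ServerTransferResource is driven, assuming the dockerized deployment setup from the 3dbag-admin repo is running and reachable with the same host, port, and credentials as the `deployment_server` fixture in conftest.py:

    from bag3d.common.resources import ServerTransferResource

    server = ServerTransferResource(
        host="3dbag.docker.internal",
        port=2222,
        user="deploy",
        password="deploy",  # key_filename takes precedence over password when both are set
        target_dir="/data/3DBAG",
        public_dir="/data/3DBAG/public",
    )

    # transfer_file uploads via SFTP and then verifies the upload with `test -f`
    assert server.transfer_file(
        "/tmp/export_test_version.tar.gz",
        f"{server.target_dir}/export_test_version.tar.gz",
    )
    assert server.file_exists(f"{server.target_dir}/export_test_version.tar.gz")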
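The archive step in compressed_export_nl now derives the export directory from the metadata file rather than from the multitiles output. A sketch of that step under stated assumptions: the paths and version are illustrative, and the arcname is an assumption, since the hunk does not show what gets added to the tarfile.

    import tarfile
    from pathlib import Path

    metadata = Path("/data/3DBAG/export_test_version/metadata.json")  # illustrative
    export_dir = metadata.parent
    version = "test_version"
    # The archive lands next to the export directory, named export_{version}.tar.gz
    output_tarfile = export_dir.parent / f"export_{version}.tar.gz"
    with tarfile.open(output_tarfile, "w:gz") as tar:
        tar.add(export_dir, arcname=export_dir.name)  # arcname is an assumption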
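webservice_godzilla repeatedly shells out to psql over the fabric connection, one round-trip per statement. The hypothetical helper below (not part of the patch) condenses the pattern; database name, port, and user are the ones hard-coded in the asset.

    from bag3d.common.resources import ServerTransferResource

    def run_remote_sql(server: ServerTransferResource, sql: str) -> None:
        # The context manager opens the SSH connection and closes it on exit.
        with server.connection as c:
            c.run(
                "psql --dbname baseregisters --port 5432 --host localhost "
                f"--user etl -c '{sql}'"
            )

    # e.g. the schema reset at the top of webservice_godzilla:
    # run_remote_sql(godzilla_server,
    #     "drop schema if exists webservice_dev cascade; create schema webservice_dev;")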
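compressed_tiles fans the per-tile compression out over a process pool. A sketch of the fan-out, with illustrative tile ids; draining the `executor.map` iterator is what re-raises any exception from a worker, which is why the asset loops over the results and discards them.

    from concurrent.futures import ProcessPoolExecutor
    from pathlib import Path

    from bag3d.core.assets.export.archive import compress_files

    path_tiles_dir = Path("/data/3DBAG/export_test_version/tiles")  # illustrative
    tile_ids = (("10-282-560", path_tiles_dir), ("10-284-562", path_tiles_dir))

    with ProcessPoolExecutor(max_workers=4) as executor:
        # Iterating surfaces worker exceptions in the parent process.
        for _ in executor.map(compress_files, tile_ids):
            pass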
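publish_data now repoints `public/latest` with a remove-then-link pair. A hypothetical helper (`point_latest_to` is not in the patch) showing those two commands; a single `ln -sfn` would replace the pair near-atomically, at the cost of diverging from what the asset currently runs.

    from bag3d.common.resources import ServerTransferResource

    def point_latest_to(server: ServerTransferResource, version_nopoints: str) -> None:
        public_dir = server.public_dir
        with server.connection as c:
            # Mirrors publish_data: drop the old pointer, then recreate it.
            assert c.run(f"rm -f {public_dir}/latest", warn=True).ok
            assert c.run(f"ln -s {public_dir}/{version_nopoints} {public_dir}/latest").ok
            # Alternative: c.run(f"ln -sfn {public_dir}/{version_nopoints} {public_dir}/latest")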
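A test opts into the deployment environment with the new marker. Without `--run-deploy`, pytest_collection_modifyitems attaches a skip marker to everything tagged `needs_deploy`, so a test like the sketch below (hypothetical test name) only runs via `make test_deploy`, which passes `--run-all --run-deploy`:

    import pytest

    @pytest.mark.needs_deploy
    def test_deployment_server_reachable(deployment_server):
        # Same connectivity check the deploy assets perform before transferring.
        with deployment_server.connection as c:
            assert c.run("echo connected", hide=True).ok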
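The eight session-scoped mock fixtures in conftest.py all follow one pattern: a SourceAsset whose IO manager hands a fixed payload to downstream assets. A hypothetical factory (`make_mock_source_asset` is not in the PR; the pattern is) that condenses it:

    from dagster import AssetKey, IOManager, SourceAsset

    def make_mock_source_asset(key_path, payload):
        class MockIOManager(IOManager):
            def load_input(self, context):
                # Downstream assets receive the fixed payload as their input.
                return payload

            def handle_output(self, context, obj):  # pragma: no cover
                raise NotImplementedError()

        return SourceAsset(key=AssetKey(key_path), io_manager_def=MockIOManager())

    # e.g. the metadata fixture becomes:
    # mock_asset_metadata = make_mock_source_asset(
    #     ["export", "metadata"],
    #     test_data_dir / "integration_deploy_release" / "3DBAG"
    #     / "export_test_version" / "metadata.json",
    # )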