Commit a0d2395

Solve many more issues
1 parent bf82225 commit a0d2395

48 files changed: 271 additions & 255 deletions

tests/apps/pathology/test_sliding_window_hovernet_inference.py

Lines changed: 29 additions & 36 deletions

@@ -21,7 +21,7 @@
 from monai.data import MetaTensor
 from monai.inferers import sliding_window_inference
 from monai.utils import optional_import
-from tests.test_sliding_window_inference import TEST_CASES
+from tests.inferers.test_sliding_window_inference import TEST_CASES
 
 _, has_tqdm = optional_import("tqdm")
 
@@ -36,7 +36,6 @@
 
 
 class TestSlidingWindowHoVerNetInference(unittest.TestCase):
-
     @parameterized.expand(TEST_CASES_PADDING)
     def test_sliding_window_with_padding(
         self, key, image_shape, roi_shape, sw_batch_size, overlap, mode, device, extra_input_padding
@@ -122,21 +121,19 @@ def compute(self, data):
             sigma_scale=1.0,
         )
 
-        expected = np.array(
+        expected = np.array([
             [
                 [
-                    [
-                        [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],
-                        [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],
-                        [3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333],
-                        [3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667],
-                        [4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333],
-                        [4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000],
-                        [5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000],
-                    ]
+                    [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],
+                    [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],
+                    [3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333],
+                    [3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667],
+                    [4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333],
+                    [4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000],
+                    [5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000],
                 ]
             ]
-        )
+        ])
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
         result = sliding_window_inference(
             inputs,
@@ -150,31 +147,29 @@ def compute(self, data):
             sigma_scale=1.0,
             progress=has_tqdm,
         )
-        expected = np.array(
+        expected = np.array([
             [
                 [
-                    [
-                        [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],
-                        [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],
-                        [3.3271625, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271625],
-                        [3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377],
-                        [4.3271623, 4.3271623, 4.3271627, 4.3271627, 4.3271627, 4.3271623, 4.3271623],
-                        [4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757],
-                        [4.9999995, 5.0, 5.0, 5.0, 5.0, 5.0, 4.9999995],
-                    ]
+                    [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],
+                    [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],
+                    [3.3271625, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271625],
+                    [3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377],
+                    [4.3271623, 4.3271623, 4.3271627, 4.3271627, 4.3271627, 4.3271623, 4.3271623],
+                    [4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757],
+                    [4.9999995, 5.0, 5.0, 5.0, 5.0, 5.0, 4.9999995],
                 ]
             ]
-        )
+        ])
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
         result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="gaussian", sigma_scale=1.0)(
            inputs, _Pred().compute
         )
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
-        result = SlidingWindowHoVerNetInferer(
-            roi_shape, sw_batch_size, overlap=0.5, mode="gaussian", sigma_scale=[1.0, 1.0]
-        )(inputs, _Pred().compute)
+        result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="gaussian", sigma_scale=[1.0, 1.0])(
+            inputs, _Pred().compute
+        )
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
         result = SlidingWindowHoVerNetInferer(
@@ -205,9 +200,7 @@ def compute(data):
         expected = np.ones((1, 1, 3, 3)) * -6.0
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
-        result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1)(
-            inputs, compute
-        )
+        result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1)(inputs, compute)
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
     def test_args_kwargs(self):
@@ -245,9 +238,9 @@ def compute(data, test1, test2):
         expected = np.ones((1, 1, 3, 3)) + 2.0
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
-        result = SlidingWindowHoVerNetInferer(
-            roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1, progress=has_tqdm
-        )(inputs, compute, t1, test2=t2)
+        result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1, progress=has_tqdm)(
+            inputs, compute, t1, test2=t2
+        )
         np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)
 
     @parameterized.expand(TEST_CASES_MULTIOUTPUT)
@@ -287,9 +280,9 @@ def compute_dict(data):
         for rr, _ in zip(result_dict, expected_dict):
             np.testing.assert_allclose(result_dict[rr].cpu().numpy(), expected_dict[rr], rtol=1e-4)
 
-        result = SlidingWindowHoVerNetInferer(
-            roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1, progress=has_tqdm
-        )(inputs, compute)
+        result = SlidingWindowHoVerNetInferer(roi_shape, sw_batch_size, overlap=0.5, mode="constant", cval=-1, progress=has_tqdm)(
+            inputs, compute
+        )
         for rr, ee in zip(result, expected):
             np.testing.assert_allclose(rr.cpu().numpy(), ee, rtol=1e-4)
 

tests/bundle/test_bundle_ckpt_export.py

Lines changed: 8 additions & 16 deletions

@@ -15,6 +15,7 @@
 import os
 import tempfile
 import unittest
+from pathlib import Path
 
 from parameterized import parameterized
 
@@ -23,17 +24,9 @@
 from monai.networks import save_state
 from tests.utils import command_line_tests, skip_if_windows
 
-TEST_CASE_1 = [True, "", ""]
-
-TEST_CASE_2 = [True, "model", ""]
-
-TEST_CASE_3 = [True, "model", "True"]
-
-TEST_CASE_4 = [False, "", ""]
-
-TEST_CASE_5 = [False, "model", ""]
-
-TEST_CASE_6 = [False, "model", "True"]
+TEST_CASE_1 = ["", ""]
+TEST_CASE_2 = ["model", ""]
+TEST_CASE_3 = ["model", "True"]
 
 
 @skip_if_windows
@@ -42,8 +35,9 @@ def setUp(self):
         self.device = os.environ.get("CUDA_VISIBLE_DEVICES")
         if not self.device:
             os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # default
-        self.meta_file = os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json")
-        self.config_file = os.path.join(os.path.dirname(__file__), "testing_data", "inference.json")
+        module_path = Path(__file__).resolve().parents[1].as_posix()
+        self.meta_file = os.path.join(module_path, "testing_data", "metadata.json")
+        self.config_file = os.path.join(module_path, "testing_data", "inference.json")
         self.tempdir_obj = tempfile.TemporaryDirectory()
         tempdir = self.tempdir_obj.name
         self.def_args = {"meta_file": "will be replaced by `meta_file` arg"}
@@ -81,9 +75,7 @@ def test_export(self, key_in_ckpt, use_trace):
         command_line_tests(full_cmd)
         self.assertTrue(os.path.exists(self.ts_file))
 
-        _, metadata, extra_files = load_net_with_metadata(
-            self.ts_file, more_extra_files=["inference.json", "def_args.json"]
-        )
+        _, metadata, extra_files = load_net_with_metadata(self.ts_file, more_extra_files=["inference.json", "def_args.json"])
         self.assertIn("schema", metadata)
         self.assertIn("meta_file", json.loads(extra_files["def_args.json"]))
         self.assertIn("network_def", json.loads(extra_files["inference.json"]))

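The setUp() change above swaps the os.path.dirname(__file__) lookups for a pathlib-based one, apparently so that the shared testing_data folder under tests/ is still found now that the module sits one level deeper, in tests/bundle/. A minimal sketch of that pattern (illustrative names, not repository code):

# Sketch only: assumes this file sits at <repo>/tests/bundle/<module>.py and the
# shared data at <repo>/tests/testing_data/, as the paths in this commit suggest.
import os
from pathlib import Path

module_path = Path(__file__).resolve().parents[1].as_posix()  # one level up: <repo>/tests
meta_file = os.path.join(module_path, "testing_data", "metadata.json")
config_file = os.path.join(module_path, "testing_data", "inference.json")
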
tests/bundle/test_bundle_trt_export.py

Lines changed: 7 additions & 5 deletions

@@ -15,6 +15,7 @@
 import os
 import tempfile
 import unittest
+from pathlib import Path
 
 from parameterized import parameterized
 
@@ -55,7 +56,6 @@
 @skip_if_quick
 @SkipIfBeforeComputeCapabilityVersion((7, 5))
 class TestTRTExport(unittest.TestCase):
-
     def setUp(self):
         self.device = os.environ.get("CUDA_VISIBLE_DEVICES")
         if not self.device:
@@ -70,8 +70,9 @@ def tearDown(self):
     @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])
     @unittest.skipUnless(has_torchtrt and has_tensorrt, "Torch-TensorRT is required for conversion!")
     def test_trt_export(self, convert_precision, input_shape, dynamic_batch):
-        meta_file = os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json")
-        config_file = os.path.join(os.path.dirname(__file__), "testing_data", "inference.json")
+        tests_dir = Path(__file__).resolve().parent
+        meta_file = os.path.join(tests_dir, "testing_data", "metadata.json")
+        config_file = os.path.join(tests_dir, "testing_data", "inference.json")
         with tempfile.TemporaryDirectory() as tempdir:
             def_args = {"meta_file": "will be replaced by `meta_file` arg"}
             def_args_file = os.path.join(tempdir, "def_args.yaml")
@@ -107,8 +108,9 @@ def test_trt_export(self, convert_precision, input_shape, dynamic_batch):
        has_onnx and has_torchtrt and has_tensorrt, "Onnx and TensorRT are required for onnx-trt conversion!"
    )
    def test_onnx_trt_export(self, convert_precision, input_shape, dynamic_batch):
-        meta_file = os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json")
-        config_file = os.path.join(os.path.dirname(__file__), "testing_data", "inference.json")
+        tests_dir = Path(__file__).resolve().parent
+        meta_file = os.path.join(tests_dir, "testing_data", "metadata.json")
+        config_file = os.path.join(tests_dir, "testing_data", "inference.json")
         with tempfile.TemporaryDirectory() as tempdir:
             def_args = {"meta_file": "will be replaced by `meta_file` arg"}
             def_args_file = os.path.join(tempdir, "def_args.yaml")

tests/bundle/test_bundle_verify_metadata.py

Lines changed: 4 additions & 3 deletions

@@ -15,20 +15,21 @@
 import os
 import tempfile
 import unittest
+from pathlib import Path
 
 from parameterized import parameterized
 
 from monai.bundle import ConfigParser, verify_metadata
 from tests.utils import command_line_tests, download_url_or_skip_test, skip_if_windows, testing_data_config
 
-SCHEMA_FILE = os.path.join(os.path.dirname(__file__), "testing_data", "schema.json")
+TESTS_DIR = Path(__file__).parent.as_posix()
+SCHEMA_FILE = os.path.join(TESTS_DIR, "testing_data", "schema.json")
 
-TEST_CASE_1 = [os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json"), SCHEMA_FILE]
+TEST_CASE_1 = [os.path.join(TESTS_DIR, "testing_data", "metadata.json"), SCHEMA_FILE]
 
 
 @skip_if_windows
 class TestVerifyMetaData(unittest.TestCase):
-
     def setUp(self):
         self.config = testing_data_config("configs", "test_meta_file")
         download_url_or_skip_test(

tests/bundle/test_bundle_verify_net.py

Lines changed: 5 additions & 3 deletions

@@ -14,21 +14,23 @@
 import os
 import tempfile
 import unittest
+from pathlib import Path
 
 from parameterized import parameterized
 
 from monai.bundle import ConfigParser, verify_net_in_out
 from tests.utils import command_line_tests, skip_if_no_cuda, skip_if_windows
 
+TESTS_PATH = Path(__file__).parents[1].as_posix()
+
 TEST_CASE_1 = [
-    os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json"),
-    os.path.join(os.path.dirname(__file__), "testing_data", "inference.json"),
+    os.path.join(TESTS_PATH, "testing_data", "metadata.json"),
+    os.path.join(TESTS_PATH, "testing_data", "inference.json"),
 ]
 
 
 @skip_if_windows
 class TestVerifyNetwork(unittest.TestCase):
-
     @parameterized.expand([TEST_CASE_1])
     def test_verify(self, meta_file, config_file):
         with tempfile.TemporaryDirectory() as tempdir:

tests/bundle/test_bundle_workflow.py

Lines changed: 16 additions & 15 deletions

@@ -11,6 +11,7 @@
 
 from __future__ import annotations
 
+from pathlib import Path
 import os
 import shutil
 import sys
@@ -30,15 +31,15 @@
 from monai.transforms import Compose, LoadImage, LoadImaged, SaveImaged
 from tests.nonconfig_workflow import NonConfigWorkflow, PythonicWorkflowImpl
 
-MODULE_PATH = os.path.dirname(os.path.abspath(__file__).parents[2])
+MODULE_PATH = Path(__file__).resolve().parents[1]
 
-TEST_CASE_1 = [os.path.join(MODULE_PATH, "tests", "testing_data", "inference.json")]
+TEST_CASE_1 = [os.path.join(MODULE_PATH, "testing_data", "inference.json")]
 
-TEST_CASE_2 = [os.path.join(MODULE_PATH, "tests", "testing_data", "inference.yaml")]
+TEST_CASE_2 = [os.path.join(MODULE_PATH, "testing_data", "inference.yaml")]
 
-TEST_CASE_3 = [os.path.join(MODULE_PATH, "tests", "testing_data", "config_fl_train.json")]
+TEST_CASE_3 = [os.path.join(MODULE_PATH, "testing_data", "config_fl_train.json")]
 
-TEST_CASE_4 = [os.path.join(MODULE_PATH, "tests", "testing_data", "responsive_inference.json")]
+TEST_CASE_4 = [os.path.join(MODULE_PATH, "testing_data", "responsive_inference.json")]
 
 TEST_CASE_NON_CONFIG_WRONG_LOG = [None, "logging.conf", "Cannot find the logging config file: logging.conf."]
 
@@ -106,7 +107,7 @@ def test_inference_config(self, config_file):
         inferer = ConfigWorkflow(
             workflow_type="infer",
             config_file=config_file,
-            logging_file=os.path.join(os.path.dirname(__file__), "testing_data", "logging.conf"),
+            logging_file=os.path.join(MODULE_PATH, "testing_data", "logging.conf"),
             **override,
         )
         self._test_inferer(inferer)
@@ -115,8 +116,8 @@ def test_inference_config(self, config_file):
         inferer = ConfigWorkflow(
             config_file=config_file,
             workflow_type="infer",
-            properties_path=os.path.join(os.path.dirname(__file__), "testing_data", "fl_infer_properties.json"),
-            logging_file=os.path.join(os.path.dirname(__file__), "testing_data", "logging.conf"),
+            properties_path=os.path.join(MODULE_PATH, "testing_data", "fl_infer_properties.json"),
+            logging_file=os.path.join(MODULE_PATH, "testing_data", "logging.conf"),
             **override,
         )
         self._test_inferer(inferer)
@@ -131,7 +132,7 @@ def test_responsive_inference_config(self, config_file):
         inferer = ConfigWorkflow(
             workflow_type="infer",
             config_file=config_file,
-            logging_file=os.path.join(os.path.dirname(__file__), "testing_data", "logging.conf"),
+            logging_file=os.path.join(MODULE_PATH, "testing_data", "logging.conf"),
         )
         # FIXME: temp add the property for test, we should add it to some formal realtime infer properties
         inferer.add_property(name="dataflow", required=True, config_id="dataflow")
@@ -157,7 +158,7 @@ def test_train_config(self, config_file):
         trainer = ConfigWorkflow(
             workflow_type="train",
             config_file=config_file,
-            logging_file=os.path.join(os.path.dirname(__file__), "testing_data", "logging.conf"),
+            logging_file=os.path.join(MODULE_PATH, "testing_data", "logging.conf"),
             init_id="initialize",
             run_id="run",
             final_id="finalize",
@@ -203,8 +204,8 @@ def test_non_config_wrong_log_cases(self, meta_file, logging_file, expected_erro
     def test_pythonic_workflow(self):
         device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
         config_file = {"roi_size": (64, 64, 32)}
-        meta_file = os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json")
-        property_path = os.path.join(os.path.dirname(__file__), "testing_data", "python_workflow_properties.json")
+        meta_file = os.path.join(MODULE_PATH, "testing_data", "metadata.json")
+        property_path = os.path.join(MODULE_PATH, "testing_data", "python_workflow_properties.json")
         workflow = PythonicWorkflowImpl(
             workflow_type="infer", config_file=config_file, meta_file=meta_file, properties_path=property_path
         )
@@ -229,9 +230,9 @@ def test_pythonic_workflow(self):
     def test_create_pythonic_workflow(self):
         device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
         config_file = {"roi_size": (64, 64, 32)}
-        meta_file = os.path.join(os.path.dirname(__file__), "testing_data", "metadata.json")
-        property_path = os.path.join(os.path.dirname(__file__), "testing_data", "python_workflow_properties.json")
-        sys.path.append(os.path.dirname(__file__))
+        meta_file = os.path.join(MODULE_PATH, "testing_data", "metadata.json")
+        property_path = os.path.join(MODULE_PATH, "testing_data", "python_workflow_properties.json")
+        sys.path.append(MODULE_PATH)
         workflow = create_workflow(
             "nonconfig_workflow.PythonicWorkflowImpl",
             workflow_type="infer",

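The MODULE_PATH rewrite in test_bundle_workflow.py above follows the same relocation logic: parents[1] of a module in tests/bundle/ is the tests/ directory itself, which is presumably why the explicit "tests" component disappears from the TEST_CASE_* paths. A small self-contained check of that path arithmetic (hypothetical paths, pure-path operations only, nothing read from disk):

from pathlib import PurePosixPath

# Hypothetical location mirroring tests/bundle/test_bundle_workflow.py in this commit.
p = PurePosixPath("/repo/tests/bundle/test_bundle_workflow.py")
assert p.parent.as_posix() == "/repo/tests/bundle"  # the module's own folder
assert p.parents[1].as_posix() == "/repo/tests"  # what MODULE_PATH now points at
assert (p.parents[1] / "testing_data").as_posix() == "/repo/tests/testing_data"  # no extra "tests" segment needed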