Skip to content

Commit

Permalink
#37 Switched from multiprocessing.Pool to multiprocessing.pool.ThreadPool
Browse files Browse the repository at this point in the history
  • Loading branch information
carljohnsen committed Aug 7, 2024
1 parent 1aaac04 commit 6164f51
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 3 deletions.
3 changes: 2 additions & 1 deletion src/processing_steps/0600_segment_implant_cc.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from lib.cpp.cpu.io import load_slice
from lib.cpp.cpu.connected_components import largest_connected_component, connected_components
import multiprocessing as mp
from multiprocessing.pool import ThreadPool
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
Expand Down Expand Up @@ -100,7 +101,7 @@ def label_chunk(i, chunk_size, chunk_prefix, implant_threshold_u16, global_shape
return n_features

start = datetime.datetime.now()
with mp.Pool(n_cores) as pool:
with ThreadPool(n_cores) as pool:
label_chunk_partial = partial(label_chunk, chunk_size=layers_per_chunk, chunk_prefix=f"{intermediate_folder}/{sample}_", implant_threshold_u16=implant_threshold_u16, global_shape=(nz,ny,nx))
n_labels = pool.map(label_chunk_partial, range(n_chunks))
end = datetime.datetime.now()
Expand Down
3 changes: 2 additions & 1 deletion src/processing_steps/0750_bone_region.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from scipy.ndimage import gaussian_filter1d
import datetime
import multiprocessing as mp
from multiprocessing.pool import ThreadPool
from functools import partial

# close = dilate then erode
Expand Down Expand Up @@ -94,7 +95,7 @@ def largest_cc_of(mask, mask_name):
return (label == largest_cc_ix)
else:
start = datetime.datetime.now()
with mp.Pool(n_cores) as pool:
with ThreadPool(n_cores) as pool:
label_chunk_partial = partial(label_chunk, chunk_prefix=f"{intermediate_folder}/{sample}_")
chunks = [mask[i*layers_per_chunk:(i+1)*layers_per_chunk] for i in range(n_chunks-1)]
chunks.append(mask[(n_chunks-1)*layers_per_chunk:])
Expand Down
3 changes: 2 additions & 1 deletion src/processing_steps/1500_segment_blood_cc.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from lib.py.resample import downsample2x, downsample3x
import cupy as cp
import multiprocessing as mp
from multiprocessing.pool import ThreadPool
import datetime
from functools import partial

Expand Down Expand Up @@ -78,7 +79,7 @@ def label_chunk(i, chunk_size, chunk_prefix, global_shape):
return n_features

start = datetime.datetime.now()
with mp.Pool(n_cores) as pool:
with ThreadPool(n_cores) as pool:
label_chunk_partial = partial(label_chunk, chunk_size=layers_per_chunk, chunk_prefix=f"{intermediate_folder}/{sample}_", global_shape=(nz,ny,nx))
n_labels = pool.map(label_chunk_partial, range(n_chunks))
end = datetime.datetime.now()
Expand Down

0 comments on commit 6164f51

Please sign in to comment.