Skip to content

Commit ce9ee14

Browse files
committed
WIP: Rewrite post-processing
Simply use a threshold with no other constraints.
1 parent dd24d28 commit ce9ee14

File tree

1 file changed

+41
-76
lines changed

1 file changed

+41
-76
lines changed

nanshe_ipython.ipynb

Lines changed: 41 additions & 76 deletions
Original file line number · Diff line number · Diff line change
@@ -1195,15 +1195,7 @@
11951195
"### Postprocessing\n",
11961196
"\n",
11971197
"* `significance_threshold` (`float`): number of standard deviations below which values are included in the \"noise\" estimate\n",
1198-
"* `wavelet_scale` (`int`): scale of wavelet transform to apply (should be the same as the one used above)\n",
1199-
"* `noise_threshold` (`float`): number of units of \"noise\" above which something needs to be to be significant\n",
1200-
"* `accepted_region_shape_constraints` (`dict`): if ROIs don't match this, reduce the `wavelet_scale` once.\n",
1201-
"* `percentage_pixels_below_max` (`float`): upper bound on ratio of ROI pixels not at max intensity vs. all ROI pixels\n",
1202-
"* `min_local_max_distance` (`float`): minimum allowable euclidean distance between two ROIs maximum intensities\n",
1203-
"* `accepted_neuron_shape_constraints` (`dict`): shape constraints for ROI to be kept.\n",
1204-
"\n",
1205-
"* `alignment_min_threshold` (`float`): similarity measure of the intensity of two ROIs images used for merging.\n",
1206-
"* `overlap_min_threshold` (`float`): similarity measure of the masks of two ROIs used for merging."
1198+
"* `noise_threshold` (`float`): number of units of \"noise\" above which a value must lie to be considered significant"
12071199
]
12081200
},
12091201
{
@@ -1213,82 +1205,55 @@
12131205
"outputs": [],
12141206
"source": [
12151207
"significance_threshold = 3.0\n",
1216-
"wavelet_scale = 3\n",
1217-
"noise_threshold = 3.0\n",
1218-
"percentage_pixels_below_max = 0.8\n",
1219-
"min_local_max_distance = 16.0\n",
1220-
"\n",
1221-
"alignment_min_threshold = 0.6\n",
1222-
"overlap_min_threshold = 0.6\n",
1208+
"noise_threshold = 1.0\n",
12231209
"\n",
12241210
"\n",
12251211
"with suppress(KeyError):\n",
12261212
" del dask_store[subgroup_post]\n",
12271213
"zarr_store.require_group(subgroup_post)\n",
12281214
"\n",
12291215
"\n",
1230-
"imgs = dask_store._diskstore[subgroup_dict]\n",
1231-
"da_imgs = da.from_array(imgs, chunks=((1,) + imgs.shape[1:]))\n",
1232-
"\n",
1233-
"result = block_postprocess_data_parallel(client)(da_imgs,\n",
1234-
" **{\n",
1235-
" \"wavelet_denoising\" : {\n",
1236-
" \"estimate_noise\" : {\n",
1237-
" \"significance_threshold\" : significance_threshold\n",
1238-
" },\n",
1239-
" \"wavelet.transform\" : {\n",
1240-
" \"scale\" : wavelet_scale\n",
1241-
" },\n",
1242-
" \"significant_mask\" : {\n",
1243-
" \"noise_threshold\" : noise_threshold\n",
1244-
" },\n",
1245-
" \"accepted_region_shape_constraints\" : {\n",
1246-
" \"major_axis_length\" : {\n",
1247-
" \"min\" : 0.0,\n",
1248-
" \"max\" : 25.0\n",
1249-
" }\n",
1250-
" },\n",
1251-
" \"remove_low_intensity_local_maxima\" : {\n",
1252-
" \"percentage_pixels_below_max\" : percentage_pixels_below_max\n",
1253-
" },\n",
1254-
" \"remove_too_close_local_maxima\" : {\n",
1255-
" \"min_local_max_distance\" : min_local_max_distance\n",
1256-
" },\n",
1257-
" \"accepted_neuron_shape_constraints\" : {\n",
1258-
" \"area\" : {\n",
1259-
" \"min\" : 25,\n",
1260-
" \"max\" : 600\n",
1261-
" },\n",
1262-
" \"eccentricity\" : {\n",
1263-
" \"min\" : 0.0,\n",
1264-
" \"max\" : 0.9\n",
1265-
" }\n",
1266-
" }\n",
1267-
" },\n",
1268-
" \"merge_neuron_sets\" : {\n",
1269-
" \"alignment_min_threshold\" : alignment_min_threshold,\n",
1270-
" \"overlap_min_threshold\" : overlap_min_threshold,\n",
1271-
" \"fuse_neurons\" : {\n",
1272-
" \"fraction_mean_neuron_max_threshold\" : 0.01\n",
1273-
" }\n",
1274-
" }\n",
1275-
" }\n",
1276-
")\n",
1216+
"da_imgs = dask_store[subgroup_dict]\n",
1217+
"da_imgs = da_imgs.rechunk(((1,) + da_imgs.shape[1:]))\n",
12771218
"\n",
1278-
"# Store projections\n",
1279-
"dask_store.update(dict(zip(\n",
1280-
" [\"%s/%s\" % (subgroup_post, e) for e in result.dtype.names],\n",
1281-
" [result[e] for e in result.dtype.names]\n",
1282-
")))\n",
1219+
"da_imgs = da_imgs[0]\n",
12831220
"\n",
1284-
"dask.distributed.progress(\n",
1285-
" dask.distributed.futures_of([\n",
1286-
" dask_store[\"%s/%s\" % (subgroup_post, e)]\n",
1287-
" for e in result.dtype.names\n",
1288-
" ]),\n",
1289-
" notebook=False\n",
1290-
")\n",
1291-
"print(\"\")"
1221+
"da_imgs_thrd = (da_imgs - noise_threshold * (da_imgs - significance_threshold * da_imgs.std()).std()) > 0\n",
1222+
"\n",
1223+
"da_lbl_img, da_num_lbls = dask_ndmeasure.label(da_imgs_thrd)\n",
1224+
"da_lbl_img, da_num_lbls = client.persist([da_lbl_img, da_num_lbls])\n",
1225+
"\n",
1226+
"da_result = []\n",
1227+
"for i in irange(1, 1 + int(da_num_lbls)):\n",
1228+
" da_result.append(da_lbl_img == i)\n",
1229+
"da_result = da.stack(da_result)\n",
1230+
"\n",
1231+
"dask_store[subgroup_post_mask] = da_result\n",
1232+
"\n",
1233+
"dask.distributed.progress(dask_store[subgroup_post_mask], notebook=False)\n",
1234+
"print(\"\")\n",
1235+
"\n",
1236+
"\n",
1237+
"# View results\n",
1238+
"imgs_min, imgs_max = 0, 100\n",
1239+
"\n",
1240+
"da_imgs = dask_store[subgroup_post_mask]\n",
1241+
"da_imgs = da_imgs.astype(np.uint8)\n",
1242+
"\n",
1243+
"da_imgs_min, da_imgs_max = da_imgs.min(), da_imgs.max()\n",
1244+
"\n",
1245+
"status = client.compute([da_imgs_min, da_imgs_max])\n",
1246+
"dask.distributed.progress(status, notebook=False)\n",
1247+
"print(\"\")\n",
1248+
"\n",
1249+
"imgs_min, imgs_max = [s.result() for s in status]\n",
1250+
"\n",
1251+
"mplsv = plt.figure(FigureClass=MPLViewer)\n",
1252+
"mplsv.set_images(\n",
1253+
" da_imgs,\n",
1254+
" vmin=imgs_min,\n",
1255+
" vmax=imgs_max\n",
1256+
")"
12921257
]
12931258
},
12941259
{

0 commit comments

Comments (0)