render multiscale 3D volume with TensorStore (wip4) #1

aloejhb committed Jul 22, 2022
1 parent d1f7501 commit 23172c1
Showing 6 changed files with 116 additions and 36 deletions.
17 changes: 17 additions & 0 deletions config_example/align_sec.json
@@ -0,0 +1,17 @@
{
    "load_sections": {
        "sbem_experiment": "/tungstenfs/scratch/gfriedri/hubo/em_alignment/results/sbem_experiments/20220524_Bo_juv20210731",
        "grid_index": 1,
        "start_section": 2500,
        "end_section": 7000
    },
    "align_sections": {
        "crop_size": [400, 400],
        "downscale_factor": [1, 1],
        "range_limit": 10,
        "filter_size": 10
    },
    "output": {
        "offset_dir": "/tungstenfs/scratch/gfriedri/hubo/em_alignment/results/sbem_experiments/20220524_Bo_juv20210731/zalign/ob_substack"
    }
}
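
A minimal sketch of how a section-alignment driver might consume this config. Only the keys come from the file above; the loading code itself is an assumption, not part of the commit:

import json

with open("config_example/align_sec.json") as f:
    config = json.load(f)

load_cfg = config["load_sections"]       # experiment path, grid, section range
align_cfg = config["align_sections"]     # crop_size, downscale_factor, ...
offset_dir = config["output"]["offset_dir"]

print(load_cfg["start_section"], load_cfg["end_section"])  # 2500 7000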
19 changes: 19 additions & 0 deletions scripts/run_multiscale_volume.py
@@ -0,0 +1,19 @@
import os
import asyncio
from sbem.render_volume.multiscale import make_multiscale

if __name__ == "__main__":
    sbem_experiment="/tungstenfs/scratch/gfriedri/hubo/em_alignment/results/sbem_experiments/20220524_Bo_juv20210731"
    grid_index=1
    start_section=5000
    end_section=5005
    volume_name = f"s{start_section}_s{end_section}"
    volume_path = os.path.join(sbem_experiment, "volume", volume_name+"_ng")

    n_scales = 2
    downsample_factors_list = [[2, 2, 2, 1]] * n_scales
    chunk_size_list = [[64, 64, 64]] * n_scales
    asyncio.run(make_multiscale(volume_path,
                                downsample_factors_list,
                                chunk_size_list,
                                downsample_method="stride"))
2 changes: 1 addition & 1 deletion scripts/run_render_volume.py
@@ -9,7 +9,7 @@ async def main():
    sbem_experiment="/tungstenfs/scratch/gfriedri/hubo/em_alignment/results/sbem_experiments/20220524_Bo_juv20210731"
    grid_index=1
    start_section=5000
-    end_section=5004
+    end_section=5005
    resolution = [11, 11, 33]
    volume_name = f"s{start_section}_s{end_section}"
    volume_path = os.path.join(sbem_experiment, "volume", volume_name+"_ng")
83 changes: 54 additions & 29 deletions src/sbem/render_volume/multiscale.py
@@ -1,16 +1,24 @@
-import async
import os
+import asyncio
import numpy as np
import tensorstore as ts

-async def open_scaled_view(base_path, downsample_factors, ori_resolution):
+from sbem.render_volume.render_utils import (
+    get_scale_key, get_sharding_spec,
+    open_volume, get_resolution)

+async def open_scaled_view(base_path, base_scale_key, downsample_factors,
+                           downsample_method):
    vscaled_spec = {
        "driver": "downsample",
        "downsample_factors": downsample_factors,
-        "downsample_method": "mean",
+        "downsample_method": downsample_method,
        "base": {"driver": "neuroglancer_precomputed",
                 "kvstore": {"driver": "file",
                             "path": base_path
                 },
                 "scale_metadata": {
-                     "resolution": ori_resolution
+                     "key": base_scale_key
                 }
        }
    }
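
For reference, the "downsample" driver that open_scaled_view builds on can also be opened standalone: it wraps the base store in a read-only, virtually downsampled view, with nothing written to disk. A minimal sketch, with a placeholder path and the same factors as run_multiscale_volume.py:

import asyncio
import tensorstore as ts

async def demo(volume_path="/path/to/volume_ng"):  # placeholder path
    vscaled = await ts.open({
        "driver": "downsample",
        "downsample_factors": [2, 2, 2, 1],   # x, y, z, channel
        "downsample_method": "stride",
        "base": {
            "driver": "neuroglancer_precomputed",
            "kvstore": {"driver": "file", "path": volume_path},
        },
    })
    print(vscaled.shape)  # x, y, z halved relative to the base scale

# asyncio.run(demo())  # requires an existing precomputed volume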
@@ -22,67 +30,84 @@ def get_scaled_resolution():
    scaled_resolution = np.divide(ori_resolution, downsample_factors)


-def get_scale_key(resolution):
-    scale_key = "_".join(resolution)
-    return scale_ key


-async def create_scaled_volume(base_path, size, chunk_size, resolution,
+async def create_scaled_volume(base_path, scale_key,
+                               size, chunk_size, resolution,
                               sharding=True,
                               sharding_spec=get_sharding_spec()):
-    scale_key = get_scale_key(resolution)
    scaled_path = os.path.join(base_path, scale_key)
-    if not os.path.exists():
-        os.mkdir(scaled_path)
-    else:
-        # TODO delete the scaled volume if needed
-        raise FileExistsError(f"Scaled volume {scaled_path} already exists.")

    scaled_spec = {
        "driver": "neuroglancer_precomputed",
        "kvstore": {"driver": "file",
-                    "path": path},
+                    "path": base_path},
        "scale_metadata": {
            "size": size,
            "encoding": "jpeg",
            "chunk_size": chunk_size,
            "resolution": resolution,
            "key": scale_key
        },
"create": True,
}

+    if not os.path.exists(scaled_path):
+        os.mkdir(scaled_path)
+        scaled_spec["create"] = True

    if sharding:
        scaled_spec["scale_metadata"]["sharding"] = sharding_spec
    scaled = await ts.open(scaled_spec)
    return scaled


-async def write_scaled_volume(base_path, downsample_factors,
-                              ori_resolution,
+async def write_scaled_volume(base_path,
+                              base_scale_key,
+                              scale_key,
+                              downsample_factors,
                              chunk_size,
                              downsample_method,
                              sharding=True,
                              queue=None):
-    vscaled = await open_scaled_view(base_path, downsample_factors,
-                                     ori_resolution)
-    scaled = await create scaled_volume(base_path, size, chunk_size, resolution,
+    vscaled = await open_scaled_view(base_path, base_scale_key,
+                                     downsample_factors,
+                                     downsample_method)
+
+    size = vscaled.shape[:-1]
+    resolution = get_resolution(vscaled)
+    scaled = await create_scaled_volume(base_path, scale_key,
+                                        size, chunk_size, resolution,
                                        sharding=True)
    await scaled[:].write(vscaled)


async def make_multiscale(volume_path,
                          downsample_factors_list,
                          chunk_size_list,
-                          downsample_method='mean'):
+                          downsample_method='stride'):
    if len(downsample_factors_list) != len(chunk_size_list):
        raise ValueError("downsample_factors_list and chunk_size_list "
                         "should have same number of scales")

    n_scales = len(downsample_factors_list)
    tasks = []
-    volume = open_volume(volume_path)
-    ori_resolution = volume.xxx.resolution
-    ori_resolution_list = xxx
+    volume = await open_volume(volume_path, scale_index=0)

+    base_resolution = get_resolution(volume)
+    volume_scale_key = get_scale_key(base_resolution)
+    resolutions = [np.multiply(base_resolution, df[:-1]).astype(int)
+                   for df in downsample_factors_list]
+    scale_keys = [get_scale_key(r) for r in resolutions]
+    scale_keys.insert(0, volume_scale_key)

    for k,df in enumerate(downsample_factors_list):
        chunk_size = chunk_size_list[k]
-        ori_resolution = ori_resolution_list[k]
-        task = asyncio.create_task(write_downsampled_volume(base_path, df,
-                                   ori_resolution, chunk_size))
+        base_scale_key = scale_keys[k]
+        scale_key = scale_keys[k+1]
+        task = asyncio.create_task(
+            write_scaled_volume(volume_path,
+                                base_scale_key,
+                                scale_key,
+                                df,
+                                chunk_size,
+                                downsample_method))
        await asyncio.wait_for(task, None)
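
A quick way to sanity-check the result of make_multiscale is to open each generated scale by its key, the same way write_scaled_volume addresses scales through scale_metadata. This helper is hypothetical, not part of the commit; the keys follow get_scale_key's "x_y_z" naming:

import asyncio
import tensorstore as ts

async def check_scales(volume_path, scale_keys):
    # Open each scale of the multiscale volume and report its extent.
    for key in scale_keys:
        store = await ts.open({
            "driver": "neuroglancer_precomputed",
            "kvstore": {"driver": "file", "path": volume_path},
            "scale_metadata": {"key": key},
        })
        print(key, store.shape)

# asyncio.run(check_scales("/path/to/volume_ng", ["11_11_33", "22_22_66"]))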
30 changes: 24 additions & 6 deletions src/sbem/render_volume/render_utils.py
@@ -109,14 +109,32 @@ async def create_volume(path: str,
    volume = await volume_future
    return volume


+async def open_volume(path, scale_index):
-{
-    "driver": "neuroglancer_precomputed",
-    "kvstore": {"driver": "gcs", "bucket": "my-bucket"},
-    "path": "path/to/volume",
-    "scale_index": 1
-}
+    volume_spec = {
+        "driver": "neuroglancer_precomputed",
+        "kvstore": {"driver": "file",
+                    "path": path
+        },
+        "scale_index": scale_index
+    }

+    volume = await ts.open(volume_spec)
+    return volume


+def get_scale_key(resolution):
+    # This assumes resolution is integer
+    scale_key = "_".join(map(lambda x: str(int(x)), resolution))
+    return scale_key


+def get_resolution(volume):
+    # this assumes that each dimension unit is of the form [number, string]
+    # and resolution is integer
+    dimension_units = volume.dimension_units
+    resolution = [int(du.multiplier) for du in dimension_units[:-1]]
+    return resolution
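
These two helpers only touch dimension_units and plain lists, so their behavior can be illustrated without a live TensorStore handle. A round-trip sketch using a stand-in object; the 11/11/33 nm values are assumptions taken from run_render_volume.py:

from types import SimpleNamespace

def get_scale_key(resolution):  # restated so the snippet runs standalone
    return "_".join(map(lambda x: str(int(x)), resolution))

# Mimic volume.dimension_units: x, y, z in nm plus a trailing channel dim.
fake_units = [SimpleNamespace(multiplier=11), SimpleNamespace(multiplier=11),
              SimpleNamespace(multiplier=33), SimpleNamespace(multiplier=1)]
volume = SimpleNamespace(dimension_units=fake_units)

resolution = [int(du.multiplier) for du in volume.dimension_units[:-1]]
print(resolution)                 # [11, 11, 33]
print(get_scale_key(resolution))  # 11_11_33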


async def estimate_volume_size(stitched_sections, xy_coords):
1 change: 1 addition & 0 deletions src/sbem/section_align/align_utils.py
@@ -138,5 +138,6 @@ def offsets_to_coords(xy_offsets, non_neg=True):
    xy_coords = np.cumsum(xy_offsets, axis=0)
    if non_neg:
        xy_coords = xy_coords - xy_coords.min(axis=0)
+    xy_coords = np.insert(xy_coords, 0, [0,0], axis=0)
    xy_coords = xy_coords.astype(int)
    return xy_coords
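
A worked example of offsets_to_coords after this change: pairwise offsets are accumulated, shifted to be non-negative, and a [0, 0] row is prepended so the first section sits at the origin. The input values are made up:

import numpy as np

xy_offsets = np.array([[2.0, -1.0], [-5.0, 3.0]])
xy_coords = np.cumsum(xy_offsets, axis=0)            # [[ 2, -1], [-3,  2]]
xy_coords = xy_coords - xy_coords.min(axis=0)        # [[ 5,  0], [ 0,  3]]
xy_coords = np.insert(xy_coords, 0, [0, 0], axis=0)  # origin row, section 0
xy_coords = xy_coords.astype(int)
print(xy_coords)  # [[0 0] [5 0] [0 3]]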
