manual ome #450

Merged
merged 2 commits on Jan 10, 2024
61 changes: 60 additions & 1 deletion brainlit/utils/tests/test_write.py
@@ -1,7 +1,12 @@
import pytest
import zarr
import numpy as np
from brainlit.utils.write import zarr_to_omezarr, czi_to_zarr, write_trace_layer
from brainlit.utils.write import (
zarr_to_omezarr,
zarr_to_omezarr_single,
czi_to_zarr,
write_trace_layer,
)
import os
import shutil
import zipfile
@@ -87,6 +92,27 @@ def test_writeome_baddim(init_3dzarr, init_4dzarr):
shutil.rmtree(out_path)


def test_writeome_single_baddim(init_3dzarr, init_4dzarr):
# error for 4d zarrs
zarr_path, data_dir = init_4dzarr
out_path = data_dir / "fg_ome.zarr"
with pytest.raises(ValueError, match=r"Conversion only supported for 3D arrays"):
zarr_to_omezarr_single(zarr_path=zarr_path, out_path=out_path, res=[1, 1, 1])

# error if ome already exists
zarr_path, data_dir = init_3dzarr
out_path = data_dir / "fg_ome.zarr"
zarr_to_omezarr_single(zarr_path=zarr_path, out_path=out_path, res=[1, 1, 1])

with pytest.raises(
ValueError,
match=f"{out_path} already exists, please delete the existing file or change the name of the ome-zarr to be created.",
):
zarr_to_omezarr_single(zarr_path=zarr_path, out_path=out_path, res=[1, 1, 1])

shutil.rmtree(out_path)


def test_writezarr_badpar(init_4dczi):
czi_path, data_dir = init_4dczi
with pytest.raises(ValueError, match="parallel must be positive integer, not 1"):
@@ -159,6 +185,39 @@ def test_writeome(init_3dzarr):
)


def test_writeome_single(init_3dzarr):
res = [1, 1, 2] # in nm
dimension_map = {"x": 0, "y": 1, "z": 2}
zarr_path, data_dir = init_3dzarr
out_path = data_dir / "fg_ome_single.zarr"

assert not os.path.exists(out_path)
zarr_to_omezarr_single(zarr_path=zarr_path, out_path=out_path, res=res)
assert os.path.exists(out_path)

# check units are micrometers
ome_zarr = zarr.open(out_path)
metadata = ome_zarr.attrs["multiscales"][0]

dimension_names = []
for dimension in metadata["axes"]:
assert dimension["unit"] == "micrometer"
assert dimension["type"] == "space"
dimension_names.append(dimension["name"])

# check that xy resolutions are scaled by powers of 2 across levels
for resolution in metadata["datasets"]:
lvl = int(resolution["path"])
true_res = np.multiply(res, [2**lvl, 2**lvl, 1]) / 1000 # in microns
true_res = [
true_res[dimension_map[dimension_name]]
for dimension_name in dimension_names
]
np.testing.assert_almost_equal(
true_res, resolution["coordinateTransformations"][0]["scale"], decimal=3
)


def test_write_trace_layer(init_omezarr):
data_dir, res = init_omezarr

100 changes: 100 additions & 0 deletions brainlit/utils/write.py
@@ -11,6 +11,7 @@
import os
from cloudvolume import CloudVolume
import json
from skimage.measure import block_reduce


def _read_czi_slice(czi, C, Z):
@@ -140,6 +141,105 @@ def zarr_to_omezarr(zarr_path: str, out_path: str, res: list):
_edit_ome_metadata(out_path, res)


def _write_slice_ome(z: int, lvl: int, z_in_path: str, zgr_path: str):
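"""Write z-slice `z` of the input zarr into pyramid level `lvl` of the ome-zarr group, downsampling xy by a factor of 2**lvl."""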
z_in = zarr.open(z_in_path)
zgr = zarr.open_group(zgr_path)
z_out = zgr[str(lvl)]

im_slice = np.squeeze(z_in[z, :, :])
if lvl > 0:
im_ds = block_reduce(im_slice, block_size=2**lvl)
else:
im_ds = im_slice

z_out[z, :, :] = im_ds


def zarr_to_omezarr_single(zarr_path: str, out_path: str, res: list, parallel: int = 1):
"""Convert 3D zarr to ome-zarr manually. Chunk size in z is 1.

Args:
zarr_path (str): Path to zarr.
out_path (str): Path of ome-zarr to be created.
res (list): List of xyz resolution values in nanometers.
parallel (int): Number of cores to use.

Raises:
ValueError: If the ome-zarr to be written already exists.
ValueError: If the input array is not 3-dimensional.
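
Example (illustrative only; the paths and resolution values here are hypothetical):
>>> zarr_to_omezarr_single("fg.zarr", "fg_ome.zarr", res=[300, 300, 1000], parallel=4)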
"""
if os.path.exists(out_path):
raise ValueError(
f"{out_path} already exists, please delete the existing file or change the name of the ome-zarr to be created."
)

zra = zarr.open(zarr_path)
sz0 = zra.shape

if len(sz0) != 3:
raise ValueError("Conversion only supported for 3D arrays")

zgr = zarr.group(out_path)

for lvl in tqdm(range(5), desc="Writing different levels..."):
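# level lvl downsamples x and y by a factor of 2**lvl via block_reduce; z stays at full resolution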
im_slice = np.squeeze(zra[0, :, :])
if lvl > 0:
im_ds = block_reduce(im_slice, block_size=2**lvl)
else:
im_ds = im_slice
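# chunks hold a single z-slice (capped at 200 x 200 in xy) so each slice can be written independently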
chunk_size = [1, np.amin((200, im_ds.shape[0])), np.amin((200, im_ds.shape[1]))]

zra_lvl = zgr.create(
str(lvl),
shape=(sz0[0], im_ds.shape[0], im_ds.shape[1]),
chunks=chunk_size,
dtype=zra.dtype,
dimension_separator="/",
)

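# fill this level slice-by-slice, either serially or across z with joblib when parallel > 1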
if parallel == 1:
for z in tqdm(range(sz0[0]), desc="Writing slices...", leave=False):
_write_slice_ome(z, lvl, zarr_path, out_path)
else:
Parallel(n_jobs=parallel, backend="threading")(
delayed(_write_slice_ome)(
z, lvl, z_in_path=zarr_path, zgr_path=out_path
)
for z in tqdm(range(sz0[0]), desc="Saving slices...")
)

axes = []
for dim in ["z", "x", "y"]:
axes.append({"name": dim, "type": "space", "unit": "micrometer"})

datasets = []
for lvl in range(5):
datasets.append(
{
"path": str(lvl),
"coordinateTransformations": [
{
"type": "scale",
"scale": [
res[2] / 1000,
res[0] * 2**lvl / 1000,
res[1] * 2**lvl / 1000,
],
}
],
}
)
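# assemble OME-NGFF v0.4 "multiscales" metadata by hand and write it to the group's .zattrs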

json_data = {
"multiscales": [
{"axes": axes, "datasets": datasets, "name": "/", "version": "0.4"}
]
}

with open(Path(out_path) / ".zattrs", "w") as f:
json.dump(json_data, f, indent=4)


def _edit_ome_metadata(out_path: str, res: list):
res = np.divide([res[-1], res[0], res[1]], 1000)
ome_zarr = zarr.open(