2 changes: 1 addition & 1 deletion .isort.cfg
@@ -1,2 +1,2 @@
[settings]
known_third_party = dask,numpy,ome_zarr,omero,omero_rois,omero_zarr,pytest,setuptools,skimage,zarr
known_third_party = dask,numpy,ome_zarr,ome_zarr_models,omero,omero_rois,omero_zarr,pytest,setuptools,skimage,zarr
2 changes: 1 addition & 1 deletion setup.py
@@ -51,7 +51,7 @@ def get_long_description() -> str:
author="The Open Microscopy Team",
author_email="",
python_requires=">=3",
install_requires=["omero-py>=5.6.0", "ome-zarr>=0.5.0"],
install_requires=["omero-py>=5.6.0", "zarr>=2.18.0,<3" "ome-zarr-models>=0.1.5"],
long_description=long_description,
keywords=["OMERO.CLI", "plugin"],
url="https://github.com/ome/omero-cli-zarr/",
69 changes: 48 additions & 21 deletions src/omero_zarr/raw_pixels.py
@@ -20,18 +20,16 @@
import math
import os
import time
from typing import Any, Dict, List, Optional, Tuple
from collections import defaultdict
from typing import Any, Dict, List, Optional, Tuple, Union

import dask.array as da
import numpy as np
import omero.clients # noqa
import omero.gateway # required to allow 'from omero_zarr import raw_pixels'
from ome_zarr.dask_utils import resize as da_resize
from ome_zarr.writer import (
write_multiscales_metadata,
write_plate_metadata,
write_well_metadata,
)
from ome_zarr_models.v04.coordinate_transformations import VectorScale
from ome_zarr_models.v04.image import ImageAttrs
from ome_zarr_models.v04.multiscales import Dataset, Multiscale
from omero.model import Channel
from omero.model.enums import (
PixelsTypedouble,
@@ -49,6 +47,7 @@
from . import __version__
from . import ngff_version as VERSION
from .util import marshal_axes, marshal_transformations, open_store, print_status
from .util import resize as da_resize


def image_to_zarr(image: omero.gateway.ImageWrapper, args: argparse.Namespace) -> None:
@@ -89,14 +88,25 @@ def add_image(

paths = add_raw_image(image, parent, level_count, tile_width, tile_height)

# create the image metadata
axes = marshal_axes(image)
transformations = marshal_transformations(image, len(paths))

datasets: List[Dict[Any, Any]] = [{"path": path} for path in paths]
for dataset, transform in zip(datasets, transformations):
dataset["coordinateTransformations"] = transform
ds_models = []
for path, transform in zip(paths, transformations):
transforms_dset = (VectorScale.build(transform[0]["scale"]),)
ds_models.append(Dataset(path=path, coordinateTransformations=transforms_dset))

multi = Multiscale(
axes=axes, datasets=tuple(ds_models), version="0.4", name=image.name
)
image = ImageAttrs(
multiscales=[multi],
)

write_multiscales_metadata(parent, datasets, axes=axes)
# populate the zarr group with the image metadata (only "multiscales")
for k, v in image.model_dump(exclude_none=True).items():
parent.attrs[k] = v

return (level_count, axes)
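For context, here is a minimal standalone sketch of how the ome_zarr_models v04 objects used in add_image() compose and serialize into group attributes. The axis names, scale values, pyramid depth, and the in-memory zarr group are illustrative assumptions, not values taken from OMERO.

import zarr
from ome_zarr_models.v04.axes import Axis
from ome_zarr_models.v04.coordinate_transformations import VectorScale
from ome_zarr_models.v04.image import ImageAttrs
from ome_zarr_models.v04.multiscales import Dataset, Multiscale

# Hypothetical 3-level 2D pyramid: one Dataset per resolution path,
# each carrying a single "scale" coordinate transformation.
axes = [
    Axis(name="y", type="space", unit="micrometer"),
    Axis(name="x", type="space", unit="micrometer"),
]
scales = [[0.5, 0.5], [1.0, 1.0], [2.0, 2.0]]
datasets = tuple(
    Dataset(path=str(level), coordinateTransformations=(VectorScale.build(scale),))
    for level, scale in enumerate(scales)
)
multi = Multiscale(axes=axes, datasets=datasets, version="0.4", name="example")
attrs = ImageAttrs(multiscales=[multi]).model_dump(exclude_none=True)

group = zarr.group()  # stand-in for the image group opened via open_store()
for key, value in attrs.items():
    group.attrs[key] = value  # writes the "multiscales" metadata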

@@ -290,10 +300,20 @@ def plate_to_zarr(plate: omero.gateway._PlateWrapper, args: argparse.Namespace)
# sort by row then column...
wells = sorted(wells, key=lambda x: (x.row, x.column))

well_list = []
fields_by_acq_well: dict[int, dict] = defaultdict(lambda: defaultdict(set))

for well in wells:
row = plate.getRowLabels()[well.row]
col = plate.getColumnLabels()[well.column]
fields = []
well_list.append(
{
"path": f"{row}/{col}",
"rowIndex": well.row,
"columnIndex": well.column,
}
)
for field in range(n_fields[0], n_fields[1] + 1):
ws = well.getWellSample(field)
if ws and ws.getImage():
@@ -305,27 +325,34 @@ def plate_to_zarr(plate: omero.gateway._PlateWrapper, args: argparse.Namespace)
field_info = {"path": f"{field_name}"}
if ac:
field_info["acquisition"] = ac.id
fields_by_acq_well[ac.id][well.id].add(field)
fields.append(field_info)
row_group = root.require_group(row)
col_group = row_group.require_group(col)
field_group = col_group.require_group(field_name)
add_image(img, field_group)
add_omero_metadata(field_group, img)
# Update Well metadata after each image
write_well_metadata(col_group, fields)
# write_well_metadata(col_group, fields)
col_group.attrs["well"] = fields
max_fields = max(max_fields, field + 1)
print_status(int(t0), int(time.time()), count, total)

# Update plate_metadata after each Well
write_plate_metadata(
root,
row_names,
col_names,
wells=list(well_paths),
field_count=max_fields,
acquisitions=plate_acq,
name=plate.name,
)
plate_data: dict[str, Union[str, int, list[dict]]] = {
"columns": [{"name": str(col)} for col in col_names],
"rows": [{"name": str(row)} for row in row_names],
"wells": well_list,
"version": "0.4",
"name": plate.name,
"field_count": max_fields,
}
if plate_acq is not None:
for acq in plate_acq:
fcounts = [len(f) for f in fields_by_acq_well[acq["id"]].values()]
acq["maximumfieldcount"] = max(fcounts)
plate_data["acquisitions"] = plate_acq
root.attrs["plate"] = plate_data

add_toplevel_metadata(root)
print("Finished.")
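To make the structure written above concrete, this is the approximate shape of root.attrs["plate"] for a hypothetical one-row, two-column plate with a single acquisition and two fields per well; all names, ids, and counts are made up.

example_plate_attrs = {
    "columns": [{"name": "1"}, {"name": "2"}],
    "rows": [{"name": "A"}],
    "wells": [
        {"path": "A/1", "rowIndex": 0, "columnIndex": 0},
        {"path": "A/2", "rowIndex": 0, "columnIndex": 1},
    ],
    "version": "0.4",
    "name": "example plate",
    "field_count": 2,
    "acquisitions": [{"id": 0, "maximumfieldcount": 2}],
}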
82 changes: 71 additions & 11 deletions src/omero_zarr/util.py
@@ -17,8 +17,12 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import time
from typing import Dict, List
from typing import Any, Dict, List

import dask.array as da
import numpy as np
import skimage.transform
from ome_zarr_models.v04.axes import Axis
from omero.gateway import ImageWrapper
from zarr.storage import FSStore

@@ -79,7 +83,7 @@ def marshal_pixel_sizes(image: ImageWrapper) -> Dict[str, Dict]:
return pixel_sizes


def marshal_axes(image: ImageWrapper) -> List[Dict]:
def marshal_axes(image: ImageWrapper) -> List[Axis]:
# Prepare axes and transformations info...
size_c = image.getSizeC()
size_z = image.getSizeZ()
@@ -88,18 +92,20 @@ def marshal_axes(image: ImageWrapper) -> List[Axis]:

axes = []
if size_t > 1:
axes.append({"name": "t", "type": "time"})
axes.append(Axis(name="t", type="time", unit=None))
if size_c > 1:
axes.append({"name": "c", "type": "channel"})
axes.append(Axis(name="c", type="channel", unit=None))
if size_z > 1:
axes.append({"name": "z", "type": "space"})
zunit = None
if pixel_sizes and "z" in pixel_sizes:
axes[-1]["unit"] = pixel_sizes["z"]["unit"]
zunit = pixel_sizes["z"]["unit"]
axes.append(Axis(name="z", type="space", unit=zunit))
# last 2 dimensions are always y and x
for dim in ("y", "x"):
axes.append({"name": dim, "type": "space"})
xyunit = None
if pixel_sizes and dim in pixel_sizes:
axes[-1]["unit"] = pixel_sizes[dim]["unit"]
xyunit = pixel_sizes[dim]["unit"]
axes.append(Axis(name=dim, type="space", unit=xyunit))

return axes
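As an illustration, for a hypothetical 5D image (SizeT > 1, SizeC > 1, SizeZ > 1) whose pixel sizes are reported in micrometers, marshal_axes() returns Axis models along these lines; the exact unit strings depend on what marshal_pixel_sizes() reports.

from ome_zarr_models.v04.axes import Axis

expected_axes = [
    Axis(name="t", type="time", unit=None),
    Axis(name="c", type="channel", unit=None),
    Axis(name="z", type="space", unit="micrometer"),
    Axis(name="y", type="space", unit="micrometer"),
    Axis(name="x", type="space", unit="micrometer"),
]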

@@ -118,13 +124,67 @@ def marshal_transformations(
scales = []
for index, axis in enumerate(axes):
pixel_size = 1
if axis["name"] in pixel_sizes:
pixel_size = pixel_sizes[axis["name"]].get("value", 1)
scales.append(zooms[axis["name"]] * pixel_size)
if axis.name in pixel_sizes:
pixel_size = pixel_sizes[axis.name].get("value", 1)
scales.append(zooms[axis.name] * pixel_size)
# ...with a single 'scale' transformation each
transformations.append([{"type": "scale", "scale": scales}])
# NB we rescale X and Y for each level, but not Z, C, T
zooms["x"] = zooms["x"] * multiscales_zoom
zooms["y"] = zooms["y"] * multiscales_zoom

return transformations


def resize(
image: da.Array, output_shape: tuple[int, ...], *args: Any, **kwargs: Any
) -> da.Array:
r"""
Wrapped copy of "skimage.transform.resize"
Resize image to match a certain size.
:type image: :class:`dask.array`
:param image: The dask array to resize
:type output_shape: tuple
:param output_shape: The shape of the resized array
:type \*args: list
:param \*args: Arguments of skimage.transform.resize
:type \*\*kwargs: dict
:param \*\*kwargs: Keyword arguments of skimage.transform.resize
:return: Resized image.
"""
factors = np.array(output_shape) / np.array(image.shape).astype(float)
# Rechunk the input blocks so that the factors achieve an output
# blocks size of full numbers.
better_chunksize = tuple(
np.maximum(1, np.round(np.array(image.chunksize) * factors) / factors).astype(
int
)
)
image_prepared = image.rechunk(better_chunksize)

# E.g. if we resize an image from 6675 by 0.5 to 3337, the factor is 0.49992509, so each
# chunk of size e.g. 1000 will resize to 499. When assembled into a new array, the
# array will be of size 3331 instead of 3337, because each of the 6 chunks was
# smaller by 1. When we compute() this, dask will read 6 chunks of 1000 and expect the
# last chunk to be 337, but instead it will only be 331.
# So we use ceil() here (and in resize_block) to round 499.925 up to a chunk of 500
block_output_shape = tuple(
np.ceil(np.array(better_chunksize) * factors).astype(int)
)

# Resize each chunk independently with map_blocks
def resize_block(image_block: da.Array, block_info: dict) -> da.Array:
# if the input block is smaller than a 'regular' chunk (e.g. edge of image)
# we need to calculate target size for each chunk...
chunk_output_shape = tuple(
np.ceil(np.array(image_block.shape) * factors).astype(int)
)
return skimage.transform.resize(
image_block, chunk_output_shape, *args, **kwargs
).astype(image_block.dtype)

output_slices = tuple(slice(0, d) for d in output_shape)
output = da.map_blocks(
resize_block, image_prepared, dtype=image.dtype, chunks=block_output_shape
)[output_slices]
return output.rechunk(image.chunksize).astype(image.dtype)
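A hedged usage sketch of the resize() wrapper above: downsampling a hypothetical TCZYX dask array by 2 in Y and X only, as is done when building lower-resolution pyramid levels. The array shape, chunking, and skimage keyword arguments are illustrative assumptions.

import dask.array as da
import numpy as np

# Hypothetical full-resolution level: TCZYX with 1024x1024 chunks in YX.
level_0 = da.zeros(
    (1, 1, 1, 4096, 4096), chunks=(1, 1, 1, 1024, 1024), dtype=np.uint16
)

# Halve only the last two (Y, X) dimensions.
new_shape = level_0.shape[:-2] + (level_0.shape[-2] // 2, level_0.shape[-1] // 2)
level_1 = resize(level_0, new_shape, preserve_range=True, anti_aliasing=False)

print(level_1.shape, level_1.dtype, level_1.chunksize)  # (1, 1, 1, 2048, 2048) uint16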