1 change: 1 addition & 0 deletions cloudvolume/chunks.py
@@ -321,6 +321,7 @@ def encode_compressed_segmentation(
return cseg.compress(subvol, block_size=block_size, order=order)

def encode_raw(subvol):
subvol = np.asfortranarray(subvol)
return subvol.tobytes('F')

def encode_kempressed(subvol):
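The one-line change to `encode_raw` converts the subvolume to Fortran (column-major) order before serializing. For an ordinary array the emitted bytes are the same either way, since `tobytes('F')` already serializes in Fortran order; the conversion guarantees a contiguous buffer to serialize from. A quick illustrative check of that assumption (not part of the PR):

```python
import numpy as np

# A C-ordered array and its Fortran-ordered copy serialize to identical
# bytes under tobytes('F'); asfortranarray only changes the memory layout.
subvol = np.arange(24, dtype=np.uint8).reshape(2, 3, 4)  # C-contiguous
fortran = np.asfortranarray(subvol)                      # F-contiguous copy

assert subvol.tobytes('F') == fortran.tobytes('F')
assert fortran.flags['F_CONTIGUOUS']
```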
19 changes: 12 additions & 7 deletions cloudvolume/cloudvolume.py
@@ -80,10 +80,11 @@ def __new__(cls,
info:dict = None,
provenance:dict = None,
compress:CompressType = None,
compress_level:Optional[int] = None,
non_aligned_writes:bool = False,
overwrite_partial_chunks:bool = False,
parallel:ParallelType = 1,
delete_black_uploads:bool = False,
background_color:int = 0,
green_threads:bool = False,
use_https:bool = False,
@@ -226,12 +227,16 @@ def __new__(cls,
mip: (int or iterable) Which level of downsampling to read and write from.
0 is the highest resolution. You can also specify the voxel resolution
like mip=[6,6,30] which will search for the appropriate mip level.
non_aligned_writes: (bool) Enable non-aligned writes. Not multiprocessing
safe without careful design. When not enabled, a
cloudvolume.exceptions.AlignmentError is thrown for non-aligned writes.

https://github.com/seung-lab/cloud-volume/wiki/Advanced-Topic:-Non-Aligned-Writes
overwrite_partial_chunks: (bool) When True, the shell chunks of a non-aligned
write are created pre-filled with background_color instead of being
downloaded. Unsafe without careful design; use only when no chunk is
written to more than once. Requires non_aligned_writes=True. Defaults to False.
parallel (int: 1, bool): Number of extra processes to launch, 1 means only
use the main process. If parallel is True use the number of CPUs
returned by multiprocessing.cpu_count(). When parallel > 1, shared
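A minimal usage sketch of the new flag, following the docstring above and the tests at the end of this PR. The layer path is hypothetical; any precomputed volume opened with `non_aligned_writes=True` behaves the same way:

```python
import numpy as np
from cloudvolume import CloudVolume

# Hypothetical local layer; overwrite_partial_chunks only applies when
# non-aligned writes are enabled.
vol = CloudVolume('file:///tmp/example/layer', non_aligned_writes=True)
vol.overwrite_partial_chunks = True  # synthesize shell chunks instead of downloading them
vol.background_color = 0             # fill value for the synthesized shell chunks

# A non-aligned write: partially covered border chunks are uploaded as
# background_color plus this data, without fetching their prior contents,
# so each chunk should be written at most once.
vol[10:50, 10:50, 10:50] = np.ones((40, 40, 40, 1), dtype=vol.dtype)
```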
6 changes: 4 additions & 2 deletions cloudvolume/datasource/precomputed/__init__.py
@@ -22,8 +22,9 @@ def create_precomputed(
fill_missing:bool=False, cache:CacheType=False, compress_cache:CompressType=None,
cdn_cache:bool=True, progress:bool=False, info:Optional[dict]=None,
provenance:Optional[dict]=None, compress:CompressType=None,
compress_level:Optional[int]=None, non_aligned_writes:bool=False,
overwrite_partial_chunks:bool=False,
parallel:ParallelType=1, delete_black_uploads:bool=False, background_color:int=0,
green_threads:bool=False, use_https:bool=False,
max_redirects:int=10, mesh_dir:Optional[str]=None, skel_dir:Optional[str]=None,
secrets:SecretsType=None, spatial_index_db:Optional[str]=None,
@@ -94,6 +95,7 @@ def create_precomputed(
autocrop=bool(autocrop),
bounded=bool(bounded),
non_aligned_writes=bool(non_aligned_writes),
overwrite_partial_chunks=bool(overwrite_partial_chunks),
fill_missing=bool(fill_missing),
delete_black_uploads=bool(delete_black_uploads),
background_color=background_color,
7 changes: 5 additions & 2 deletions cloudvolume/datasource/precomputed/image/__init__.py
@@ -35,10 +35,11 @@
class PrecomputedImageSource(ImageSourceInterface):
def __init__(
self, config, meta, cache,
autocrop:bool = False,
bounded:bool = True,
non_aligned_writes:bool = False,
overwrite_partial_chunks:bool = False,
fill_missing:bool = False,
delete_black_uploads:bool = False,
background_color:int = 0,
readonly:bool = False,
@@ -57,6 +58,7 @@ def __init__(

self.delete_black_uploads = bool(delete_black_uploads)
self.background_color = background_color
self.overwrite_partial_chunks = bool(overwrite_partial_chunks)

self.shared_memory_id = self.generate_shared_memory_location()

@@ -443,6 +445,7 @@ def upload(
delete_black_uploads=self.delete_black_uploads,
background_color=self.background_color,
non_aligned_writes=self.non_aligned_writes,
overwrite_partial_chunks=self.overwrite_partial_chunks,
secrets=self.config.secrets,
green=self.config.green,
fill_missing=self.fill_missing, # applies only to unaligned writes
62 changes: 52 additions & 10 deletions cloudvolume/datasource/precomputed/image/tx.py
@@ -1,5 +1,4 @@
from functools import partial
import os

import fastremap
import numpy as np
@@ -33,6 +32,34 @@
progress_queue = None # defined in common.initialize_synchronization
fs_lock = None # defined in common.initialize_synchronization

def upload_with_overwrite_partial_chunks(
meta, cache, lru, image, offset, mip,
bounds, **options):
"""
Handle uploads when overwrite_partial_chunks is enabled.
Creates a chunk-aligned image pre-filled with the background color, copies
the user's image into it with shade, then uploads the result as aligned chunks.
"""
background_color = options.get('background_color', 0)
expanded = bounds.expand_to_chunk_size(meta.chunk_size(mip), meta.voxel_offset(mip))

padded_shape = list(expanded.size3())
if image.ndim > 3:
padded_shape.append(image.shape[3])
else:
padded_shape.append(1)

padded_image = np.full(padded_shape, background_color, dtype=meta.dtype, order='F')

# Use shade to copy user's image into the padded image
shade(padded_image, expanded, image, bounds)

upload_aligned(
meta, cache, lru,
padded_image, expanded.minpt, mip,
**options
)
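Conceptually, the function above expands the write bounds outward to the chunk grid, allocates a canvas of that size pre-filled with the background color, and copies the user's image into it before handing off to the aligned upload path. A standalone numpy sketch of that padding step, assuming a zero voxel offset and treating the copy as a plain slice assignment (an illustration, not the library's implementation):

```python
import numpy as np

def pad_to_chunk_grid(image, offset, chunk_size, background_color, dtype):
    # Expand [offset, offset + shape) outward to chunk boundaries, assuming
    # the volume's voxel offset is (0, 0, 0).
    offset = np.asarray(offset)
    chunk_size = np.asarray(chunk_size)
    shape = np.asarray(image.shape[:3])

    lo = (offset // chunk_size) * chunk_size               # floor to the grid
    hi = -(-(offset + shape) // chunk_size) * chunk_size   # ceil to the grid

    channels = image.shape[3] if image.ndim > 3 else 1
    canvas = np.full(tuple(hi - lo) + (channels,), background_color,
                     dtype=dtype, order='F')

    # Copy the user's image into its position on the padded canvas
    # (the role shade plays above).
    start, end = offset - lo, offset - lo + shape
    canvas[start[0]:end[0], start[1]:end[1], start[2]:end[2]] = \
        image.reshape(tuple(shape) + (channels,))
    return canvas, lo  # aligned canvas and its grid-aligned offset
```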

def upload(
meta, cache, lru, lru_encoding,
image, offset, mip,
@@ -41,9 +68,10 @@ def upload(
cdn_cache=None,
parallel=1,
progress=False,
delete_black_uploads=False,
background_color=0,
non_aligned_writes=False,
overwrite_partial_chunks=False,
location=None, location_bbox=None, location_order='F',
use_shared_memory=False, use_file=False,
green=False, fill_missing=False, secrets=None
@@ -52,13 +80,20 @@ def upload(

if not np.issubdtype(image.dtype, np.dtype(meta.dtype).type):
raise ValueError("""
The uploaded image data type must match the volume data type.

Volume: {}
Image: {}
""".format(meta.dtype, image.dtype)
)

if overwrite_partial_chunks and not non_aligned_writes:
raise ValueError(
"overwrite_partial_chunks=True requires non_aligned_writes=True. "
"This parameter only applies to non-aligned writes where shell chunks "
"need to be handled. Set non_aligned_writes=True to enable this feature."
)

shape = Vec(*image.shape)[:3]
offset = Vec(*offset)[:3]
bounds = Bbox( offset, shape + offset)
@@ -102,12 +137,19 @@ def upload(
)
return

if overwrite_partial_chunks:
upload_with_overwrite_partial_chunks(
meta, cache, lru, image, offset, mip,
bounds, **options
)
return

# Upload the aligned core
retracted = bounds.shrink_to_chunk_size(meta.chunk_size(mip), meta.voxel_offset(mip))
core_bbox = retracted.clone() - bounds.minpt

if not core_bbox.subvoxel():
core_img = image[ core_bbox.to_slices() ]
upload_aligned(
meta, cache, lru,
core_img, retracted.minpt, mip,
@@ -129,9 +171,9 @@ def shade_and_upload(img3d, bbox):
threaded_upload_chunks(
meta, cache, lru,
img3d, mip,
(( Vec(0,0,0), Vec(*img3d.shape[:3]), bbox.minpt, bbox.maxpt),),
compress=compress, cdn_cache=cdn_cache,
progress=False, n_threads=0,
delete_black_uploads=delete_black_uploads,
green=green, secrets=secrets, lru_encoding=lru_encoding,
)
@@ -140,13 +182,13 @@ def shade_and_upload(img3d, bbox):

decode_fn = partial(decode, allow_none=False)
download_chunks_threaded(
meta, cache, None, lru_encoding, mip, shell_chunks,
fn=shade_and_upload, decode_fn=decode_fn,
fill_missing=fill_missing,
progress=("Shading Border" if progress else None),
compress_cache=compress_cache,
green=green, secrets=secrets,
)

def upload_aligned(
meta, cache, lru,
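Both the new upload_with_overwrite_partial_chunks above and the pre-existing border path rely on shade to merge a source image into a destination canvas. The visible call, shade(padded_image, expanded, image, bounds), suggests a (dest_img, dest_bbox, src_img, src_bbox) shape that copies the region where the two boxes overlap; a rough illustrative reimplementation under that assumption (not the library's code):

```python
import numpy as np

def shade_sketch(dest_img, dest_min, src_img, src_min):
    # Bounding boxes are represented here by their minimum corners in global
    # volume coordinates plus the arrays' own shapes; both arrays are assumed
    # to have the same number of dimensions (illustration only).
    dest_min, src_min = np.asarray(dest_min), np.asarray(src_min)
    dest_max = dest_min + np.asarray(dest_img.shape[:3])
    src_max = src_min + np.asarray(src_img.shape[:3])

    lo = np.maximum(dest_min, src_min)  # lower corner of the overlap
    hi = np.minimum(dest_max, src_max)  # upper corner of the overlap
    if np.any(hi <= lo):
        return  # disjoint boxes: nothing to copy

    d0, d1 = lo - dest_min, hi - dest_min
    s0, s1 = lo - src_min, hi - src_min
    dest_img[d0[0]:d1[0], d0[1]:d1[1], d0[2]:d1[2]] = \
        src_img[s0[0]:s1[0], s0[1]:s1[1], s0[2]:s1[2]]
```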
8 changes: 8 additions & 0 deletions cloudvolume/frontends/precomputed.py
@@ -133,6 +133,14 @@ def delete_black_uploads(self):
def delete_black_uploads(self, val):
self.image.delete_black_uploads = val

@property
def overwrite_partial_chunks(self) -> bool:
return self.image.overwrite_partial_chunks

@overwrite_partial_chunks.setter
def overwrite_partial_chunks(self, val: bool):
self.image.overwrite_partial_chunks = bool(val)

@property
def parallel(self):
return self.config.parallel
70 changes: 69 additions & 1 deletion test/test_cloudvolume.py
@@ -795,7 +795,75 @@ def test_non_aligned_write(lru_bytes):
cv[ middle.to_slices() ] = np.ones(shape=middle.size3(), dtype=cv.dtype)
answer = np.zeros(shape=cv.shape, dtype=cv.dtype)
answer[ 362:662, 362:662, : ] = 1
assert np.all(cv[:] == answer)

@pytest.mark.parametrize('lru_bytes', (0,1024,1e6))
def test_overwrite_partial_chunks(lru_bytes):
delete_layer()
offset = Vec(0, 0, 0)
cv, _ = create_layer(size=(128, 128, 128, 1), offset=offset)
cv.image.lru.resize(lru_bytes)

cv.non_aligned_writes = False
cv.overwrite_partial_chunks = True
try:
cv[10:50, 10:50, 10:50] = np.ones(shape=(40,40,40,1), dtype=cv.dtype) * 5
assert False
except ValueError as e:
assert "non_aligned_writes" in str(e)

cv.overwrite_partial_chunks = False
cv[:] = np.zeros(shape=cv.shape, dtype=cv.dtype)
cv.non_aligned_writes = True
cv.overwrite_partial_chunks = True
cv.background_color = 3

cv[10:50, 10:50, 10:50] = np.ones(shape=(40,40,40,1), dtype=cv.dtype) * 5
chunk_data = cv[0:64, 0:64, 0:64]

assert np.all(chunk_data[10:50, 10:50, 10:50] == 5)
assert np.all(chunk_data[0:10, :, :] == 3)
assert np.all(chunk_data[50:64, :, :] == 3)

# Write across chunk boundaries
delete_layer()
cv, _ = create_layer(size=(128, 128, 128, 1), offset=(0,0,0))
cv[:] = np.zeros(shape=cv.shape, dtype=cv.dtype)
cv.non_aligned_writes = True
cv.overwrite_partial_chunks = True
cv.background_color = 2

cv[50:80, 50:80, 50:80] = np.ones(shape=(30,30,30,1), dtype=cv.dtype) * 8

assert np.all(cv[50:80, 50:80, 50:80] == 8)
chunk1 = cv[0:64, 0:64, 0:64]
assert np.all(chunk1[50:64, 50:64, 50:64] == 8)
assert np.all(chunk1[0:50, :, :] == 2)

chunk2 = cv[64:128, 64:128, 64:128]
assert np.all(chunk2[0:16, 0:16, 0:16] == 8)
assert np.all(chunk2[16:64, :, :] == 2)

def test_overwrite_partial_chunks_multichannel():
delete_layer()
cv = CloudVolume.from_numpy(
np.zeros((64, 64, 64, 3), dtype=np.uint8),
vol_path='file:///tmp/removeme/multichannel',
resolution=(1,1,1),
voxel_offset=(0,0,0),
chunk_size=(32, 32, 32),
layer_type='image',
)

cv.non_aligned_writes = True
cv.overwrite_partial_chunks = True
cv.background_color = 7
cv[10:30, 10:30, 10:30] = np.ones((20, 20, 20, 3), dtype=np.uint8) * 9

result = cv[0:32, 0:32, 0:32]
assert np.all(result[10:30, 10:30, 10:30] == 9)
assert np.all(result[0:10, :, :] == 7)
assert np.all(result[30:32, :, :] == 7)

def test_autocropped_write():
delete_layer()