Skip to content

Commit 284df02

Browse files
committed
delete redundant text
1 parent 1f08798 commit 284df02

2 files changed

Lines changed: 2 additions & 69 deletions

File tree

src/iohub/core/config.py

Lines changed: 2 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -38,23 +38,10 @@ class TensorStoreConfig(BaseModel):
3838
3939
Parameters
4040
----------
41-
compressor : CompressorConfig
42-
Default compressor settings used at array creation.
43-
data_copy_concurrency : int
44-
Concurrency limit for TensorStore's ``data_copy_concurrency``
45-
resource, bounding threads used to copy data between buffers.
46-
context : dict or None
47-
Optional raw ``ts.Context`` options merged into the context built
48-
from this config. Individual fields on this config take precedence.
4941
file_io_concurrency : int or None
5042
Concurrency limit for TensorStore's ``file_io_concurrency``
51-
resource, bounding concurrent filesystem reads/writes. Useful to
52-
raise on high-latency networked filesystems (e.g. NFS) where the
53-
default (32) under-saturates the link.
54-
file_io_sync : bool
55-
Whether TensorStore issues ``fsync`` on writes.
56-
file_io_locking : {"auto", "disabled"}
57-
File-locking policy for the ``file`` kvstore driver.
43+
resource. Raise above the default (32) on high-latency networked
44+
filesystems (e.g. NFS) where the default under-saturates the link.
5845
cache_pool_bytes : int or None
5946
Aggregate byte budget for TensorStore's chunk cache pool. ``None``
6047
disables caching.
@@ -66,9 +53,6 @@ class TensorStoreConfig(BaseModel):
6653
and trusts the cache thereafter — recommended for long-running
6754
read-heavy workloads on NFS/VAST where the underlying zarr files
6855
do not change. ``False`` disables freshness checks entirely.
69-
extra_context : dict or None
70-
Additional raw ``ts.Context`` entries merged after all typed
71-
fields, useful as an escape hatch for knobs not modeled here.
7256
"""
7357

7458
compressor: CompressorConfig = Field(default_factory=CompressorConfig)

tests/ngff/test_ngff.py

Lines changed: 0 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -553,57 +553,6 @@ def test_ome_zarr_to_tensorstore(channels_and_random_5d, arr_name, version, conc
553553
assert_array_equal(read_only[arr_name].numpy(), zeros)
554554

555555

556-
@pytest.mark.parametrize("recheck_cached_data", [None, "open", True, False])
557-
def test_tensorstore_recheck_cached_data(monkeypatch, recheck_cached_data):
558-
"""``TensorStoreConfig.recheck_cached_data`` propagates into ``ts.open``.
559-
560-
When the option is ``None`` (default) the kwarg must not be forwarded,
561-
so the TensorStore driver falls back to its own default. For any other
562-
value (``"open"``, ``True``, ``False``) the exact value must reach the
563-
``ts.open`` call — this is the knob used to suppress per-chunk
564-
revalidation on networked filesystems during read-heavy training.
565-
"""
566-
pytest.importorskip("tensorstore")
567-
import tensorstore as ts
568-
569-
from iohub.core.config import TensorStoreConfig
570-
from iohub.core.implementations import tensorstore as ts_impl
571-
572-
captured_kwargs: list[dict] = []
573-
real_ts_open = ts_impl._ts_open
574-
575-
def spy_ts_open(spec, **kwargs):
576-
captured_kwargs.append(kwargs)
577-
return real_ts_open(spec, **kwargs)
578-
579-
monkeypatch.setattr(ts_impl, "_ts_open", spy_ts_open)
580-
581-
channel_names = ["DAPI"]
582-
random_5d = np.random.default_rng(0).random((1, 1, 1, 4, 4), dtype=np.float32)
583-
ts_config = TensorStoreConfig(recheck_cached_data=recheck_cached_data)
584-
585-
with _temp_ome_zarr(random_5d, channel_names, arr_name="0", version="0.5") as dataset:
586-
store_path = Path(dataset.zgroup.store.root)
587-
with open_ome_zarr(
588-
store_path,
589-
layout="fov",
590-
mode="r",
591-
implementation="tensorstore",
592-
implementation_config=ts_config,
593-
) as ts_dataset:
594-
handle = ts_dataset["0"].native
595-
assert isinstance(handle, ts.TensorStore)
596-
assert_array_equal(np.asarray(handle.read().result()), random_5d)
597-
598-
open_calls = [k for k in captured_kwargs if k.get("open") is True]
599-
assert open_calls, "Expected at least one ts.open(open=True) call"
600-
last = open_calls[-1]
601-
if recheck_cached_data is None:
602-
assert "recheck_cached_data" not in last
603-
else:
604-
assert last.get("recheck_cached_data") == recheck_cached_data
605-
606-
607556
@given(
608557
channels_and_random_5d=_channels_and_random_5d(),
609558
arr_name=short_alpha_numeric,

0 commit comments

Comments (0)